From bd2c94c96be234a05940aa09f94a9c0f0b65efb0 Mon Sep 17 00:00:00 2001 From: jtsmedley <38006759+jtsmedley@users.noreply.github.com> Date: Fri, 2 Feb 2024 11:52:33 -0600 Subject: [PATCH 01/16] Fix Support for ESM (#5) Co-authored-by: Roger Graham --- .gitignore | 3 +- dist/index.d.mts | 1354 ++++ dist/index.d.ts | 1354 ++++ dist/index.js | 18037 ++++++++++++++++++++++++++++++++++++++++++++ dist/index.mjs | 18045 +++++++++++++++++++++++++++++++++++++++++++++ package.json | 16 +- tsup.config.js | 11 + yarn.lock | 2487 +++---- 8 files changed, 39704 insertions(+), 1603 deletions(-) create mode 100644 dist/index.d.mts create mode 100644 dist/index.d.ts create mode 100644 dist/index.js create mode 100644 dist/index.mjs create mode 100644 tsup.config.js diff --git a/.gitignore b/.gitignore index c479dda..41b5ac4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ -dist node_modules documentation -.env \ No newline at end of file +.env diff --git a/dist/index.d.mts b/dist/index.d.mts new file mode 100644 index 0000000..e2f52a1 --- /dev/null +++ b/dist/index.d.mts @@ -0,0 +1,1354 @@ +import { S3Client, CreateBucketCommand, ListBucketsCommand, DeleteBucketCommand, PutBucketAclCommand, GetBucketAclCommand, HeadObjectCommand, GetObjectCommand, ListObjectsV2Command, DeleteObjectCommand, CopyObjectCommand } from '@aws-sdk/client-s3'; +import axios from 'axios'; +import { Upload } from '@aws-sdk/lib-storage'; +import { CarWriter } from '@ipld/car'; +import { car } from '@helia/car'; +import { unixfs } from '@helia/unixfs'; +import { FsBlockstore } from 'blockstore-fs'; +import { createWriteStream, createReadStream } from 'node:fs'; +import { mkdir, rm } from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; +import { Readable } from 'node:stream'; +import { v4 } from 'uuid'; + +/** Provides methods for managing buckets in an S3 endpoint. 
*/ +class BucketManager { + #DEFAULT_ENDPOINT = "https://s3.filebase.com"; + #DEFAULT_REGION = "us-east-1"; + + #client; + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-bucket + * @example + * import { BucketManager } from "@filebase/sdk"; + * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + clientConfiguration = { + credentials: { + accessKeyId: clientKey, + secretAccessKey: clientSecret, + }, + endpoint: clientEndpoint, + region: this.#DEFAULT_REGION, + forcePathStyle: true, + }; + this.#client = new S3Client(clientConfiguration); + } + + /** + * @typedef {Object} bucket + * @property {string} Name The name of the bucket + * @property {date} Date the bucket was created + */ + + /** + * @summary Creates a new bucket with the specified name. + * @param {string} name - The name of the bucket to create. + * @returns {Promise} - A promise that resolves when the bucket is created. + * @example + * // Create bucket with name of `create-bucket-example` + * await bucketManager.create(`create-bucket-example`); + */ + async create(name) { + const command = new CreateBucketCommand({ + Bucket: name, + }); + + return await this.#client.send(command); + } + + /** + * @summary Lists the buckets in the client. + * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client. 
+ * @example + * // List all buckets + * await bucketManager.list(); + */ + async list() { + const command = new ListBucketsCommand({}), + { Buckets } = await this.#client.send(command); + + return Buckets; + } + + /** + * @summary Deletes the specified bucket. + * @param {string} name - The name of the bucket to delete. + * @returns {Promise} - A promise that resolves when the bucket is deleted. + * @example + * // Delete bucket with name of `bucket-name-to-delete` + * await bucketManager.delete(`bucket-name-to-delete`); + */ + async delete(name) { + const command = new DeleteBucketCommand({ + Bucket: name, + }); + + await this.#client.send(command); + return true; + } + + /** + * @summary Sets the privacy of a given bucket. + * @param {string} name - The name of the bucket to toggle. + * @param {boolean} targetState - The new target state. [true=private,false=public] + * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled. + * @example + * // Toggle bucket with label of `toggle-bucket-example` + * await bucketManager.setPrivacy(`toggle-bucket-example`, true); // Enabled + * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled + */ + + async setPrivacy(name, targetState) { + const command = new PutBucketAclCommand({ + Bucket: name, + ACL: targetState ? "private" : "public-read", + }); + + await this.#client.send(command); + return true; + } + + /** + * @summary Gets the privacy of a given bucket + * @param {string} name - The name of the bucket to query. + * @returns {Promise} A promise that resolves to true if the bucket is private. 
+ */ + async getPrivacy(name) { + const command = new GetBucketAclCommand({ + Bucket: name, + }); + + const response = await this.#client.send(command), + readPermission = response.Grants.find((grant) => { + return grant.Grantee.Type === "Group" && grant.Permission === "READ"; + }); + return !(typeof readPermission !== "undefined"); + } +} + +const GATEWAY_DEFAULT_TIMEOUT = 60000; + +async function downloadFromGateway(cid, options) { + if (typeof options.endpoint !== "string") { + throw new Error(`Default Gateway must be set`); + } + + const downloadHeaders = {}; + if (options.token) { + downloadHeaders["x-filebase-gateway-token"] = options.token; + } + + const downloadResponse = await axios.request({ + method: "GET", + baseURL: options.endpoint, + url: `/ipfs/${cid}`, + headers: downloadHeaders, + type: "stream", + timeout: options?.timeout || GATEWAY_DEFAULT_TIMEOUT, + }); + return downloadResponse.data; +} + +function apiErrorHandler(err) { + if ( + err?.response && + err?.response?.status && + (err.response.status.toString()[0] === "4" || + err.response.status.toString()[0] === "5") + ) { + throw new Error( + err.response.data.error?.details || + err.response.data.error?.reason || + err, + ); + } + throw err; +} + +class GatewayManager { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 60000; + + #client; + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-gateway + * @example + * import { GatewayManager } from "@filebase/sdk"; + * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? 
process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString( + "base64", + ), + baseURL = `${clientEndpoint}/v1/gateways`; + this.#client = axios.create({ + baseURL: baseURL, + timeout: this.#DEFAULT_TIMEOUT, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + } + + /** + * @typedef {Object} gateway + * @property {string} name Name for the gateway + * @property {string} domain Custom Domain for the gateway + * @property {boolean} enabled Whether the gateway is enabled or not + * @property {string} private Whether the gateway is scoped to users content + * @property {date} created_at Date the gateway was created + * @property {date} updated_at Date the gateway was last updated + */ + + /** + * @typedef {Object} gatewayOptions + * @property {boolean} [domain] Optional Domain to allow for using a Custom Domain + * @property {string} [enabled] Optional Toggle to use for enabling the gateway + * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private + */ + + /** + * @summary Creates a gateway with the given name and options + * @param {string} name Unique name across entire platform for the gateway. Must be a valid subdomain name. + * @param {gatewayOptions} [options] + * @returns {Promise} - A promise that resolves to the value of a gateway. + * @example + * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`. + * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`. 
+ * await gatewayManager.create(`create-gateway-example`, { + * domain: `cname.mycustomdomain.com` + * }); + */ + async create(name, options = {}) { + try { + let createOptions = { + name, + }; + if (typeof options.domain === "string") { + createOptions.domain = options.domain; + } + if (typeof options.enabled === "boolean") { + createOptions.enabled = options.enabled; + } + if (typeof options.private === "boolean") { + createOptions.private = options.private; + } + const createResponse = await this.#client.request({ + method: "POST", + data: createOptions, + }); + return createResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Deletes a gateway with the given name. + * @param {string} name - The name of the gateway to delete. + * @returns {Promise} - A promise that resolves to true if the gateway was successfully deleted. + * @example + * // Delete gateway with name of `delete-gateway-example` + * await gatewayManager.delete(`delete-name-example`); + */ + async delete(name) { + try { + await this.#client.request({ + method: "DELETE", + url: `/${name}`, + validateStatus: (status) => { + return status === 204; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns the value of a gateway + * @param {string} name - Parameter representing the name to get. + * @returns {Promise} - A promise that resolves to the value of a gateway. + * @example + * // Get gateway with name of `gateway-get-example` + * await gatewayManager.get(`gateway-get-example`); + */ + async get(name) { + try { + const getResponse = await this.#client.request({ + method: "GET", + url: `/${name}`, + validateStatus: (status) => { + return status === 200 || status === 404; + }, + }); + return getResponse.status === 200 ? getResponse.data : false; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns a list of gateways + * @returns {Promise>} - A promise that resolves to an array of gateways. 
+ * @example + * // List all gateways + * await gatewayManager.list(); + */ + async list() { + try { + const getResponse = await this.#client.request({ + method: "GET", + }); + return getResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Updates the specified gateway. + * @param {string} name - The name of the gateway to update. + * @param {gatewayOptions} options - The options for the update operation. + * + * @returns {Promise} - A Promise that resolves to true if the gateway was updated. + * @example + * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user. + * await gatewayManager.update(`update-gateway-example`, { + * private: true + * }); + */ + async update(name, options) { + try { + const updateOptions = { + name, + }; + if (options?.domain) { + updateOptions.domain = String(options.private); + } + if (options?.enabled) { + updateOptions.enabled = Boolean(options.enabled); + } + if (options?.private) { + updateOptions.private = Boolean(options.private); + } + await this.#client.request({ + method: "PUT", + url: `/${name}`, + data: updateOptions, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Toggles the enabled state of a given gateway. + * @param {string} name - The name of the gateway to toggle. + * @param {boolean} targetState - The new target state. + * @returns {Promise} A promise that resolves to true if the gateway was successfully toggled. 
+ * @example + * // Toggle gateway with label of `toggle-gateway-example` + * await gatewayManager.toggle(`toggle-gateway-example`, true); // Enabled + * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled + */ + async toggle(name, targetState) { + try { + await this.#client.request({ + method: "PUT", + url: `/${name}`, + data: { + enabled: Boolean(targetState), + }, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } +} + +/** Provides methods for managing names in an REST endpoint. */ +class NameManager { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 60000; + + #client; + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-name + * @example + * import { NameManager } from "@filebase/sdk"; + * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? 
process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString( + "base64", + ), + baseURL = `${clientEndpoint}/v1/names`; + this.#client = axios.create({ + baseURL: baseURL, + timeout: this.#DEFAULT_TIMEOUT, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + } + + /** + * @typedef {Object} name + * @property {string} label Descriptive label for the Key + * @property {string} network_key IPNS Key CID + * @property {string} cid Value that name Publishes + * @property {number} sequence Version Number for the name + * @property {boolean} enabled Whether the name is being Published or not + * @property {date} published_at Date the name was last published to the DHT + * @property {date} created_at Date the name was created + * @property {date} updated_at Date the name was last updated + */ + + /** + * @typedef {Object} nameOptions + * @property {boolean} [enabled] Whether the name is enabled or not. + */ + + /** + * @summary Creates a new IPNS name with the given name as the label and CID. + * @param {string} label - The label of the new IPNS name. + * @param {string} cid - The CID of the IPNS name. + * @param {nameOptions} [options] - Additional options for the IPNS name. + * @returns {Promise} - A Promise that resolves with the response JSON. + * @example + * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`); + */ + async create( + label, + cid, + options = { + enabled: true, + }, + ) { + try { + const createResponse = await this.#client.request({ + method: "POST", + data: { + label, + cid, + enabled: options?.enabled !== false, + }, + }); + return createResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Imports a user's IPNS private key. 
+ * @param {string} label - The label for the IPNS name. + * @param {string} cid - The CID (Content Identifier) of the data. + * @param {string} privateKey - The existing private key encoded in Base64. + * @param {nameOptions} [options] - Additional options for the IPNS name. + * @returns {Promise} - A Promise that resolves to the server response. + * @example + * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * // and a private key encoded with base64 + * await nameManager.import( + * `create-name-example`, + * `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * `BASE64_ENCODED_PRIVATEKEY` + * ); + */ + async import( + label, + cid, + privateKey, + options = { + enabled: true, + }, + ) { + try { + const importResponse = await this.#client.request({ + method: "POST", + data: { + label, + cid, + network_private_key: privateKey, + enabled: options?.enabled !== false, + }, + }); + return importResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Updates the specified name with the given CID. + * @param {string} label - The label of the name to update. + * @param {string} cid - The cid to associate with the name. + * @param {nameOptions} options - The options for the set operation. + * + * @returns {Promise} - A Promise that resolves to true if the IPNS name was updated. + * @example + * // Update name with label of `update-name-example` and set the value of the IPNS name. 
+ * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`); + */ + async update(label, cid, options = {}) { + try { + const updateOptions = { + cid, + }; + if (options?.enabled) { + updateOptions.enabled = Boolean(options.enabled); + } + await this.#client.request({ + method: "PUT", + url: `/${label}`, + data: updateOptions, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns the value of an IPNS name + * @param {string} label - Parameter representing the label of the name to resolve. + * @returns {Promise} - A promise that resolves to the value of a name. + * @example + * // Get IPNS name with label of `list-name-example` + * await nameManager.get(`list-name-example`); + */ + async get(label) { + try { + const getResponse = await this.#client.request({ + method: "GET", + url: `/${label}`, + validateStatus: (status) => { + return status === 200 || status === 404; + }, + }); + return getResponse.status === 200 ? getResponse.data : false; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns a list of IPNS names + * @returns {Promise>} - A promise that resolves to an array of names. + * @example + * // List all IPNS names + * await nameManager.list(); + */ + async list() { + try { + const listResponse = await this.#client.request({ + method: "GET", + }); + return listResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Deletes an IPNS name with the given label. + * @param {string} label - The label of the IPNS name to delete. + * @returns {Promise} - A promise that resolves to true if the IPNS name was successfully deleted. 
+ * @example + * // List IPNS name with label of `delete-name-example` + * await nameManager.delete(`delete-name-example`); + */ + async delete(label) { + try { + await this.#client.request({ + method: "DELETE", + url: `/${label}`, + validateStatus: (status) => { + return status === 204; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Toggles the enabled state of a given IPNS name. + * @param {string} label - The label of the IPNS name to toggle. + * @param {boolean} targetState - The new target state. + * @returns {Promise} A promise that resolves to true if the IPNS name was successfully toggled. + * @example + * // Toggle IPNS name with label of `toggle-name-example` + * await nameManager.toggle(`toggle-name-example`, true); // Enabled + * await nameManager.toggle(`toggle-name-example`, false); // Disabled + */ + async toggle(label, targetState) { + try { + await this.#client.request({ + method: "PUT", + url: `/${label}`, + data: { + enabled: targetState, + }, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } +} + +// S3 Imports + +/** Interacts with an S3 client to perform various operations on objects in a bucket. */ +class ObjectManager { + #DEFAULT_ENDPOINT = "https://s3.filebase.com"; + #DEFAULT_REGION = "us-east-1"; + #DEFAULT_MAX_CONCURRENT_UPLOADS = 4; + + #client; + #credentials; + #defaultBucket; + #gatewayConfiguration; + #maxConcurrentUploads; + + /** + * @typedef {Object} objectManagerOptions Optional settings for the constructor. + * @property {string} [bucket] Default bucket to use. + * @property {objectDownloadOptions} [gateway] Default gateway to use. + * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads. + */ + + /** + * @typedef {Object} objectDownloadOptions Optional settings for downloading objects + * @property {string} endpoint Default gateway to use. 
+ * @property {string} [token] Token for the default gateway. + * @property {number} [timeout=60000] Timeout for the default gateway + */ + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @param {objectManagerOptions} options - Optional settings for the constructor. + * @tutorial quickstart-object + * @example + * import { ObjectManager } from "@filebase/sdk"; + * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", { + * bucket: "my-default-bucket", + * maxConcurrentUploads: 4, + * gateway: { + * endpoint: "https://my-default-gateway.mydomain.com + * token: SUPER_SECRET_GATEWAY_TOKEN + * } + * }); + */ + constructor(clientKey, clientSecret, options) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + clientConfiguration = { + credentials: { + accessKeyId: clientKey, + secretAccessKey: clientSecret, + }, + endpoint: clientEndpoint, + region: this.#DEFAULT_REGION, + forcePathStyle: true, + }; + this.#defaultBucket = options?.bucket; + this.#maxConcurrentUploads = + options?.maxConcurrentUploads || this.#DEFAULT_MAX_CONCURRENT_UPLOADS; + this.#credentials = { + key: clientKey, + secret: clientSecret, + }; + this.#client = new S3Client(clientConfiguration); + + this.#gatewayConfiguration = { + endpoint: options?.gateway?.endpoint, + token: options?.gateway?.token, + timeout: options?.gateway?.timeout, + }; + } + + /** + * @typedef {Object} objectOptions + * @property {string} [bucket] - The bucket to pin the IPFS CID into. 
+ */ + + /** + * @typedef {Object} objectHeadResult + * @property {string} cid The CID of the uploaded object + * @property {function} download Convenience function to download the object via S3 or the selected gateway + * @property {array} [entries] If a directory then returns an array of the containing objects + * @property {string} entries.cid The CID of the uploaded object + * @property {string} entries.path The path of the object + */ + + /** + * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload. + * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file + * and an optional entries object when uploading a CAR file. + * + * @summary Uploads a file or a CAR file to the specified bucket. + * @param {string} key - The key or path of the file in the bucket. + * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded. + * If an array of files is provided, each file should have a 'path' property specifying the path of the file + * and a 'content' property specifying the content of the file. The SDK will then construct a CAR file locally + * and use that as the content of the object to be uploaded. + * @param {Object} [metadata] Optional metadata for pin object + * @param {objectOptions} [options] - The options for uploading the object. 
+ * @returns {Promise} + * @example + * // Upload Object + * await objectManager.upload("my-object", Buffer.from("Hello World!")); + * // Upload Object with Metadata + * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), { + * "application": "my-filebase-app" + * }); + * // Upload Directory + * await objectManager.upload("my-first-directory", [ + * { + * path: "/testObjects/1.txt", + * content: Buffer.from("upload test object", "utf-8"), + * }, + * { + * path: "/testObjects/deep/1.txt", + * content: Buffer.from("upload deep test object", "utf-8"), + * }, + * { + * path: "/topLevel.txt", + * content: Buffer.from("upload top level test object", "utf-8"), + * }, + * ]); + */ + async upload(key, source, metadata, options) { + // Generate Upload UUID + const uploadUUID = v4(); + + // Setup Upload Options + const bucket = options?.bucket || this.#defaultBucket, + uploadOptions = { + client: this.#client, + params: { + Bucket: bucket, + Key: key, + Body: source, + Metadata: metadata || {}, + }, + queueSize: this.#maxConcurrentUploads, + partSize: 26843546, //25.6Mb || 250Gb Max File Size + }; + + // Pack Multiple Files into CAR file for upload + let parsedEntries = {}; + if (Array.isArray(source)) { + // Mark Upload as a CAR file import + uploadOptions.params.Metadata = { + ...uploadOptions.params.Metadata, + import: "car", + }; + + let temporaryCarFilePath, temporaryBlockstoreDir; + try { + // Setup Blockstore + temporaryBlockstoreDir = path.resolve( + os.tmpdir(), + "filebase-sdk", + "uploads", + uploadUUID, + ); + temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`; + await mkdir(temporaryBlockstoreDir, { recursive: true }); + const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir); + + const heliaFs = unixfs({ + blockstore: temporaryBlockstore, + }); + + for (let sourceEntry of source) { + sourceEntry.path = + sourceEntry.path[0] === "/" + ? 
`/${uploadUUID}${sourceEntry.path}` + : `/${uploadUUID}/${sourceEntry.path}`; + } + for await (const entry of heliaFs.addAll(source)) { + parsedEntries[entry.path] = entry; + } + const rootEntry = parsedEntries[uploadUUID]; + + // Get carFile stream here + const carExporter = car({ blockstore: temporaryBlockstore }), + { writer, out } = CarWriter.create([rootEntry.cid]); + + // Put carFile stream to disk + const output = createWriteStream(temporaryCarFilePath); + Readable.from(out).pipe(output); + await carExporter.export(rootEntry.cid, writer); + + // Set Uploader to Read from carFile on disk + uploadOptions.params.Body = createReadStream(temporaryCarFilePath); + + // Upload carFile via S3 + const parallelUploads3 = new Upload(uploadOptions); + await parallelUploads3.done(); + await temporaryBlockstore.close(); + } finally { + if (typeof temporaryBlockstoreDir !== "undefined") { + // Delete Temporary Blockstore + await rm(temporaryBlockstoreDir, { recursive: true, force: true }); + } + } + } else { + // Upload file via S3 + const parallelUploads3 = new Upload(uploadOptions); + await parallelUploads3.done(); + } + + // Get CID from Platform + const command = new HeadObjectCommand({ + Bucket: bucket, + Key: key, + Body: source, + }), + headResult = await this.#client.send(command), + responseCid = headResult.Metadata.cid; + + if (Object.keys(parsedEntries).length === 0) { + return { + cid: responseCid, + download: () => { + return this.#routeDownload(responseCid, key, options); + }, + }; + } + return { + cid: responseCid, + download: () => { + return this.#routeDownload(responseCid, key, options); + }, + entries: parsedEntries, + }; + } + + async #routeDownload(cid, key, options) { + return typeof this.#gatewayConfiguration.endpoint !== "undefined" + ? downloadFromGateway(cid, this.#gatewayConfiguration) + : this.download(key, options); + } + + /** + * @summary Gets an objects info and metadata using the S3 API. 
+ * @param {string} key - The key of the object to be inspected. + * @param {objectOptions} [options] - The options for inspecting the object. + * @returns {Promise} + */ + async get(key, options) { + const bucket = options?.bucket || this.#defaultBucket; + try { + const command = new HeadObjectCommand({ + Bucket: bucket, + Key: key, + }), + response = await this.#client.send(command); + + response.download = () => { + return this.#routeDownload(response.Metadata.cid, key, options); + }; + + return response; + } catch (err) { + if (err.name === "NotFound") { + return false; + } + throw err; + } + } + + /** + * @summary Downloads an object from the specified bucket using the provided key. + * @param {string} key - The key of the object to be downloaded. + * @param {objectOptions} [options] - The options for downloading the object.. + * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream. + * @example + * // Download object with name of `download-object-example` + * await objectManager.download(`download-object-example`); + */ + async download(key, options) { + // Download via IPFS Gateway if Setup or S3 by Default + if (typeof this.#gatewayConfiguration.endpoint === "string") { + const objectToFetch = await this.get(key, options); + return objectToFetch.download(); + } else { + const command = new GetObjectCommand({ + Bucket: options?.bucket || this.#defaultBucket, + Key: key, + }), + response = await this.#client.send(command); + + return response.Body; + } + } + + /** + * @typedef {Object} listObjectsResult + * @property {boolean} IsTruncated Indicates if more results exist on the server + * @property {string} NextContinuationToken ContinuationToken used to paginate list requests + * @property {Array} Contents List of Keys stored in the S3 Bucket + * @property {string} Contents.Key Key of the Object + * @property {string} Contents.LastModified Date Last Modified of the Object + * @property {string} Contents.CID CID 
of the Object + * @property {string} Contents.ETag ETag of the Object + * @property {number} Contents.Size Size in Bytes of the Object + * @property {string} Contents.StorageClass Class of Storage of the Object + * @property {function} Contents.download Convenience function to download the item using the S3 gateway + */ + + /** + * @typedef {Object} listObjectOptions + * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used. + * @property {string} [ContinuationToken=null] Continues listing from this objects name. + * @property {string} [Delimiter=null] Character used to group keys + * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000. + */ + + /** + * Retrieves a list of objects from a specified bucket. + * + * @param {listObjectOptions} options - The options for listing objects. + * @returns {Promise} - A promise that resolves to an array of objects. + * @example + * // List objects in bucket with a limit of 1000 + * await objectManager.list({ + * MaxKeys: 1000 + * }); + */ + async list( + options = { + Bucket: this.#defaultBucket, + ContinuationToken: null, + Delimiter: null, + MaxKeys: 1000, + }, + ) { + if (options?.MaxKeys && options.MaxKeys > 100000) { + throw new Error(`MaxKeys Maximum value is 100000`); + } + const bucket = options?.Bucket || this.#defaultBucket, + limit = options?.MaxKeys || 1000, + commandOptions = { + Bucket: bucket, + MaxKeys: limit, + }, + command = new ListObjectsV2Command({ + ...options, + ...commandOptions, + }); + + const { Contents, IsTruncated, NextContinuationToken } = + await this.#client.send(command); + return { Contents, IsTruncated, NextContinuationToken }; + } + + /** + * @summary Deletes an object from the specified bucket using the provided key. + * @param {string} key - The key of the object to be deleted. + * @param {objectOptions} [options] - The options for deleting the file. 
+ * @returns {Promise} - A Promise that resolves with the result of the delete operation. + * @example + * // Delete object with name of `delete-object-example` + * await objectManager.delete(`delete-object-example`); + */ + async delete(key, options) { + const command = new DeleteObjectCommand({ + Bucket: options?.bucket || this.#defaultBucket, + Key: key, + }); + + await this.#client.send(command); + return true; + } + + /** + * @typedef {Object} copyObjectOptions + * @property {string} [sourceBucket] The source bucket from where the object is to be copied. + * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey. + */ + + /** + * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey. + * + * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket. + * @param {string} sourceKey - The key of the object to be copied from the sourceBucket. + * @param {string} destinationBucket - The bucket where the object will be copied to. + * @param {copyObjectOptions} [options] - Additional options for the copy operation. + * + * @returns {Promise} - A Promise that resolves with the result of the copy operation. + * @example + * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest` + * // TIP: Set bucket on constructor and it will be used as the default source for copying objects. 
+ * await objectManager.copy(`copy-object-test`, `copy-object-dest`, { + * sourceBucket: `copy-object-src` + * }); + */ + async copy( + sourceKey, + destinationBucket, + options = { + sourceBucket: this.#defaultBucket, + destinationKey: undefined, + }, + ) { + const copySource = `${ + options?.sourceBucket || this.#defaultBucket + }/${sourceKey}`, + command = new CopyObjectCommand({ + CopySource: copySource, + Bucket: destinationBucket, + Key: options?.destinationKey || sourceKey, + }); + + await this.#client.send(command); + return true; + } +} + +/** Provides methods for managing pins in an REST endpoint. */ +class PinManager { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 60000; + + #client; + #credentials; + #gatewayConfiguration; + #defaultBucket; + + /** + * @typedef {Object} pinManagerOptions Optional settings for the constructor. + * @property {string} [bucket] Default bucket to use. + * @property {pinDownloadOptions} [gateway] Default gateway to use. + */ + + /** + * @typedef {Object} pinDownloadOptions Optional settings for downloading pins + * @property {string} endpoint Default gateway to use. + * @property {string} [token] Token for the default gateway. + * @property {number} [timeout=60000] Timeout for the default gateway + */ + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @param {pinManagerOptions} [options] - Optional settings for the constructor. 
+ * @tutorial quickstart-pin + * @example + * import { PinManager } from "@filebase/sdk"; + * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", { + * bucket: "my-default-bucket", + * gateway: { + * endpoint: "https://my-default-gateway.mydomain.com + * token: SUPER_SECRET_GATEWAY_TOKEN + * } + * }); + */ + constructor(clientKey, clientSecret, options) { + this.#defaultBucket = options?.bucket; + const PSAClientEndpoint = + process.env.NODE_ENV === "test" + ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`; + this.#credentials = { + key: clientKey, + secret: clientSecret, + }; + this.#client = axios.create({ + baseURL: baseURL, + timeout: this.#DEFAULT_TIMEOUT, + }); + + this.#gatewayConfiguration = { + endpoint: options?.gateway?.endpoint, + token: options?.gateway?.token, + timeout: options?.gateway?.timeout || this.#DEFAULT_TIMEOUT, + }; + } + + /** + * @typedef {Object} pinStatus + * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal + * @property {string} status Status a pin object can have at a pinning service. 
("queued","pinning","pinned","failed") + * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination + * @property {Object} pin Pin object + * @property {string} pin.cid Content Identifier (CID) pinned recursively + * @property {string} pin.name Name for pinned data; can be used for lookups later + * @property {Array} pin.origins Optional list of multiaddrs known to provide the data + * @property {Object} pin.meta Optional metadata for pin object + * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data + * @property {object} [info] Optional info for PinStatus response + * @property {function} download Convenience function to download pin + */ + + /** + * @typedef {Object} pinOptions + * @property {string} [bucket] - The bucket to pin the IPFS CID into. + */ + + /** + * @typedef {Object} listPinOptions + * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts + * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match) + * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies + * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only) + * @property {string} [before] Return results created (queued) before provided timestamp + * @property {string} [after] Return results created (queued) after provided timestamp + * @property {number} [limit] Max records to return + * 
@property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport + */ + + /** + * @typedef {Object} listPinResults + * @property {number} count Total number of pin objects that exist for passed query filters + * @property {Array} Array of PinStatus results + */ + + /** + * @summary List the pins in a given bucket + * @param {listPinOptions} [listOptions] + * @param {pinOptions} [options] + * @returns {Promise} + * @example + * // List pins in bucket with a limit of 1000 + * await pinManager.list({ + * limit: 1000 + * }); + */ + async list(listOptions, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket), + getResponse = await this.#client.request({ + method: "GET", + params: listOptions, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + for (let pinStatus of getResponse.data.results) { + pinStatus.download = () => { + return this.download(pinStatus.pin.cid); + }; + } + return getResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Create a pin in the selected bucket + * @param {string} key Key or path of the file in the bucket + * @param {string} cid Content Identifier (CID) to be pinned recursively + * @param {Object} [metadata] Optional metadata for pin object + * @param {pinOptions} [options] Options for pinning the object + * @returns {Promise} + * @example + * // Create Pin with Metadata + * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", { + * "application": "my-custom-app-on-filebase" + * }); + */ + async create(key, cid, metadata, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket), + pinStatus = await this.#client.request({ + method: "POST", + data: { + cid, + name: key, + meta: metadata, + }, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); 
+ pinStatus.data.download = () => { + return this.download(pinStatus.data.pin.cid); + }; + return pinStatus.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @typedef {Object} replacePinOptions + * @augments pinOptions + * @property {Object} [metadata] Optional metadata to set on pin during replacement + * @property {string} [name] Optional name for pin to set during replacement + */ + + /** + * @summary Replace a pinned object in the selected bucket + * @param {string} requestid Unique ID for the pinned object + * @param {string} cid Content Identifier (CID) to be pinned recursively + * @param {replacePinOptions} [options] Options for pinning the object + * @returns {Promise} + * @example + * // Replace Pin with Metadata + * await pinManager.create("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", { + * "revision": Date.now() + * } + */ + async replace(requestid, cid, options) { + try { + let replaceData = { + cid, + meta: options?.metadata || {}, + }; + if (options?.name) { + replaceData.name = options.name; + } + + const encodedToken = this.#getEncodedToken(options?.bucket), + pinStatusResult = await this.#client.request({ + method: "POST", + url: `/${requestid}`, + data: replaceData, + validateStatus: (status) => { + return status === 200; + }, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + const pinStatus = pinStatusResult.data; + pinStatus.download = () => { + return this.download(pinStatus.pin.cid); + }; + return pinStatus; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Download a pin from the selected IPFS gateway + * @param {string} cid + * @param {pinDownloadOptions} [options] + * @returns {Promise} + * @example + * // Download Pin by CID + * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF"); + */ + async download(cid, options) { + const downloadOptions = Object.assign(this.#gatewayConfiguration, options); + return downloadFromGateway(cid, downloadOptions); + 
} + + /** + * @summary Get details about a pinned object + * @param {string} requestid Globally unique identifier of the pin request + * @param {pinOptions} [options] Options for getting the pin + * @returns {Promise} + * @example + * // Get Pin Info by RequestId + * await pinManager.get("qr4231214"); + */ + async get(requestid, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket), + getResponseResult = await this.#client.request({ + method: "GET", + url: `/${requestid}`, + headers: { Authorization: `Bearer ${encodedToken}` }, + validateStatus: (status) => { + return status === 200 || status === 404; + }, + }); + if (getResponseResult.status === 404) { + return false; + } + const pinStatus = getResponseResult.data; + pinStatus.download = () => { + return this.download(pinStatus.pin.cid); + }; + return pinStatus; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Delete a pinned object from the selected bucket + * @param requestid Globally unique identifier of the pin request + * @param {pinOptions} [options] Options for deleting the pin + * @returns {Promise} + * @example + * // Delete Pin by RequestId + * await pinManager.delete("qr4231213"); + */ + async delete(requestid, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket); + await this.#client.request({ + method: "DELETE", + url: `/${requestid}`, + headers: { Authorization: `Bearer ${encodedToken}` }, + validateStatus: (status) => { + return status === 202; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + #getEncodedToken(bucket) { + bucket = bucket || this.#defaultBucket; + return Buffer.from( + `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`, + ).toString("base64"); + } +} + +export { BucketManager, GatewayManager, NameManager, ObjectManager, PinManager }; diff --git a/dist/index.d.ts b/dist/index.d.ts new file mode 100644 index 0000000..e2f52a1 --- /dev/null +++ b/dist/index.d.ts @@ 
-0,0 +1,1354 @@ +import { S3Client, CreateBucketCommand, ListBucketsCommand, DeleteBucketCommand, PutBucketAclCommand, GetBucketAclCommand, HeadObjectCommand, GetObjectCommand, ListObjectsV2Command, DeleteObjectCommand, CopyObjectCommand } from '@aws-sdk/client-s3'; +import axios from 'axios'; +import { Upload } from '@aws-sdk/lib-storage'; +import { CarWriter } from '@ipld/car'; +import { car } from '@helia/car'; +import { unixfs } from '@helia/unixfs'; +import { FsBlockstore } from 'blockstore-fs'; +import { createWriteStream, createReadStream } from 'node:fs'; +import { mkdir, rm } from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; +import { Readable } from 'node:stream'; +import { v4 } from 'uuid'; + +/** Provides methods for managing buckets in an S3 endpoint. */ +class BucketManager { + #DEFAULT_ENDPOINT = "https://s3.filebase.com"; + #DEFAULT_REGION = "us-east-1"; + + #client; + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-bucket + * @example + * import { BucketManager } from "@filebase/sdk"; + * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + clientConfiguration = { + credentials: { + accessKeyId: clientKey, + secretAccessKey: clientSecret, + }, + endpoint: clientEndpoint, + region: this.#DEFAULT_REGION, + forcePathStyle: true, + }; + this.#client = new S3Client(clientConfiguration); + } + + /** + * @typedef {Object} bucket + * @property {string} Name The name of the bucket + * @property {date} Date the bucket was created + */ + + /** + * @summary Creates a new bucket with the specified name. 
+ * @param {string} name - The name of the bucket to create. + * @returns {Promise} - A promise that resolves when the bucket is created. + * @example + * // Create bucket with name of `create-bucket-example` + * await bucketManager.create(`create-bucket-example`); + */ + async create(name) { + const command = new CreateBucketCommand({ + Bucket: name, + }); + + return await this.#client.send(command); + } + + /** + * @summary Lists the buckets in the client. + * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client. + * @example + * // List all buckets + * await bucketManager.list(); + */ + async list() { + const command = new ListBucketsCommand({}), + { Buckets } = await this.#client.send(command); + + return Buckets; + } + + /** + * @summary Deletes the specified bucket. + * @param {string} name - The name of the bucket to delete. + * @returns {Promise} - A promise that resolves when the bucket is deleted. + * @example + * // Delete bucket with name of `bucket-name-to-delete` + * await bucketManager.delete(`bucket-name-to-delete`); + */ + async delete(name) { + const command = new DeleteBucketCommand({ + Bucket: name, + }); + + await this.#client.send(command); + return true; + } + + /** + * @summary Sets the privacy of a given bucket. + * @param {string} name - The name of the bucket to toggle. + * @param {boolean} targetState - The new target state. [true=private,false=public] + * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled. + * @example + * // Toggle bucket with label of `toggle-bucket-example` + * await bucketManager.setPrivacy(`toggle-bucket-example`, true); // Enabled + * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled + */ + + async setPrivacy(name, targetState) { + const command = new PutBucketAclCommand({ + Bucket: name, + ACL: targetState ? 
"private" : "public-read", + }); + + await this.#client.send(command); + return true; + } + + /** + * @summary Gets the privacy of a given bucket + * @param {string} name - The name of the bucket to query. + * @returns {Promise} A promise that resolves to true if the bucket is private. + */ + async getPrivacy(name) { + const command = new GetBucketAclCommand({ + Bucket: name, + }); + + const response = await this.#client.send(command), + readPermission = response.Grants.find((grant) => { + return grant.Grantee.Type === "Group" && grant.Permission === "READ"; + }); + return !(typeof readPermission !== "undefined"); + } +} + +const GATEWAY_DEFAULT_TIMEOUT = 60000; + +async function downloadFromGateway(cid, options) { + if (typeof options.endpoint !== "string") { + throw new Error(`Default Gateway must be set`); + } + + const downloadHeaders = {}; + if (options.token) { + downloadHeaders["x-filebase-gateway-token"] = options.token; + } + + const downloadResponse = await axios.request({ + method: "GET", + baseURL: options.endpoint, + url: `/ipfs/${cid}`, + headers: downloadHeaders, + type: "stream", + timeout: options?.timeout || GATEWAY_DEFAULT_TIMEOUT, + }); + return downloadResponse.data; +} + +function apiErrorHandler(err) { + if ( + err?.response && + err?.response?.status && + (err.response.status.toString()[0] === "4" || + err.response.status.toString()[0] === "5") + ) { + throw new Error( + err.response.data.error?.details || + err.response.data.error?.reason || + err, + ); + } + throw err; +} + +class GatewayManager { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 60000; + + #client; + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. 
+ * @tutorial quickstart-gateway + * @example + * import { GatewayManager } from "@filebase/sdk"; + * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString( + "base64", + ), + baseURL = `${clientEndpoint}/v1/gateways`; + this.#client = axios.create({ + baseURL: baseURL, + timeout: this.#DEFAULT_TIMEOUT, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + } + + /** + * @typedef {Object} gateway + * @property {string} name Name for the gateway + * @property {string} domain Custom Domain for the gateway + * @property {boolean} enabled Whether the gateway is enabled or not + * @property {string} private Whether the gateway is scoped to users content + * @property {date} created_at Date the gateway was created + * @property {date} updated_at Date the gateway was last updated + */ + + /** + * @typedef {Object} gatewayOptions + * @property {boolean} [domain] Optional Domain to allow for using a Custom Domain + * @property {string} [enabled] Optional Toggle to use for enabling the gateway + * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private + */ + + /** + * @summary Creates a gateway with the given name and options + * @param {string} name Unique name across entire platform for the gateway. Must be a valid subdomain name. + * @param {gatewayOptions} [options] + * @returns {Promise} - A promise that resolves to the value of a gateway. + * @example + * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`. + * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`. 
+ * await gatewayManager.create(`create-gateway-example`, { + * domain: `cname.mycustomdomain.com` + * }); + */ + async create(name, options = {}) { + try { + let createOptions = { + name, + }; + if (typeof options.domain === "string") { + createOptions.domain = options.domain; + } + if (typeof options.enabled === "boolean") { + createOptions.enabled = options.enabled; + } + if (typeof options.private === "boolean") { + createOptions.private = options.private; + } + const createResponse = await this.#client.request({ + method: "POST", + data: createOptions, + }); + return createResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Deletes a gateway with the given name. + * @param {string} name - The name of the gateway to delete. + * @returns {Promise} - A promise that resolves to true if the gateway was successfully deleted. + * @example + * // Delete gateway with name of `delete-gateway-example` + * await gatewayManager.delete(`delete-name-example`); + */ + async delete(name) { + try { + await this.#client.request({ + method: "DELETE", + url: `/${name}`, + validateStatus: (status) => { + return status === 204; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns the value of a gateway + * @param {string} name - Parameter representing the name to get. + * @returns {Promise} - A promise that resolves to the value of a gateway. + * @example + * // Get gateway with name of `gateway-get-example` + * await gatewayManager.get(`gateway-get-example`); + */ + async get(name) { + try { + const getResponse = await this.#client.request({ + method: "GET", + url: `/${name}`, + validateStatus: (status) => { + return status === 200 || status === 404; + }, + }); + return getResponse.status === 200 ? getResponse.data : false; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns a list of gateways + * @returns {Promise>} - A promise that resolves to an array of gateways. 
+ * @example + * // List all gateways + * await gatewayManager.list(); + */ + async list() { + try { + const getResponse = await this.#client.request({ + method: "GET", + }); + return getResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Updates the specified gateway. + * @param {string} name - The name of the gateway to update. + * @param {gatewayOptions} options - The options for the update operation. + * + * @returns {Promise} - A Promise that resolves to true if the gateway was updated. + * @example + * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user. + * await gatewayManager.update(`update-gateway-example`, { + * private: true + * }); + */ + async update(name, options) { + try { + const updateOptions = { + name, + }; + if (options?.domain) { + updateOptions.domain = String(options.private); + } + if (options?.enabled) { + updateOptions.enabled = Boolean(options.enabled); + } + if (options?.private) { + updateOptions.private = Boolean(options.private); + } + await this.#client.request({ + method: "PUT", + url: `/${name}`, + data: updateOptions, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Toggles the enabled state of a given gateway. + * @param {string} name - The name of the gateway to toggle. + * @param {boolean} targetState - The new target state. + * @returns {Promise} A promise that resolves to true if the gateway was successfully toggled. 
+ * @example + * // Toggle gateway with label of `toggle-gateway-example` + * await gatewayManager.toggle(`toggle-gateway-example`, true); // Enabled + * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled + */ + async toggle(name, targetState) { + try { + await this.#client.request({ + method: "PUT", + url: `/${name}`, + data: { + enabled: Boolean(targetState), + }, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } +} + +/** Provides methods for managing names in an REST endpoint. */ +class NameManager { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 60000; + + #client; + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-name + * @example + * import { NameManager } from "@filebase/sdk"; + * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? 
process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString( + "base64", + ), + baseURL = `${clientEndpoint}/v1/names`; + this.#client = axios.create({ + baseURL: baseURL, + timeout: this.#DEFAULT_TIMEOUT, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + } + + /** + * @typedef {Object} name + * @property {string} label Descriptive label for the Key + * @property {string} network_key IPNS Key CID + * @property {string} cid Value that name Publishes + * @property {number} sequence Version Number for the name + * @property {boolean} enabled Whether the name is being Published or not + * @property {date} published_at Date the name was last published to the DHT + * @property {date} created_at Date the name was created + * @property {date} updated_at Date the name was last updated + */ + + /** + * @typedef {Object} nameOptions + * @property {boolean} [enabled] Whether the name is enabled or not. + */ + + /** + * @summary Creates a new IPNS name with the given name as the label and CID. + * @param {string} label - The label of the new IPNS name. + * @param {string} cid - The CID of the IPNS name. + * @param {nameOptions} [options] - Additional options for the IPNS name. + * @returns {Promise} - A Promise that resolves with the response JSON. + * @example + * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`); + */ + async create( + label, + cid, + options = { + enabled: true, + }, + ) { + try { + const createResponse = await this.#client.request({ + method: "POST", + data: { + label, + cid, + enabled: options?.enabled !== false, + }, + }); + return createResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Imports a user's IPNS private key. 
+ * @param {string} label - The label for the IPNS name. + * @param {string} cid - The CID (Content Identifier) of the data. + * @param {string} privateKey - The existing private key encoded in Base64. + * @param {nameOptions} [options] - Additional options for the IPNS name. + * @returns {Promise} - A Promise that resolves to the server response. + * @example + * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * // and a private key encoded with base64 + * await nameManager.import( + * `create-name-example`, + * `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * `BASE64_ENCODED_PRIVATEKEY` + * ); + */ + async import( + label, + cid, + privateKey, + options = { + enabled: true, + }, + ) { + try { + const importResponse = await this.#client.request({ + method: "POST", + data: { + label, + cid, + network_private_key: privateKey, + enabled: options?.enabled !== false, + }, + }); + return importResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Updates the specified name with the given CID. + * @param {string} label - The label of the name to update. + * @param {string} cid - The cid to associate with the name. + * @param {nameOptions} options - The options for the set operation. + * + * @returns {Promise} - A Promise that resolves to true if the IPNS name was updated. + * @example + * // Update name with label of `update-name-example` and set the value of the IPNS name. 
+ * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`); + */ + async update(label, cid, options = {}) { + try { + const updateOptions = { + cid, + }; + if (options?.enabled) { + updateOptions.enabled = Boolean(options.enabled); + } + await this.#client.request({ + method: "PUT", + url: `/${label}`, + data: updateOptions, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns the value of an IPNS name + * @param {string} label - Parameter representing the label of the name to resolve. + * @returns {Promise} - A promise that resolves to the value of a name. + * @example + * // Get IPNS name with label of `list-name-example` + * await nameManager.get(`list-name-example`); + */ + async get(label) { + try { + const getResponse = await this.#client.request({ + method: "GET", + url: `/${label}`, + validateStatus: (status) => { + return status === 200 || status === 404; + }, + }); + return getResponse.status === 200 ? getResponse.data : false; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Returns a list of IPNS names + * @returns {Promise>} - A promise that resolves to an array of names. + * @example + * // List all IPNS names + * await nameManager.list(); + */ + async list() { + try { + const listResponse = await this.#client.request({ + method: "GET", + }); + return listResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Deletes an IPNS name with the given label. + * @param {string} label - The label of the IPNS name to delete. + * @returns {Promise} - A promise that resolves to true if the IPNS name was successfully deleted. 
+ * @example + * // List IPNS name with label of `delete-name-example` + * await nameManager.delete(`delete-name-example`); + */ + async delete(label) { + try { + await this.#client.request({ + method: "DELETE", + url: `/${label}`, + validateStatus: (status) => { + return status === 204; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Toggles the enabled state of a given IPNS name. + * @param {string} label - The label of the IPNS name to toggle. + * @param {boolean} targetState - The new target state. + * @returns {Promise} A promise that resolves to true if the IPNS name was successfully toggled. + * @example + * // Toggle IPNS name with label of `toggle-name-example` + * await nameManager.toggle(`toggle-name-example`, true); // Enabled + * await nameManager.toggle(`toggle-name-example`, false); // Disabled + */ + async toggle(label, targetState) { + try { + await this.#client.request({ + method: "PUT", + url: `/${label}`, + data: { + enabled: targetState, + }, + validateStatus: (status) => { + return status === 200; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } +} + +// S3 Imports + +/** Interacts with an S3 client to perform various operations on objects in a bucket. */ +class ObjectManager { + #DEFAULT_ENDPOINT = "https://s3.filebase.com"; + #DEFAULT_REGION = "us-east-1"; + #DEFAULT_MAX_CONCURRENT_UPLOADS = 4; + + #client; + #credentials; + #defaultBucket; + #gatewayConfiguration; + #maxConcurrentUploads; + + /** + * @typedef {Object} objectManagerOptions Optional settings for the constructor. + * @property {string} [bucket] Default bucket to use. + * @property {objectDownloadOptions} [gateway] Default gateway to use. + * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads. + */ + + /** + * @typedef {Object} objectDownloadOptions Optional settings for downloading objects + * @property {string} endpoint Default gateway to use. 
+ * @property {string} [token] Token for the default gateway. + * @property {number} [timeout=60000] Timeout for the default gateway + */ + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @param {objectManagerOptions} options - Optional settings for the constructor. + * @tutorial quickstart-object + * @example + * import { ObjectManager } from "@filebase/sdk"; + * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", { + * bucket: "my-default-bucket", + * maxConcurrentUploads: 4, + * gateway: { + * endpoint: "https://my-default-gateway.mydomain.com + * token: SUPER_SECRET_GATEWAY_TOKEN + * } + * }); + */ + constructor(clientKey, clientSecret, options) { + const clientEndpoint = + process.env.NODE_ENV === "test" + ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + clientConfiguration = { + credentials: { + accessKeyId: clientKey, + secretAccessKey: clientSecret, + }, + endpoint: clientEndpoint, + region: this.#DEFAULT_REGION, + forcePathStyle: true, + }; + this.#defaultBucket = options?.bucket; + this.#maxConcurrentUploads = + options?.maxConcurrentUploads || this.#DEFAULT_MAX_CONCURRENT_UPLOADS; + this.#credentials = { + key: clientKey, + secret: clientSecret, + }; + this.#client = new S3Client(clientConfiguration); + + this.#gatewayConfiguration = { + endpoint: options?.gateway?.endpoint, + token: options?.gateway?.token, + timeout: options?.gateway?.timeout, + }; + } + + /** + * @typedef {Object} objectOptions + * @property {string} [bucket] - The bucket to pin the IPFS CID into. 
+ */ + + /** + * @typedef {Object} objectHeadResult + * @property {string} cid The CID of the uploaded object + * @property {function} download Convenience function to download the object via S3 or the selected gateway + * @property {array} [entries] If a directory then returns an array of the containing objects + * @property {string} entries.cid The CID of the uploaded object + * @property {string} entries.path The path of the object + */ + + /** + * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload. + * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file + * and an optional entries object when uploading a CAR file. + * + * @summary Uploads a file or a CAR file to the specified bucket. + * @param {string} key - The key or path of the file in the bucket. + * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded. + * If an array of files is provided, each file should have a 'path' property specifying the path of the file + * and a 'content' property specifying the content of the file. The SDK will then construct a CAR file locally + * and use that as the content of the object to be uploaded. + * @param {Object} [metadata] Optional metadata for pin object + * @param {objectOptions} [options] - The options for uploading the object. 
+ * @returns {Promise} + * @example + * // Upload Object + * await objectManager.upload("my-object", Buffer.from("Hello World!")); + * // Upload Object with Metadata + * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), { + * "application": "my-filebase-app" + * }); + * // Upload Directory + * await objectManager.upload("my-first-directory", [ + * { + * path: "/testObjects/1.txt", + * content: Buffer.from("upload test object", "utf-8"), + * }, + * { + * path: "/testObjects/deep/1.txt", + * content: Buffer.from("upload deep test object", "utf-8"), + * }, + * { + * path: "/topLevel.txt", + * content: Buffer.from("upload top level test object", "utf-8"), + * }, + * ]); + */ + async upload(key, source, metadata, options) { + // Generate Upload UUID + const uploadUUID = v4(); + + // Setup Upload Options + const bucket = options?.bucket || this.#defaultBucket, + uploadOptions = { + client: this.#client, + params: { + Bucket: bucket, + Key: key, + Body: source, + Metadata: metadata || {}, + }, + queueSize: this.#maxConcurrentUploads, + partSize: 26843546, //25.6Mb || 250Gb Max File Size + }; + + // Pack Multiple Files into CAR file for upload + let parsedEntries = {}; + if (Array.isArray(source)) { + // Mark Upload as a CAR file import + uploadOptions.params.Metadata = { + ...uploadOptions.params.Metadata, + import: "car", + }; + + let temporaryCarFilePath, temporaryBlockstoreDir; + try { + // Setup Blockstore + temporaryBlockstoreDir = path.resolve( + os.tmpdir(), + "filebase-sdk", + "uploads", + uploadUUID, + ); + temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`; + await mkdir(temporaryBlockstoreDir, { recursive: true }); + const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir); + + const heliaFs = unixfs({ + blockstore: temporaryBlockstore, + }); + + for (let sourceEntry of source) { + sourceEntry.path = + sourceEntry.path[0] === "/" + ? 
`/${uploadUUID}${sourceEntry.path}` + : `/${uploadUUID}/${sourceEntry.path}`; + } + for await (const entry of heliaFs.addAll(source)) { + parsedEntries[entry.path] = entry; + } + const rootEntry = parsedEntries[uploadUUID]; + + // Get carFile stream here + const carExporter = car({ blockstore: temporaryBlockstore }), + { writer, out } = CarWriter.create([rootEntry.cid]); + + // Put carFile stream to disk + const output = createWriteStream(temporaryCarFilePath); + Readable.from(out).pipe(output); + await carExporter.export(rootEntry.cid, writer); + + // Set Uploader to Read from carFile on disk + uploadOptions.params.Body = createReadStream(temporaryCarFilePath); + + // Upload carFile via S3 + const parallelUploads3 = new Upload(uploadOptions); + await parallelUploads3.done(); + await temporaryBlockstore.close(); + } finally { + if (typeof temporaryBlockstoreDir !== "undefined") { + // Delete Temporary Blockstore + await rm(temporaryBlockstoreDir, { recursive: true, force: true }); + } + } + } else { + // Upload file via S3 + const parallelUploads3 = new Upload(uploadOptions); + await parallelUploads3.done(); + } + + // Get CID from Platform + const command = new HeadObjectCommand({ + Bucket: bucket, + Key: key, + Body: source, + }), + headResult = await this.#client.send(command), + responseCid = headResult.Metadata.cid; + + if (Object.keys(parsedEntries).length === 0) { + return { + cid: responseCid, + download: () => { + return this.#routeDownload(responseCid, key, options); + }, + }; + } + return { + cid: responseCid, + download: () => { + return this.#routeDownload(responseCid, key, options); + }, + entries: parsedEntries, + }; + } + + async #routeDownload(cid, key, options) { + return typeof this.#gatewayConfiguration.endpoint !== "undefined" + ? downloadFromGateway(cid, this.#gatewayConfiguration) + : this.download(key, options); + } + + /** + * @summary Gets an objects info and metadata using the S3 API. 
+ * @param {string} key - The key of the object to be inspected. + * @param {objectOptions} [options] - The options for inspecting the object. + * @returns {Promise} + */ + async get(key, options) { + const bucket = options?.bucket || this.#defaultBucket; + try { + const command = new HeadObjectCommand({ + Bucket: bucket, + Key: key, + }), + response = await this.#client.send(command); + + response.download = () => { + return this.#routeDownload(response.Metadata.cid, key, options); + }; + + return response; + } catch (err) { + if (err.name === "NotFound") { + return false; + } + throw err; + } + } + + /** + * @summary Downloads an object from the specified bucket using the provided key. + * @param {string} key - The key of the object to be downloaded. + * @param {objectOptions} [options] - The options for downloading the object.. + * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream. + * @example + * // Download object with name of `download-object-example` + * await objectManager.download(`download-object-example`); + */ + async download(key, options) { + // Download via IPFS Gateway if Setup or S3 by Default + if (typeof this.#gatewayConfiguration.endpoint === "string") { + const objectToFetch = await this.get(key, options); + return objectToFetch.download(); + } else { + const command = new GetObjectCommand({ + Bucket: options?.bucket || this.#defaultBucket, + Key: key, + }), + response = await this.#client.send(command); + + return response.Body; + } + } + + /** + * @typedef {Object} listObjectsResult + * @property {boolean} IsTruncated Indicates if more results exist on the server + * @property {string} NextContinuationToken ContinuationToken used to paginate list requests + * @property {Array} Contents List of Keys stored in the S3 Bucket + * @property {string} Contents.Key Key of the Object + * @property {string} Contents.LastModified Date Last Modified of the Object + * @property {string} Contents.CID CID 
of the Object + * @property {string} Contents.ETag ETag of the Object + * @property {number} Contents.Size Size in Bytes of the Object + * @property {string} Contents.StorageClass Class of Storage of the Object + * @property {function} Contents.download Convenience function to download the item using the S3 gateway + */ + + /** + * @typedef {Object} listObjectOptions + * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used. + * @property {string} [ContinuationToken=null] Continues listing from this objects name. + * @property {string} [Delimiter=null] Character used to group keys + * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000. + */ + + /** + * Retrieves a list of objects from a specified bucket. + * + * @param {listObjectOptions} options - The options for listing objects. + * @returns {Promise} - A promise that resolves to an array of objects. + * @example + * // List objects in bucket with a limit of 1000 + * await objectManager.list({ + * MaxKeys: 1000 + * }); + */ + async list( + options = { + Bucket: this.#defaultBucket, + ContinuationToken: null, + Delimiter: null, + MaxKeys: 1000, + }, + ) { + if (options?.MaxKeys && options.MaxKeys > 100000) { + throw new Error(`MaxKeys Maximum value is 100000`); + } + const bucket = options?.Bucket || this.#defaultBucket, + limit = options?.MaxKeys || 1000, + commandOptions = { + Bucket: bucket, + MaxKeys: limit, + }, + command = new ListObjectsV2Command({ + ...options, + ...commandOptions, + }); + + const { Contents, IsTruncated, NextContinuationToken } = + await this.#client.send(command); + return { Contents, IsTruncated, NextContinuationToken }; + } + + /** + * @summary Deletes an object from the specified bucket using the provided key. + * @param {string} key - The key of the object to be deleted. + * @param {objectOptions} [options] - The options for deleting the file. 
+ * @returns {Promise} - A Promise that resolves with the result of the delete operation. + * @example + * // Delete object with name of `delete-object-example` + * await objectManager.delete(`delete-object-example`); + */ + async delete(key, options) { + const command = new DeleteObjectCommand({ + Bucket: options?.bucket || this.#defaultBucket, + Key: key, + }); + + await this.#client.send(command); + return true; + } + + /** + * @typedef {Object} copyObjectOptions + * @property {string} [sourceBucket] The source bucket from where the object is to be copied. + * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey. + */ + + /** + * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey. + * + * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket. + * @param {string} sourceKey - The key of the object to be copied from the sourceBucket. + * @param {string} destinationBucket - The bucket where the object will be copied to. + * @param {copyObjectOptions} [options] - Additional options for the copy operation. + * + * @returns {Promise} - A Promise that resolves with the result of the copy operation. + * @example + * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest` + * // TIP: Set bucket on constructor and it will be used as the default source for copying objects. 
+ * await objectManager.copy(`copy-object-test`, `copy-object-dest`, { + * sourceBucket: `copy-object-src` + * }); + */ + async copy( + sourceKey, + destinationBucket, + options = { + sourceBucket: this.#defaultBucket, + destinationKey: undefined, + }, + ) { + const copySource = `${ + options?.sourceBucket || this.#defaultBucket + }/${sourceKey}`, + command = new CopyObjectCommand({ + CopySource: copySource, + Bucket: destinationBucket, + Key: options?.destinationKey || sourceKey, + }); + + await this.#client.send(command); + return true; + } +} + +/** Provides methods for managing pins in an REST endpoint. */ +class PinManager { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 60000; + + #client; + #credentials; + #gatewayConfiguration; + #defaultBucket; + + /** + * @typedef {Object} pinManagerOptions Optional settings for the constructor. + * @property {string} [bucket] Default bucket to use. + * @property {pinDownloadOptions} [gateway] Default gateway to use. + */ + + /** + * @typedef {Object} pinDownloadOptions Optional settings for downloading pins + * @property {string} endpoint Default gateway to use. + * @property {string} [token] Token for the default gateway. + * @property {number} [timeout=60000] Timeout for the default gateway + */ + + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @param {pinManagerOptions} [options] - Optional settings for the constructor. 
+ * @tutorial quickstart-pin + * @example + * import { PinManager } from "@filebase/sdk"; + * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", { + * bucket: "my-default-bucket", + * gateway: { + * endpoint: "https://my-default-gateway.mydomain.com + * token: SUPER_SECRET_GATEWAY_TOKEN + * } + * }); + */ + constructor(clientKey, clientSecret, options) { + this.#defaultBucket = options?.bucket; + const PSAClientEndpoint = + process.env.NODE_ENV === "test" + ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT + : this.#DEFAULT_ENDPOINT, + baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`; + this.#credentials = { + key: clientKey, + secret: clientSecret, + }; + this.#client = axios.create({ + baseURL: baseURL, + timeout: this.#DEFAULT_TIMEOUT, + }); + + this.#gatewayConfiguration = { + endpoint: options?.gateway?.endpoint, + token: options?.gateway?.token, + timeout: options?.gateway?.timeout || this.#DEFAULT_TIMEOUT, + }; + } + + /** + * @typedef {Object} pinStatus + * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal + * @property {string} status Status a pin object can have at a pinning service. 
("queued","pinning","pinned","failed") + * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination + * @property {Object} pin Pin object + * @property {string} pin.cid Content Identifier (CID) pinned recursively + * @property {string} pin.name Name for pinned data; can be used for lookups later + * @property {Array} pin.origins Optional list of multiaddrs known to provide the data + * @property {Object} pin.meta Optional metadata for pin object + * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data + * @property {object} [info] Optional info for PinStatus response + * @property {function} download Convenience function to download pin + */ + + /** + * @typedef {Object} pinOptions + * @property {string} [bucket] - The bucket to pin the IPFS CID into. + */ + + /** + * @typedef {Object} listPinOptions + * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts + * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match) + * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies + * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only) + * @property {string} [before] Return results created (queued) before provided timestamp + * @property {string} [after] Return results created (queued) after provided timestamp + * @property {number} [limit] Max records to return + * 
@property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport + */ + + /** + * @typedef {Object} listPinResults + * @property {number} count Total number of pin objects that exist for passed query filters + * @property {Array} Array of PinStatus results + */ + + /** + * @summary List the pins in a given bucket + * @param {listPinOptions} [listOptions] + * @param {pinOptions} [options] + * @returns {Promise} + * @example + * // List pins in bucket with a limit of 1000 + * await pinManager.list({ + * limit: 1000 + * }); + */ + async list(listOptions, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket), + getResponse = await this.#client.request({ + method: "GET", + params: listOptions, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + for (let pinStatus of getResponse.data.results) { + pinStatus.download = () => { + return this.download(pinStatus.pin.cid); + }; + } + return getResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Create a pin in the selected bucket + * @param {string} key Key or path of the file in the bucket + * @param {string} cid Content Identifier (CID) to be pinned recursively + * @param {Object} [metadata] Optional metadata for pin object + * @param {pinOptions} [options] Options for pinning the object + * @returns {Promise} + * @example + * // Create Pin with Metadata + * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", { + * "application": "my-custom-app-on-filebase" + * }); + */ + async create(key, cid, metadata, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket), + pinStatus = await this.#client.request({ + method: "POST", + data: { + cid, + name: key, + meta: metadata, + }, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); 
+ pinStatus.data.download = () => { + return this.download(pinStatus.data.pin.cid); + }; + return pinStatus.data; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @typedef {Object} replacePinOptions + * @augments pinOptions + * @property {Object} [metadata] Optional metadata to set on pin during replacement + * @property {string} [name] Optional name for pin to set during replacement + */ + + /** + * @summary Replace a pinned object in the selected bucket + * @param {string} requestid Unique ID for the pinned object + * @param {string} cid Content Identifier (CID) to be pinned recursively + * @param {replacePinOptions} [options] Options for pinning the object + * @returns {Promise} + * @example + * // Replace Pin with Metadata + * await pinManager.create("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", { + * "revision": Date.now() + * } + */ + async replace(requestid, cid, options) { + try { + let replaceData = { + cid, + meta: options?.metadata || {}, + }; + if (options?.name) { + replaceData.name = options.name; + } + + const encodedToken = this.#getEncodedToken(options?.bucket), + pinStatusResult = await this.#client.request({ + method: "POST", + url: `/${requestid}`, + data: replaceData, + validateStatus: (status) => { + return status === 200; + }, + headers: { Authorization: `Bearer ${encodedToken}` }, + }); + const pinStatus = pinStatusResult.data; + pinStatus.download = () => { + return this.download(pinStatus.pin.cid); + }; + return pinStatus; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Download a pin from the selected IPFS gateway + * @param {string} cid + * @param {pinDownloadOptions} [options] + * @returns {Promise} + * @example + * // Download Pin by CID + * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF"); + */ + async download(cid, options) { + const downloadOptions = Object.assign(this.#gatewayConfiguration, options); + return downloadFromGateway(cid, downloadOptions); + 
} + + /** + * @summary Get details about a pinned object + * @param {string} requestid Globally unique identifier of the pin request + * @param {pinOptions} [options] Options for getting the pin + * @returns {Promise} + * @example + * // Get Pin Info by RequestId + * await pinManager.get("qr4231214"); + */ + async get(requestid, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket), + getResponseResult = await this.#client.request({ + method: "GET", + url: `/${requestid}`, + headers: { Authorization: `Bearer ${encodedToken}` }, + validateStatus: (status) => { + return status === 200 || status === 404; + }, + }); + if (getResponseResult.status === 404) { + return false; + } + const pinStatus = getResponseResult.data; + pinStatus.download = () => { + return this.download(pinStatus.pin.cid); + }; + return pinStatus; + } catch (err) { + apiErrorHandler(err); + } + } + + /** + * @summary Delete a pinned object from the selected bucket + * @param requestid Globally unique identifier of the pin request + * @param {pinOptions} [options] Options for deleting the pin + * @returns {Promise} + * @example + * // Delete Pin by RequestId + * await pinManager.delete("qr4231213"); + */ + async delete(requestid, options) { + try { + const encodedToken = this.#getEncodedToken(options?.bucket); + await this.#client.request({ + method: "DELETE", + url: `/${requestid}`, + headers: { Authorization: `Bearer ${encodedToken}` }, + validateStatus: (status) => { + return status === 202; + }, + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + + #getEncodedToken(bucket) { + bucket = bucket || this.#defaultBucket; + return Buffer.from( + `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`, + ).toString("base64"); + } +} + +export { BucketManager, GatewayManager, NameManager, ObjectManager, PinManager }; diff --git a/dist/index.js b/dist/index.js new file mode 100644 index 0000000..9538230 --- /dev/null +++ b/dist/index.js @@ -0,0 
+1,18037 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __commonJS = (cb, mod) => function __require() { + return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports; +}; +var __export = (target, all) => { + for (var name4 in all) + __defProp(target, name4, { get: all[name4], enumerable: true }); +}; +var __copyProps = (to, from4, except, desc) => { + if (from4 && typeof from4 === "object" || typeof from4 === "function") { + for (let key of __getOwnPropNames(from4)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from4[key], enumerable: !(desc = __getOwnPropDesc(from4, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// node_modules/varint/encode.js +var require_encode = __commonJS({ + "node_modules/varint/encode.js"(exports2, module2) { + module2.exports = encode12; + var MSB3 = 128; + var REST3 = 127; + var MSBALL3 = ~REST3; + var INT3 = Math.pow(2, 31); + function encode12(num, out, offset) { + if (Number.MAX_SAFE_INTEGER && num > Number.MAX_SAFE_INTEGER) { + encode12.bytes = 0; + throw new RangeError("Could not encode varint"); + } + out = out || []; + offset = offset || 0; + var oldOffset = offset; + while (num >= INT3) { + out[offset++] = num & 255 | MSB3; + num /= 128; + } + while (num & MSBALL3) { + out[offset++] = num & 255 | MSB3; + num >>>= 7; + } + out[offset] = num | 0; + encode12.bytes = offset - oldOffset + 1; + return out; + } + } +}); + +// node_modules/varint/decode.js +var require_decode = __commonJS({ + "node_modules/varint/decode.js"(exports2, module2) { + module2.exports = read4; + var MSB3 = 128; + var REST3 = 127; + function read4(buf2, offset) { + var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length; + do { + if (counter >= l || shift > 49) { + read4.bytes = 0; + throw new RangeError("Could not decode varint"); + } + b = buf2[counter++]; + res += shift < 28 ? 
(b & REST3) << shift : (b & REST3) * Math.pow(2, shift); + shift += 7; + } while (b >= MSB3); + read4.bytes = counter - offset; + return res; + } + } +}); + +// node_modules/varint/length.js +var require_length = __commonJS({ + "node_modules/varint/length.js"(exports2, module2) { + var N13 = Math.pow(2, 7); + var N23 = Math.pow(2, 14); + var N33 = Math.pow(2, 21); + var N43 = Math.pow(2, 28); + var N53 = Math.pow(2, 35); + var N63 = Math.pow(2, 42); + var N73 = Math.pow(2, 49); + var N83 = Math.pow(2, 56); + var N93 = Math.pow(2, 63); + module2.exports = function(value) { + return value < N13 ? 1 : value < N23 ? 2 : value < N33 ? 3 : value < N43 ? 4 : value < N53 ? 5 : value < N63 ? 6 : value < N73 ? 7 : value < N83 ? 8 : value < N93 ? 9 : 10; + }; + } +}); + +// node_modules/varint/index.js +var require_varint = __commonJS({ + "node_modules/varint/index.js"(exports2, module2) { + module2.exports = { + encode: require_encode(), + decode: require_decode(), + encodingLength: require_length() + }; + } +}); + +// node_modules/eventemitter3/index.js +var require_eventemitter3 = __commonJS({ + "node_modules/eventemitter3/index.js"(exports2, module2) { + "use strict"; + var has = Object.prototype.hasOwnProperty; + var prefix = "~"; + function Events() { + } + if (Object.create) { + Events.prototype = /* @__PURE__ */ Object.create(null); + if (!new Events().__proto__) + prefix = false; + } + function EE(fn, context, once) { + this.fn = fn; + this.context = context; + this.once = once || false; + } + function addListener(emitter, event, fn, context, once) { + if (typeof fn !== "function") { + throw new TypeError("The listener must be a function"); + } + var listener = new EE(fn, context || emitter, once), evt = prefix ? 
prefix + event : event; + if (!emitter._events[evt]) + emitter._events[evt] = listener, emitter._eventsCount++; + else if (!emitter._events[evt].fn) + emitter._events[evt].push(listener); + else + emitter._events[evt] = [emitter._events[evt], listener]; + return emitter; + } + function clearEvent(emitter, evt) { + if (--emitter._eventsCount === 0) + emitter._events = new Events(); + else + delete emitter._events[evt]; + } + function EventEmitter2() { + this._events = new Events(); + this._eventsCount = 0; + } + EventEmitter2.prototype.eventNames = function eventNames() { + var names = [], events, name4; + if (this._eventsCount === 0) + return names; + for (name4 in events = this._events) { + if (has.call(events, name4)) + names.push(prefix ? name4.slice(1) : name4); + } + if (Object.getOwnPropertySymbols) { + return names.concat(Object.getOwnPropertySymbols(events)); + } + return names; + }; + EventEmitter2.prototype.listeners = function listeners(event) { + var evt = prefix ? prefix + event : event, handlers = this._events[evt]; + if (!handlers) + return []; + if (handlers.fn) + return [handlers.fn]; + for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) { + ee[i] = handlers[i].fn; + } + return ee; + }; + EventEmitter2.prototype.listenerCount = function listenerCount(event) { + var evt = prefix ? prefix + event : event, listeners = this._events[evt]; + if (!listeners) + return 0; + if (listeners.fn) + return 1; + return listeners.length; + }; + EventEmitter2.prototype.emit = function emit(event, a1, a2, a3, a4, a5) { + var evt = prefix ? 
prefix + event : event; + if (!this._events[evt]) + return false; + var listeners = this._events[evt], len = arguments.length, args, i; + if (listeners.fn) { + if (listeners.once) + this.removeListener(event, listeners.fn, void 0, true); + switch (len) { + case 1: + return listeners.fn.call(listeners.context), true; + case 2: + return listeners.fn.call(listeners.context, a1), true; + case 3: + return listeners.fn.call(listeners.context, a1, a2), true; + case 4: + return listeners.fn.call(listeners.context, a1, a2, a3), true; + case 5: + return listeners.fn.call(listeners.context, a1, a2, a3, a4), true; + case 6: + return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true; + } + for (i = 1, args = new Array(len - 1); i < len; i++) { + args[i - 1] = arguments[i]; + } + listeners.fn.apply(listeners.context, args); + } else { + var length4 = listeners.length, j; + for (i = 0; i < length4; i++) { + if (listeners[i].once) + this.removeListener(event, listeners[i].fn, void 0, true); + switch (len) { + case 1: + listeners[i].fn.call(listeners[i].context); + break; + case 2: + listeners[i].fn.call(listeners[i].context, a1); + break; + case 3: + listeners[i].fn.call(listeners[i].context, a1, a2); + break; + case 4: + listeners[i].fn.call(listeners[i].context, a1, a2, a3); + break; + default: + if (!args) + for (j = 1, args = new Array(len - 1); j < len; j++) { + args[j - 1] = arguments[j]; + } + listeners[i].fn.apply(listeners[i].context, args); + } + } + } + return true; + }; + EventEmitter2.prototype.on = function on(event, fn, context) { + return addListener(this, event, fn, context, false); + }; + EventEmitter2.prototype.once = function once(event, fn, context) { + return addListener(this, event, fn, context, true); + }; + EventEmitter2.prototype.removeListener = function removeListener(event, fn, context, once) { + var evt = prefix ? 
prefix + event : event; + if (!this._events[evt]) + return this; + if (!fn) { + clearEvent(this, evt); + return this; + } + var listeners = this._events[evt]; + if (listeners.fn) { + if (listeners.fn === fn && (!once || listeners.once) && (!context || listeners.context === context)) { + clearEvent(this, evt); + } + } else { + for (var i = 0, events = [], length4 = listeners.length; i < length4; i++) { + if (listeners[i].fn !== fn || once && !listeners[i].once || context && listeners[i].context !== context) { + events.push(listeners[i]); + } + } + if (events.length) + this._events[evt] = events.length === 1 ? events[0] : events; + else + clearEvent(this, evt); + } + return this; + }; + EventEmitter2.prototype.removeAllListeners = function removeAllListeners(event) { + var evt; + if (event) { + evt = prefix ? prefix + event : event; + if (this._events[evt]) + clearEvent(this, evt); + } else { + this._events = new Events(); + this._eventsCount = 0; + } + return this; + }; + EventEmitter2.prototype.off = EventEmitter2.prototype.removeListener; + EventEmitter2.prototype.addListener = EventEmitter2.prototype.on; + EventEmitter2.prefixed = prefix; + EventEmitter2.EventEmitter = EventEmitter2; + if ("undefined" !== typeof module2) { + module2.exports = EventEmitter2; + } + } +}); + +// node_modules/err-code/index.js +var require_err_code = __commonJS({ + "node_modules/err-code/index.js"(exports2, module2) { + "use strict"; + function assign(obj, props) { + for (const key in props) { + Object.defineProperty(obj, key, { + value: props[key], + enumerable: true, + configurable: true + }); + } + return obj; + } + function createError(err, code5, props) { + if (!err || typeof err === "string") { + throw new TypeError("Please pass an Error to err-code"); + } + if (!props) { + props = {}; + } + if (typeof code5 === "object") { + props = code5; + code5 = ""; + } + if (code5) { + props.code = code5; + } + try { + return assign(err, props); + } catch (_) { + props.message = 
err.message; + props.stack = err.stack; + const ErrClass = function() { + }; + ErrClass.prototype = Object.create(Object.getPrototypeOf(err)); + const output = assign(new ErrClass(), props); + return output; + } + } + module2.exports = createError; + } +}); + +// node_modules/murmurhash3js-revisited/lib/murmurHash3js.js +var require_murmurHash3js = __commonJS({ + "node_modules/murmurhash3js-revisited/lib/murmurHash3js.js"(exports2, module2) { + (function(root, undefined2) { + "use strict"; + var library = { + "version": "3.0.0", + "x86": {}, + "x64": {}, + "inputValidation": true + }; + function _validBytes(bytes) { + if (!Array.isArray(bytes) && !ArrayBuffer.isView(bytes)) { + return false; + } + for (var i = 0; i < bytes.length; i++) { + if (!Number.isInteger(bytes[i]) || bytes[i] < 0 || bytes[i] > 255) { + return false; + } + } + return true; + } + function _x86Multiply(m, n) { + return (m & 65535) * n + (((m >>> 16) * n & 65535) << 16); + } + function _x86Rotl(m, n) { + return m << n | m >>> 32 - n; + } + function _x86Fmix(h) { + h ^= h >>> 16; + h = _x86Multiply(h, 2246822507); + h ^= h >>> 13; + h = _x86Multiply(h, 3266489909); + h ^= h >>> 16; + return h; + } + function _x64Add(m, n) { + m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535]; + n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535]; + var o = [0, 0, 0, 0]; + o[3] += m[3] + n[3]; + o[2] += o[3] >>> 16; + o[3] &= 65535; + o[2] += m[2] + n[2]; + o[1] += o[2] >>> 16; + o[2] &= 65535; + o[1] += m[1] + n[1]; + o[0] += o[1] >>> 16; + o[1] &= 65535; + o[0] += m[0] + n[0]; + o[0] &= 65535; + return [o[0] << 16 | o[1], o[2] << 16 | o[3]]; + } + function _x64Multiply(m, n) { + m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535]; + n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535]; + var o = [0, 0, 0, 0]; + o[3] += m[3] * n[3]; + o[2] += o[3] >>> 16; + o[3] &= 65535; + o[2] += m[2] * n[3]; + o[1] += o[2] >>> 16; + o[2] &= 65535; + o[2] += m[3] * n[2]; + o[1] += o[2] >>> 16; + 
o[2] &= 65535; + o[1] += m[1] * n[3]; + o[0] += o[1] >>> 16; + o[1] &= 65535; + o[1] += m[2] * n[2]; + o[0] += o[1] >>> 16; + o[1] &= 65535; + o[1] += m[3] * n[1]; + o[0] += o[1] >>> 16; + o[1] &= 65535; + o[0] += m[0] * n[3] + m[1] * n[2] + m[2] * n[1] + m[3] * n[0]; + o[0] &= 65535; + return [o[0] << 16 | o[1], o[2] << 16 | o[3]]; + } + function _x64Rotl(m, n) { + n %= 64; + if (n === 32) { + return [m[1], m[0]]; + } else if (n < 32) { + return [m[0] << n | m[1] >>> 32 - n, m[1] << n | m[0] >>> 32 - n]; + } else { + n -= 32; + return [m[1] << n | m[0] >>> 32 - n, m[0] << n | m[1] >>> 32 - n]; + } + } + function _x64LeftShift(m, n) { + n %= 64; + if (n === 0) { + return m; + } else if (n < 32) { + return [m[0] << n | m[1] >>> 32 - n, m[1] << n]; + } else { + return [m[1] << n - 32, 0]; + } + } + function _x64Xor(m, n) { + return [m[0] ^ n[0], m[1] ^ n[1]]; + } + function _x64Fmix(h) { + h = _x64Xor(h, [0, h[0] >>> 1]); + h = _x64Multiply(h, [4283543511, 3981806797]); + h = _x64Xor(h, [0, h[0] >>> 1]); + h = _x64Multiply(h, [3301882366, 444984403]); + h = _x64Xor(h, [0, h[0] >>> 1]); + return h; + } + library.x86.hash32 = function(bytes, seed) { + if (library.inputValidation && !_validBytes(bytes)) { + return undefined2; + } + seed = seed || 0; + var remainder = bytes.length % 4; + var blocks = bytes.length - remainder; + var h1 = seed; + var k1 = 0; + var c1 = 3432918353; + var c2 = 461845907; + for (var i = 0; i < blocks; i = i + 4) { + k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24; + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; + h1 = _x86Rotl(h1, 13); + h1 = _x86Multiply(h1, 5) + 3864292196; + } + k1 = 0; + switch (remainder) { + case 3: + k1 ^= bytes[i + 2] << 16; + case 2: + k1 ^= bytes[i + 1] << 8; + case 1: + k1 ^= bytes[i]; + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; + } + h1 ^= bytes.length; + h1 = _x86Fmix(h1); + return h1 >>> 
0; + }; + library.x86.hash128 = function(bytes, seed) { + if (library.inputValidation && !_validBytes(bytes)) { + return undefined2; + } + seed = seed || 0; + var remainder = bytes.length % 16; + var blocks = bytes.length - remainder; + var h1 = seed; + var h2 = seed; + var h3 = seed; + var h4 = seed; + var k1 = 0; + var k2 = 0; + var k3 = 0; + var k4 = 0; + var c1 = 597399067; + var c2 = 2869860233; + var c3 = 951274213; + var c4 = 2716044179; + for (var i = 0; i < blocks; i = i + 16) { + k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24; + k2 = bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24; + k3 = bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24; + k4 = bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24; + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; + h1 = _x86Rotl(h1, 19); + h1 += h2; + h1 = _x86Multiply(h1, 5) + 1444728091; + k2 = _x86Multiply(k2, c2); + k2 = _x86Rotl(k2, 16); + k2 = _x86Multiply(k2, c3); + h2 ^= k2; + h2 = _x86Rotl(h2, 17); + h2 += h3; + h2 = _x86Multiply(h2, 5) + 197830471; + k3 = _x86Multiply(k3, c3); + k3 = _x86Rotl(k3, 17); + k3 = _x86Multiply(k3, c4); + h3 ^= k3; + h3 = _x86Rotl(h3, 15); + h3 += h4; + h3 = _x86Multiply(h3, 5) + 2530024501; + k4 = _x86Multiply(k4, c4); + k4 = _x86Rotl(k4, 18); + k4 = _x86Multiply(k4, c1); + h4 ^= k4; + h4 = _x86Rotl(h4, 13); + h4 += h1; + h4 = _x86Multiply(h4, 5) + 850148119; + } + k1 = 0; + k2 = 0; + k3 = 0; + k4 = 0; + switch (remainder) { + case 15: + k4 ^= bytes[i + 14] << 16; + case 14: + k4 ^= bytes[i + 13] << 8; + case 13: + k4 ^= bytes[i + 12]; + k4 = _x86Multiply(k4, c4); + k4 = _x86Rotl(k4, 18); + k4 = _x86Multiply(k4, c1); + h4 ^= k4; + case 12: + k3 ^= bytes[i + 11] << 24; + case 11: + k3 ^= bytes[i + 10] << 16; + case 10: + k3 ^= bytes[i + 9] << 8; + case 9: + k3 ^= bytes[i + 8]; + k3 = _x86Multiply(k3, c3); + k3 = _x86Rotl(k3, 17); + 
k3 = _x86Multiply(k3, c4); + h3 ^= k3; + case 8: + k2 ^= bytes[i + 7] << 24; + case 7: + k2 ^= bytes[i + 6] << 16; + case 6: + k2 ^= bytes[i + 5] << 8; + case 5: + k2 ^= bytes[i + 4]; + k2 = _x86Multiply(k2, c2); + k2 = _x86Rotl(k2, 16); + k2 = _x86Multiply(k2, c3); + h2 ^= k2; + case 4: + k1 ^= bytes[i + 3] << 24; + case 3: + k1 ^= bytes[i + 2] << 16; + case 2: + k1 ^= bytes[i + 1] << 8; + case 1: + k1 ^= bytes[i]; + k1 = _x86Multiply(k1, c1); + k1 = _x86Rotl(k1, 15); + k1 = _x86Multiply(k1, c2); + h1 ^= k1; + } + h1 ^= bytes.length; + h2 ^= bytes.length; + h3 ^= bytes.length; + h4 ^= bytes.length; + h1 += h2; + h1 += h3; + h1 += h4; + h2 += h1; + h3 += h1; + h4 += h1; + h1 = _x86Fmix(h1); + h2 = _x86Fmix(h2); + h3 = _x86Fmix(h3); + h4 = _x86Fmix(h4); + h1 += h2; + h1 += h3; + h1 += h4; + h2 += h1; + h3 += h1; + h4 += h1; + return ("00000000" + (h1 >>> 0).toString(16)).slice(-8) + ("00000000" + (h2 >>> 0).toString(16)).slice(-8) + ("00000000" + (h3 >>> 0).toString(16)).slice(-8) + ("00000000" + (h4 >>> 0).toString(16)).slice(-8); + }; + library.x64.hash128 = function(bytes, seed) { + if (library.inputValidation && !_validBytes(bytes)) { + return undefined2; + } + seed = seed || 0; + var remainder = bytes.length % 16; + var blocks = bytes.length - remainder; + var h1 = [0, seed]; + var h2 = [0, seed]; + var k1 = [0, 0]; + var k2 = [0, 0]; + var c1 = [2277735313, 289559509]; + var c2 = [1291169091, 658871167]; + for (var i = 0; i < blocks; i = i + 16) { + k1 = [bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24, bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24]; + k2 = [bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24, bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24]; + k1 = _x64Multiply(k1, c1); + k1 = _x64Rotl(k1, 31); + k1 = _x64Multiply(k1, c2); + h1 = _x64Xor(h1, k1); + h1 = _x64Rotl(h1, 27); + h1 = _x64Add(h1, h2); + h1 = _x64Add(_x64Multiply(h1, [0, 5]), 
[0, 1390208809]); + k2 = _x64Multiply(k2, c2); + k2 = _x64Rotl(k2, 33); + k2 = _x64Multiply(k2, c1); + h2 = _x64Xor(h2, k2); + h2 = _x64Rotl(h2, 31); + h2 = _x64Add(h2, h1); + h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 944331445]); + } + k1 = [0, 0]; + k2 = [0, 0]; + switch (remainder) { + case 15: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 14]], 48)); + case 14: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 13]], 40)); + case 13: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 12]], 32)); + case 12: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 11]], 24)); + case 11: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 10]], 16)); + case 10: + k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 9]], 8)); + case 9: + k2 = _x64Xor(k2, [0, bytes[i + 8]]); + k2 = _x64Multiply(k2, c2); + k2 = _x64Rotl(k2, 33); + k2 = _x64Multiply(k2, c1); + h2 = _x64Xor(h2, k2); + case 8: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 7]], 56)); + case 7: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 6]], 48)); + case 6: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 5]], 40)); + case 5: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 4]], 32)); + case 4: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 3]], 24)); + case 3: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 2]], 16)); + case 2: + k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 1]], 8)); + case 1: + k1 = _x64Xor(k1, [0, bytes[i]]); + k1 = _x64Multiply(k1, c1); + k1 = _x64Rotl(k1, 31); + k1 = _x64Multiply(k1, c2); + h1 = _x64Xor(h1, k1); + } + h1 = _x64Xor(h1, [0, bytes.length]); + h2 = _x64Xor(h2, [0, bytes.length]); + h1 = _x64Add(h1, h2); + h2 = _x64Add(h2, h1); + h1 = _x64Fmix(h1); + h2 = _x64Fmix(h2); + h1 = _x64Add(h1, h2); + h2 = _x64Add(h2, h1); + return ("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8); + }; + if (typeof exports2 !== "undefined") { + if (typeof 
module2 !== "undefined" && module2.exports) { + exports2 = module2.exports = library; + } + exports2.murmurHash3 = library; + } else if (typeof define === "function" && define.amd) { + define([], function() { + return library; + }); + } else { + library._murmurHash3 = root.murmurHash3; + library.noConflict = function() { + root.murmurHash3 = library._murmurHash3; + library._murmurHash3 = undefined2; + library.noConflict = undefined2; + return library; + }; + root.murmurHash3 = library; + } + })(exports2); + } +}); + +// node_modules/murmurhash3js-revisited/index.js +var require_murmurhash3js_revisited = __commonJS({ + "node_modules/murmurhash3js-revisited/index.js"(exports2, module2) { + module2.exports = require_murmurHash3js(); + } +}); + +// node_modules/sparse-array/index.js +var require_sparse_array = __commonJS({ + "node_modules/sparse-array/index.js"(exports2, module2) { + "use strict"; + var BITS_PER_BYTE = 7; + module2.exports = class SparseArray { + constructor() { + this._bitArrays = []; + this._data = []; + this._length = 0; + this._changedLength = false; + this._changedData = false; + } + set(index, value) { + let pos = this._internalPositionFor(index, false); + if (value === void 0) { + if (pos !== -1) { + this._unsetInternalPos(pos); + this._unsetBit(index); + this._changedLength = true; + this._changedData = true; + } + } else { + let needsSort = false; + if (pos === -1) { + pos = this._data.length; + this._setBit(index); + this._changedData = true; + } else { + needsSort = true; + } + this._setInternalPos(pos, index, value, needsSort); + this._changedLength = true; + } + } + unset(index) { + this.set(index, void 0); + } + get(index) { + this._sortData(); + const pos = this._internalPositionFor(index, true); + if (pos === -1) { + return void 0; + } + return this._data[pos][1]; + } + push(value) { + this.set(this.length, value); + return this.length; + } + get length() { + this._sortData(); + if (this._changedLength) { + const last2 = 
this._data[this._data.length - 1]; + this._length = last2 ? last2[0] + 1 : 0; + this._changedLength = false; + } + return this._length; + } + forEach(iterator) { + let i = 0; + while (i < this.length) { + iterator(this.get(i), i, this); + i++; + } + } + map(iterator) { + let i = 0; + let mapped = new Array(this.length); + while (i < this.length) { + mapped[i] = iterator(this.get(i), i, this); + i++; + } + return mapped; + } + reduce(reducer, initialValue) { + let i = 0; + let acc = initialValue; + while (i < this.length) { + const value = this.get(i); + acc = reducer(acc, value, i); + i++; + } + return acc; + } + find(finder) { + let i = 0, found, last2; + while (i < this.length && !found) { + last2 = this.get(i); + found = finder(last2); + i++; + } + return found ? last2 : void 0; + } + _internalPositionFor(index, noCreate) { + const bytePos = this._bytePosFor(index, noCreate); + if (bytePos >= this._bitArrays.length) { + return -1; + } + const byte = this._bitArrays[bytePos]; + const bitPos = index - bytePos * BITS_PER_BYTE; + const exists2 = (byte & 1 << bitPos) > 0; + if (!exists2) { + return -1; + } + const previousPopCount = this._bitArrays.slice(0, bytePos).reduce(popCountReduce, 0); + const mask = ~(4294967295 << bitPos + 1); + const bytePopCount = popCount(byte & mask); + const arrayPos = previousPopCount + bytePopCount - 1; + return arrayPos; + } + _bytePosFor(index, noCreate) { + const bytePos = Math.floor(index / BITS_PER_BYTE); + const targetLength = bytePos + 1; + while (!noCreate && this._bitArrays.length < targetLength) { + this._bitArrays.push(0); + } + return bytePos; + } + _setBit(index) { + const bytePos = this._bytePosFor(index, false); + this._bitArrays[bytePos] |= 1 << index - bytePos * BITS_PER_BYTE; + } + _unsetBit(index) { + const bytePos = this._bytePosFor(index, false); + this._bitArrays[bytePos] &= ~(1 << index - bytePos * BITS_PER_BYTE); + } + _setInternalPos(pos, index, value, needsSort) { + const data = this._data; + const elem = 
[index, value]; + if (needsSort) { + this._sortData(); + data[pos] = elem; + } else { + if (data.length) { + if (data[data.length - 1][0] >= index) { + data.push(elem); + } else if (data[0][0] <= index) { + data.unshift(elem); + } else { + const randomIndex = Math.round(data.length / 2); + this._data = data.slice(0, randomIndex).concat(elem).concat(data.slice(randomIndex)); + } + } else { + this._data.push(elem); + } + this._changedData = true; + this._changedLength = true; + } + } + _unsetInternalPos(pos) { + this._data.splice(pos, 1); + } + _sortData() { + if (this._changedData) { + this._data.sort(sortInternal); + } + this._changedData = false; + } + bitField() { + const bytes = []; + let pendingBitsForResultingByte = 8; + let pendingBitsForNewByte = 0; + let resultingByte = 0; + let newByte; + const pending = this._bitArrays.slice(); + while (pending.length || pendingBitsForNewByte) { + if (pendingBitsForNewByte === 0) { + newByte = pending.shift(); + pendingBitsForNewByte = 7; + } + const usingBits = Math.min(pendingBitsForNewByte, pendingBitsForResultingByte); + const mask = ~(255 << usingBits); + const masked = newByte & mask; + resultingByte |= masked << 8 - pendingBitsForResultingByte; + newByte = newByte >>> usingBits; + pendingBitsForNewByte -= usingBits; + pendingBitsForResultingByte -= usingBits; + if (!pendingBitsForResultingByte || !pendingBitsForNewByte && !pending.length) { + bytes.push(resultingByte); + resultingByte = 0; + pendingBitsForResultingByte = 8; + } + } + for (var i = bytes.length - 1; i > 0; i--) { + const value = bytes[i]; + if (value === 0) { + bytes.pop(); + } else { + break; + } + } + return bytes; + } + compactArray() { + this._sortData(); + return this._data.map(valueOnly); + } + }; + function popCountReduce(count, byte) { + return count + popCount(byte); + } + function popCount(_v) { + let v = _v; + v = v - (v >> 1 & 1431655765); + v = (v & 858993459) + (v >> 2 & 858993459); + return (v + (v >> 4) & 252645135) * 16843009 >> 24; 
+ } + function sortInternal(a, b) { + return a[0] - b[0]; + } + function valueOnly(elem) { + return elem[1]; + } + } +}); + +// node_modules/rabin-wasm/src/rabin.js +var require_rabin = __commonJS({ + "node_modules/rabin-wasm/src/rabin.js"(exports2, module2) { + var Rabin = class { + /** + * Creates an instance of Rabin. + * @param { import("./../dist/rabin-wasm") } asModule + * @param {number} [bits=12] + * @param {number} [min=8 * 1024] + * @param {number} [max=32 * 1024] + * @param {number} polynomial + * @memberof Rabin + */ + constructor(asModule, bits = 12, min = 8 * 1024, max = 32 * 1024, windowSize = 64, polynomial) { + this.bits = bits; + this.min = min; + this.max = max; + this.asModule = asModule; + this.rabin = new asModule.Rabin(bits, min, max, windowSize, polynomial); + this.polynomial = polynomial; + } + /** + * Fingerprints the buffer + * + * @param {Uint8Array} buf + * @returns {Array} + * @memberof Rabin + */ + fingerprint(buf2) { + const { + __retain, + __release, + __allocArray, + __getInt32Array, + Int32Array_ID, + Uint8Array_ID + } = this.asModule; + const lengths = new Int32Array(Math.ceil(buf2.length / this.min)); + const lengthsPtr = __retain(__allocArray(Int32Array_ID, lengths)); + const pointer = __retain(__allocArray(Uint8Array_ID, buf2)); + const out = this.rabin.fingerprint(pointer, lengthsPtr); + const processed = __getInt32Array(out); + __release(pointer); + __release(lengthsPtr); + const end = processed.indexOf(0); + return end >= 0 ? 
processed.subarray(0, end) : processed; + } + }; + module2.exports = Rabin; + } +}); + +// node_modules/@assemblyscript/loader/index.js +var require_loader = __commonJS({ + "node_modules/@assemblyscript/loader/index.js"(exports2) { + "use strict"; + var ID_OFFSET = -8; + var SIZE_OFFSET = -4; + var ARRAYBUFFER_ID = 0; + var STRING_ID = 1; + var ARRAYBUFFERVIEW = 1 << 0; + var ARRAY = 1 << 1; + var SET = 1 << 2; + var MAP = 1 << 3; + var VAL_ALIGN_OFFSET = 5; + var VAL_ALIGN = 1 << VAL_ALIGN_OFFSET; + var VAL_SIGNED = 1 << 10; + var VAL_FLOAT = 1 << 11; + var VAL_NULLABLE = 1 << 12; + var VAL_MANAGED = 1 << 13; + var KEY_ALIGN_OFFSET = 14; + var KEY_ALIGN = 1 << KEY_ALIGN_OFFSET; + var KEY_SIGNED = 1 << 19; + var KEY_FLOAT = 1 << 20; + var KEY_NULLABLE = 1 << 21; + var KEY_MANAGED = 1 << 22; + var ARRAYBUFFERVIEW_BUFFER_OFFSET = 0; + var ARRAYBUFFERVIEW_DATASTART_OFFSET = 4; + var ARRAYBUFFERVIEW_DATALENGTH_OFFSET = 8; + var ARRAYBUFFERVIEW_SIZE = 12; + var ARRAY_LENGTH_OFFSET = 12; + var ARRAY_SIZE = 16; + var BIGINT = typeof BigUint64Array !== "undefined"; + var THIS = Symbol(); + var CHUNKSIZE = 1024; + function getStringImpl(buffer2, ptr) { + const U32 = new Uint32Array(buffer2); + const U16 = new Uint16Array(buffer2); + var length4 = U32[ptr + SIZE_OFFSET >>> 2] >>> 1; + var offset = ptr >>> 1; + if (length4 <= CHUNKSIZE) + return String.fromCharCode.apply(String, U16.subarray(offset, offset + length4)); + const parts = []; + do { + const last2 = U16[offset + CHUNKSIZE - 1]; + const size = last2 >= 55296 && last2 < 56320 ? 
CHUNKSIZE - 1 : CHUNKSIZE; + parts.push(String.fromCharCode.apply(String, U16.subarray(offset, offset += size))); + length4 -= size; + } while (length4 > CHUNKSIZE); + return parts.join("") + String.fromCharCode.apply(String, U16.subarray(offset, offset + length4)); + } + function preInstantiate(imports) { + const baseModule = {}; + function getString(memory, ptr) { + if (!memory) + return ""; + return getStringImpl(memory.buffer, ptr); + } + const env = imports.env = imports.env || {}; + env.abort = env.abort || function abort(mesg, file, line, colm) { + const memory = baseModule.memory || env.memory; + throw Error("abort: " + getString(memory, mesg) + " at " + getString(memory, file) + ":" + line + ":" + colm); + }; + env.trace = env.trace || function trace(mesg, n) { + const memory = baseModule.memory || env.memory; + console.log("trace: " + getString(memory, mesg) + (n ? " " : "") + Array.prototype.slice.call(arguments, 2, 2 + n).join(", ")); + }; + imports.Math = imports.Math || Math; + imports.Date = imports.Date || Date; + return baseModule; + } + function postInstantiate(baseModule, instance) { + const rawExports = instance.exports; + const memory = rawExports.memory; + const table = rawExports.table; + const alloc4 = rawExports["__alloc"]; + const retain = rawExports["__retain"]; + const rttiBase = rawExports["__rtti_base"] || ~0; + function getInfo(id) { + const U32 = new Uint32Array(memory.buffer); + const count = U32[rttiBase >>> 2]; + if ((id >>>= 0) >= count) + throw Error("invalid id: " + id); + return U32[(rttiBase + 4 >>> 2) + id * 2]; + } + function getBase(id) { + const U32 = new Uint32Array(memory.buffer); + const count = U32[rttiBase >>> 2]; + if ((id >>>= 0) >= count) + throw Error("invalid id: " + id); + return U32[(rttiBase + 4 >>> 2) + id * 2 + 1]; + } + function getValueAlign(info) { + return 31 - Math.clz32(info >>> VAL_ALIGN_OFFSET & 31); + } + function getKeyAlign(info) { + return 31 - Math.clz32(info >>> KEY_ALIGN_OFFSET & 31); + } + 
function __allocString(str) { + const length4 = str.length; + const ptr = alloc4(length4 << 1, STRING_ID); + const U16 = new Uint16Array(memory.buffer); + for (var i = 0, p = ptr >>> 1; i < length4; ++i) + U16[p + i] = str.charCodeAt(i); + return ptr; + } + baseModule.__allocString = __allocString; + function __getString(ptr) { + const buffer2 = memory.buffer; + const id = new Uint32Array(buffer2)[ptr + ID_OFFSET >>> 2]; + if (id !== STRING_ID) + throw Error("not a string: " + ptr); + return getStringImpl(buffer2, ptr); + } + baseModule.__getString = __getString; + function getView(alignLog2, signed, float) { + const buffer2 = memory.buffer; + if (float) { + switch (alignLog2) { + case 2: + return new Float32Array(buffer2); + case 3: + return new Float64Array(buffer2); + } + } else { + switch (alignLog2) { + case 0: + return new (signed ? Int8Array : Uint8Array)(buffer2); + case 1: + return new (signed ? Int16Array : Uint16Array)(buffer2); + case 2: + return new (signed ? Int32Array : Uint32Array)(buffer2); + case 3: + return new (signed ? BigInt64Array : BigUint64Array)(buffer2); + } + } + throw Error("unsupported align: " + alignLog2); + } + function __allocArray(id, values) { + const info = getInfo(id); + if (!(info & (ARRAYBUFFERVIEW | ARRAY))) + throw Error("not an array: " + id + " @ " + info); + const align = getValueAlign(info); + const length4 = values.length; + const buf2 = alloc4(length4 << align, ARRAYBUFFER_ID); + const arr = alloc4(info & ARRAY ? 
ARRAY_SIZE : ARRAYBUFFERVIEW_SIZE, id); + const U32 = new Uint32Array(memory.buffer); + U32[arr + ARRAYBUFFERVIEW_BUFFER_OFFSET >>> 2] = retain(buf2); + U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2] = buf2; + U32[arr + ARRAYBUFFERVIEW_DATALENGTH_OFFSET >>> 2] = length4 << align; + if (info & ARRAY) + U32[arr + ARRAY_LENGTH_OFFSET >>> 2] = length4; + const view = getView(align, info & VAL_SIGNED, info & VAL_FLOAT); + if (info & VAL_MANAGED) { + for (let i = 0; i < length4; ++i) + view[(buf2 >>> align) + i] = retain(values[i]); + } else { + view.set(values, buf2 >>> align); + } + return arr; + } + baseModule.__allocArray = __allocArray; + function __getArrayView(arr) { + const U32 = new Uint32Array(memory.buffer); + const id = U32[arr + ID_OFFSET >>> 2]; + const info = getInfo(id); + if (!(info & ARRAYBUFFERVIEW)) + throw Error("not an array: " + id); + const align = getValueAlign(info); + var buf2 = U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2]; + const length4 = info & ARRAY ? U32[arr + ARRAY_LENGTH_OFFSET >>> 2] : U32[buf2 + SIZE_OFFSET >>> 2] >>> align; + return getView(align, info & VAL_SIGNED, info & VAL_FLOAT).subarray(buf2 >>>= align, buf2 + length4); + } + baseModule.__getArrayView = __getArrayView; + function __getArray(arr) { + const input = __getArrayView(arr); + const len = input.length; + const out = new Array(len); + for (let i = 0; i < len; i++) + out[i] = input[i]; + return out; + } + baseModule.__getArray = __getArray; + function __getArrayBuffer(ptr) { + const buffer2 = memory.buffer; + const length4 = new Uint32Array(buffer2)[ptr + SIZE_OFFSET >>> 2]; + return buffer2.slice(ptr, ptr + length4); + } + baseModule.__getArrayBuffer = __getArrayBuffer; + function getTypedArray(Type2, alignLog2, ptr) { + return new Type2(getTypedArrayView(Type2, alignLog2, ptr)); + } + function getTypedArrayView(Type2, alignLog2, ptr) { + const buffer2 = memory.buffer; + const U32 = new Uint32Array(buffer2); + const bufPtr = U32[ptr + 
ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2]; + return new Type2(buffer2, bufPtr, U32[bufPtr + SIZE_OFFSET >>> 2] >>> alignLog2); + } + baseModule.__getInt8Array = getTypedArray.bind(null, Int8Array, 0); + baseModule.__getInt8ArrayView = getTypedArrayView.bind(null, Int8Array, 0); + baseModule.__getUint8Array = getTypedArray.bind(null, Uint8Array, 0); + baseModule.__getUint8ArrayView = getTypedArrayView.bind(null, Uint8Array, 0); + baseModule.__getUint8ClampedArray = getTypedArray.bind(null, Uint8ClampedArray, 0); + baseModule.__getUint8ClampedArrayView = getTypedArrayView.bind(null, Uint8ClampedArray, 0); + baseModule.__getInt16Array = getTypedArray.bind(null, Int16Array, 1); + baseModule.__getInt16ArrayView = getTypedArrayView.bind(null, Int16Array, 1); + baseModule.__getUint16Array = getTypedArray.bind(null, Uint16Array, 1); + baseModule.__getUint16ArrayView = getTypedArrayView.bind(null, Uint16Array, 1); + baseModule.__getInt32Array = getTypedArray.bind(null, Int32Array, 2); + baseModule.__getInt32ArrayView = getTypedArrayView.bind(null, Int32Array, 2); + baseModule.__getUint32Array = getTypedArray.bind(null, Uint32Array, 2); + baseModule.__getUint32ArrayView = getTypedArrayView.bind(null, Uint32Array, 2); + if (BIGINT) { + baseModule.__getInt64Array = getTypedArray.bind(null, BigInt64Array, 3); + baseModule.__getInt64ArrayView = getTypedArrayView.bind(null, BigInt64Array, 3); + baseModule.__getUint64Array = getTypedArray.bind(null, BigUint64Array, 3); + baseModule.__getUint64ArrayView = getTypedArrayView.bind(null, BigUint64Array, 3); + } + baseModule.__getFloat32Array = getTypedArray.bind(null, Float32Array, 2); + baseModule.__getFloat32ArrayView = getTypedArrayView.bind(null, Float32Array, 2); + baseModule.__getFloat64Array = getTypedArray.bind(null, Float64Array, 3); + baseModule.__getFloat64ArrayView = getTypedArrayView.bind(null, Float64Array, 3); + function __instanceof(ptr, baseId) { + const U32 = new Uint32Array(memory.buffer); + var id = U32[ptr + 
ID_OFFSET >>> 2]; + if (id <= U32[rttiBase >>> 2]) { + do + if (id == baseId) + return true; + while (id = getBase(id)); + } + return false; + } + baseModule.__instanceof = __instanceof; + baseModule.memory = baseModule.memory || memory; + baseModule.table = baseModule.table || table; + return demangle(rawExports, baseModule); + } + function isResponse(o) { + return typeof Response !== "undefined" && o instanceof Response; + } + async function instantiate(source, imports) { + if (isResponse(source = await source)) + return instantiateStreaming(source, imports); + return postInstantiate( + preInstantiate(imports || (imports = {})), + await WebAssembly.instantiate( + source instanceof WebAssembly.Module ? source : await WebAssembly.compile(source), + imports + ) + ); + } + exports2.instantiate = instantiate; + function instantiateSync(source, imports) { + return postInstantiate( + preInstantiate(imports || (imports = {})), + new WebAssembly.Instance( + source instanceof WebAssembly.Module ? source : new WebAssembly.Module(source), + imports + ) + ); + } + exports2.instantiateSync = instantiateSync; + async function instantiateStreaming(source, imports) { + if (!WebAssembly.instantiateStreaming) { + return instantiate( + isResponse(source = await source) ? source.arrayBuffer() : source, + imports + ); + } + return postInstantiate( + preInstantiate(imports || (imports = {})), + (await WebAssembly.instantiateStreaming(source, imports)).instance + ); + } + exports2.instantiateStreaming = instantiateStreaming; + function demangle(exports3, baseModule) { + var module3 = baseModule ? Object.create(baseModule) : {}; + var setArgumentsLength = exports3["__argumentsLength"] ? 
function(length4) { + exports3["__argumentsLength"].value = length4; + } : exports3["__setArgumentsLength"] || exports3["__setargc"] || function() { + }; + for (let internalName in exports3) { + if (!Object.prototype.hasOwnProperty.call(exports3, internalName)) + continue; + const elem = exports3[internalName]; + let parts = internalName.split("."); + let curr = module3; + while (parts.length > 1) { + let part = parts.shift(); + if (!Object.prototype.hasOwnProperty.call(curr, part)) + curr[part] = {}; + curr = curr[part]; + } + let name4 = parts[0]; + let hash = name4.indexOf("#"); + if (hash >= 0) { + let className = name4.substring(0, hash); + let classElem = curr[className]; + if (typeof classElem === "undefined" || !classElem.prototype) { + let ctor = function(...args) { + return ctor.wrap(ctor.prototype.constructor(0, ...args)); + }; + ctor.prototype = { + valueOf: function valueOf() { + return this[THIS]; + } + }; + ctor.wrap = function(thisValue) { + return Object.create(ctor.prototype, { [THIS]: { value: thisValue, writable: false } }); + }; + if (classElem) + Object.getOwnPropertyNames(classElem).forEach( + (name5) => Object.defineProperty(ctor, name5, Object.getOwnPropertyDescriptor(classElem, name5)) + ); + curr[className] = ctor; + } + name4 = name4.substring(hash + 1); + curr = curr[className].prototype; + if (/^(get|set):/.test(name4)) { + if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) { + let getter = exports3[internalName.replace("set:", "get:")]; + let setter = exports3[internalName.replace("get:", "set:")]; + Object.defineProperty(curr, name4, { + get: function() { + return getter(this[THIS]); + }, + set: function(value) { + setter(this[THIS], value); + }, + enumerable: true + }); + } + } else { + if (name4 === "constructor") { + (curr[name4] = (...args) => { + setArgumentsLength(args.length); + return elem(...args); + }).original = elem; + } else { + (curr[name4] = function(...args) { + 
setArgumentsLength(args.length); + return elem(this[THIS], ...args); + }).original = elem; + } + } + } else { + if (/^(get|set):/.test(name4)) { + if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) { + Object.defineProperty(curr, name4, { + get: exports3[internalName.replace("set:", "get:")], + set: exports3[internalName.replace("get:", "set:")], + enumerable: true + }); + } + } else if (typeof elem === "function" && elem !== setArgumentsLength) { + (curr[name4] = (...args) => { + setArgumentsLength(args.length); + return elem(...args); + }).original = elem; + } else { + curr[name4] = elem; + } + } + } + return module3; + } + exports2.demangle = demangle; + } +}); + +// node_modules/rabin-wasm/dist/rabin-wasm.node.js +var require_rabin_wasm_node = __commonJS({ + "node_modules/rabin-wasm/dist/rabin-wasm.node.js"(exports2, module2) { + var { instantiateSync } = require_loader(); + var fs6 = require("fs"); + loadWebAssembly.supported = typeof WebAssembly !== "undefined"; + async function loadWebAssembly(imp = {}) { + if (!loadWebAssembly.supported) + return null; + return instantiateSync(fs6.readFileSync(__dirname + "/../dist/rabin.wasm"), imp); + } + module2.exports = loadWebAssembly; + } +}); + +// node_modules/rabin-wasm/src/index.js +var require_src = __commonJS({ + "node_modules/rabin-wasm/src/index.js"(exports2, module2) { + var Rabin = require_rabin(); + var getRabin = require_rabin_wasm_node(); + var create5 = async (avg, min, max, windowSize, polynomial) => { + const compiled = await getRabin(); + return new Rabin(compiled, avg, min, max, windowSize, polynomial); + }; + module2.exports = { + Rabin, + create: create5 + }; + } +}); + +// node_modules/is-plain-obj/index.js +var require_is_plain_obj = __commonJS({ + "node_modules/is-plain-obj/index.js"(exports2, module2) { + "use strict"; + module2.exports = (value) => { + if (Object.prototype.toString.call(value) !== "[object Object]") { + return false; + } + const prototype = 
Object.getPrototypeOf(value); + return prototype === null || prototype === Object.prototype; + }; + } +}); + +// node_modules/merge-options/index.js +var require_merge_options = __commonJS({ + "node_modules/merge-options/index.js"(exports2, module2) { + "use strict"; + var isOptionObject = require_is_plain_obj(); + var { hasOwnProperty } = Object.prototype; + var { propertyIsEnumerable } = Object; + var defineProperty = (object, name4, value) => Object.defineProperty(object, name4, { + value, + writable: true, + enumerable: true, + configurable: true + }); + var globalThis2 = exports2; + var defaultMergeOptions = { + concatArrays: false, + ignoreUndefined: false + }; + var getEnumerableOwnPropertyKeys = (value) => { + const keys = []; + for (const key in value) { + if (hasOwnProperty.call(value, key)) { + keys.push(key); + } + } + if (Object.getOwnPropertySymbols) { + const symbols = Object.getOwnPropertySymbols(value); + for (const symbol2 of symbols) { + if (propertyIsEnumerable.call(value, symbol2)) { + keys.push(symbol2); + } + } + } + return keys; + }; + function clone(value) { + if (Array.isArray(value)) { + return cloneArray(value); + } + if (isOptionObject(value)) { + return cloneOptionObject(value); + } + return value; + } + function cloneArray(array) { + const result = array.slice(0, 0); + getEnumerableOwnPropertyKeys(array).forEach((key) => { + defineProperty(result, key, clone(array[key])); + }); + return result; + } + function cloneOptionObject(object) { + const result = Object.getPrototypeOf(object) === null ? 
/* @__PURE__ */ Object.create(null) : {}; + getEnumerableOwnPropertyKeys(object).forEach((key) => { + defineProperty(result, key, clone(object[key])); + }); + return result; + } + var mergeKeys = (merged, source, keys, config) => { + keys.forEach((key) => { + if (typeof source[key] === "undefined" && config.ignoreUndefined) { + return; + } + if (key in merged && merged[key] !== Object.getPrototypeOf(merged)) { + defineProperty(merged, key, merge2(merged[key], source[key], config)); + } else { + defineProperty(merged, key, clone(source[key])); + } + }); + return merged; + }; + var concatArrays = (merged, source, config) => { + let result = merged.slice(0, 0); + let resultIndex = 0; + [merged, source].forEach((array) => { + const indices = []; + for (let k = 0; k < array.length; k++) { + if (!hasOwnProperty.call(array, k)) { + continue; + } + indices.push(String(k)); + if (array === merged) { + defineProperty(result, resultIndex++, array[k]); + } else { + defineProperty(result, resultIndex++, clone(array[k])); + } + } + result = mergeKeys(result, array, getEnumerableOwnPropertyKeys(array).filter((key) => !indices.includes(key)), config); + }); + return result; + }; + function merge2(merged, source, config) { + if (config.concatArrays && Array.isArray(merged) && Array.isArray(source)) { + return concatArrays(merged, source, config); + } + if (!isOptionObject(source) || !isOptionObject(merged)) { + return clone(source); + } + return mergeKeys(merged, source, getEnumerableOwnPropertyKeys(source), config); + } + module2.exports = function(...options) { + const config = merge2(clone(defaultMergeOptions), this !== globalThis2 && this || {}, defaultMergeOptions); + let merged = { _: {} }; + for (const option of options) { + if (option === void 0) { + continue; + } + if (!isOptionObject(option)) { + throw new TypeError("`" + option + "` is not an Option Object"); + } + merged = merge2(merged, { _: option }, config); + } + return merged._; + }; + } +}); + +// 
node_modules/ms/index.js +var require_ms = __commonJS({ + "node_modules/ms/index.js"(exports2, module2) { + var s = 1e3; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === "string" && val.length > 0) { + return parse(val); + } else if (type === "number" && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + "val is not a non-empty string or a valid number. val=" + JSON.stringify(val) + ); + }; + function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match2) { + return; + } + var n = parseFloat(match2[1]); + var type = (match2[2] || "ms").toLowerCase(); + switch (type) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; + } + } + function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + "d"; + } + if (msAbs >= h) { + return Math.round(ms / h) + "h"; + } + if (msAbs >= m) { + return Math.round(ms / m) + "m"; + } + if (msAbs >= s) { + return Math.round(ms / s) + "s"; + } + return ms + "ms"; + } + function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 
"day"); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, "hour"); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, "minute"); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, "second"); + } + return ms + " ms"; + } + function plural(ms, msAbs, n, name4) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + " " + name4 + (isPlural ? "s" : ""); + } + } +}); + +// node_modules/debug/src/common.js +var require_common = __commonJS({ + "node_modules/debug/src/common.js"(exports2, module2) { + function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce3; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require_ms(); + createDebug.destroy = destroy; + Object.keys(env).forEach((key) => { + createDebug[key] = env[key]; + }); + createDebug.names = []; + createDebug.skips = []; + createDebug.formatters = {}; + function selectColor(namespace) { + let hash = 0; + for (let i = 0; i < namespace.length; i++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i); + hash |= 0; + } + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + function debug3(...args) { + if (!debug3.enabled) { + return; + } + const self = debug3; + const curr = Number(/* @__PURE__ */ new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + if (typeof args[0] !== "string") { + args.unshift("%O"); + } + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match2, format3) => { + if (match2 === "%%") { + return "%"; + } + index++; + const formatter = createDebug.formatters[format3]; + if (typeof formatter === "function") { + const val = 
args[index]; + match2 = formatter.call(self, val); + args.splice(index, 1); + index--; + } + return match2; + }); + createDebug.formatArgs.call(self, args); + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + debug3.namespace = namespace; + debug3.useColors = createDebug.useColors(); + debug3.color = createDebug.selectColor(namespace); + debug3.extend = extend; + debug3.destroy = createDebug.destroy; + Object.defineProperty(debug3, "enabled", { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + return enabledCache; + }, + set: (v) => { + enableOverride = v; + } + }); + if (typeof createDebug.init === "function") { + createDebug.init(debug3); + } + return debug3; + } + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + createDebug.names = []; + createDebug.skips = []; + let i; + const split = (typeof namespaces === "string" ? 
namespaces : "").split(/[\s,]+/); + const len = split.length; + for (i = 0; i < len; i++) { + if (!split[i]) { + continue; + } + namespaces = split[i].replace(/\*/g, ".*?"); + if (namespaces[0] === "-") { + createDebug.skips.push(new RegExp("^" + namespaces.slice(1) + "$")); + } else { + createDebug.names.push(new RegExp("^" + namespaces + "$")); + } + } + } + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map((namespace) => "-" + namespace) + ].join(","); + createDebug.enable(""); + return namespaces; + } + function enabled(name4) { + if (name4[name4.length - 1] === "*") { + return true; + } + let i; + let len; + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name4)) { + return false; + } + } + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name4)) { + return true; + } + } + return false; + } + function toNamespace(regexp) { + return regexp.toString().substring(2, regexp.toString().length - 2).replace(/\.\*\?$/, "*"); + } + function coerce3(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + function destroy() { + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); + } + createDebug.enable(createDebug.load()); + return createDebug; + } + module2.exports = setup; + } +}); + +// node_modules/debug/src/browser.js +var require_browser = __commonJS({ + "node_modules/debug/src/browser.js"(exports2, module2) { + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load; + exports2.useColors = useColors; + exports2.storage = localstorage(); + exports2.destroy = /* @__PURE__ */ (() => { + let warned = false; + return () => { + if (!warned) { + warned = true; + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); + } + }; + })(); + exports2.colors = [ + "#0000CC", + "#0000FF", + "#0033CC", + "#0033FF", + "#0066CC", + "#0066FF", + "#0099CC", + "#0099FF", + "#00CC00", + "#00CC33", + "#00CC66", + "#00CC99", + "#00CCCC", + "#00CCFF", + "#3300CC", + "#3300FF", + "#3333CC", + "#3333FF", + "#3366CC", + "#3366FF", + "#3399CC", + "#3399FF", + "#33CC00", + "#33CC33", + "#33CC66", + "#33CC99", + "#33CCCC", + "#33CCFF", + "#6600CC", + "#6600FF", + "#6633CC", + "#6633FF", + "#66CC00", + "#66CC33", + "#9900CC", + "#9900FF", + "#9933CC", + "#9933FF", + "#99CC00", + "#99CC33", + "#CC0000", + "#CC0033", + "#CC0066", + "#CC0099", + "#CC00CC", + "#CC00FF", + "#CC3300", + "#CC3333", + "#CC3366", + "#CC3399", + "#CC33CC", + "#CC33FF", + "#CC6600", + "#CC6633", + "#CC9900", + "#CC9933", + "#CCCC00", + "#CCCC33", + "#FF0000", + "#FF0033", + "#FF0066", + "#FF0099", + "#FF00CC", + "#FF00FF", + "#FF3300", + "#FF3333", + "#FF3366", + "#FF3399", + "#FF33CC", + "#FF33FF", + "#FF6600", + "#FF6633", + "#FF9900", + "#FF9933", + "#FFCC00", + "#FFCC33" + ]; + function useColors() { + if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { + return true; + } + if (typeof navigator !== "undefined" && navigator.userAgent && 
navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); + } + function formatArgs(args) { + args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? 
"%c " : " ") + "+" + module2.exports.humanize(this.diff); + if (!this.useColors) { + return; + } + const c = "color: " + this.color; + args.splice(1, 0, c, "color: inherit"); + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, (match2) => { + if (match2 === "%%") { + return; + } + index++; + if (match2 === "%c") { + lastC = index; + } + }); + args.splice(lastC, 0, c); + } + exports2.log = console.debug || console.log || (() => { + }); + function save(namespaces) { + try { + if (namespaces) { + exports2.storage.setItem("debug", namespaces); + } else { + exports2.storage.removeItem("debug"); + } + } catch (error) { + } + } + function load() { + let r; + try { + r = exports2.storage.getItem("debug"); + } catch (error) { + } + if (!r && typeof process !== "undefined" && "env" in process) { + r = process.env.DEBUG; + } + return r; + } + function localstorage() { + try { + return localStorage; + } catch (error) { + } + } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.j = function(v) { + try { + return JSON.stringify(v); + } catch (error) { + return "[UnexpectedJSONParseError]: " + error.message; + } + }; + } +}); + +// node_modules/debug/src/node.js +var require_node = __commonJS({ + "node_modules/debug/src/node.js"(exports2, module2) { + var tty = require("tty"); + var util = require("util"); + exports2.init = init; + exports2.log = log12; + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load; + exports2.useColors = useColors; + exports2.destroy = util.deprecate( + () => { + }, + "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." 
+ ); + exports2.colors = [6, 2, 3, 4, 5, 1]; + try { + const supportsColor = require("supports-color"); + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports2.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } + } catch (error) { + } + exports2.inspectOpts = Object.keys(process.env).filter((key) => { + return /^debug_/i.test(key); + }).reduce((obj, key) => { + const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === "null") { + val = null; + } else { + val = Number(val); + } + obj[prop] = val; + return obj; + }, {}); + function useColors() { + return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty.isatty(process.stderr.fd); + } + function formatArgs(args) { + const { namespace: name4, useColors: useColors2 } = this; + if (useColors2) { + const c = this.color; + const colorCode = "\x1B[3" + (c < 8 ? 
c : "8;5;" + c); + const prefix = ` ${colorCode};1m${name4} \x1B[0m`; + args[0] = prefix + args[0].split("\n").join("\n" + prefix); + args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); + } else { + args[0] = getDate() + name4 + " " + args[0]; + } + } + function getDate() { + if (exports2.inspectOpts.hideDate) { + return ""; + } + return (/* @__PURE__ */ new Date()).toISOString() + " "; + } + function log12(...args) { + return process.stderr.write(util.format(...args) + "\n"); + } + function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + delete process.env.DEBUG; + } + } + function load() { + return process.env.DEBUG; + } + function init(debug3) { + debug3.inspectOpts = {}; + const keys = Object.keys(exports2.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug3.inspectOpts[keys[i]] = exports2.inspectOpts[keys[i]]; + } + } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" "); + }; + formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); + }; + } +}); + +// node_modules/debug/src/index.js +var require_src2 = __commonJS({ + "node_modules/debug/src/index.js"(exports2, module2) { + if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { + module2.exports = require_browser(); + } else { + module2.exports = require_node(); + } + } +}); + +// node_modules/balanced-match/index.js +var require_balanced_match = __commonJS({ + "node_modules/balanced-match/index.js"(exports2, module2) { + "use strict"; + module2.exports = balanced2; + function balanced2(a, b, str) { + if (a instanceof RegExp) + a = maybeMatch(a, str); + if (b instanceof RegExp) + b = maybeMatch(b, str); + var r = 
range(a, b, str); + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; + } + function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; + } + balanced2.range = range; + function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + if (ai >= 0 && bi > 0) { + if (a === b) { + return [ai, bi]; + } + begs = []; + left = str.length; + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [begs.pop(), bi]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + bi = str.indexOf(b, i + 1); + } + i = ai < bi && ai >= 0 ? ai : bi; + } + if (begs.length) { + result = [left, right]; + } + } + return result; + } + } +}); + +// node_modules/brace-expansion/index.js +var require_brace_expansion = __commonJS({ + "node_modules/brace-expansion/index.js"(exports2, module2) { + var balanced2 = require_balanced_match(); + module2.exports = expandTop; + var escSlash = "\0SLASH" + Math.random() + "\0"; + var escOpen = "\0OPEN" + Math.random() + "\0"; + var escClose = "\0CLOSE" + Math.random() + "\0"; + var escComma = "\0COMMA" + Math.random() + "\0"; + var escPeriod = "\0PERIOD" + Math.random() + "\0"; + function numeric(str) { + return parseInt(str, 10) == str ? 
parseInt(str, 10) : str.charCodeAt(0); + } + function escapeBraces(str) { + return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); + } + function unescapeBraces(str) { + return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); + } + function parseCommaParts(str) { + if (!str) + return [""]; + var parts = []; + var m = balanced2("{", "}", str); + if (!m) + return str.split(","); + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(","); + p[p.length - 1] += "{" + body + "}"; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); + } + parts.push.apply(parts, p); + return parts; + } + function expandTop(str) { + if (!str) + return []; + if (str.substr(0, 2) === "{}") { + str = "\\{\\}" + str.substr(2); + } + return expand2(escapeBraces(str), true).map(unescapeBraces); + } + function embrace(str) { + return "{" + str + "}"; + } + function isPadded(el) { + return /^-?0\d/.test(el); + } + function lte(i, y) { + return i <= y; + } + function gte(i, y) { + return i >= y; + } + function expand2(str, isTop) { + var expansions = []; + var m = balanced2("{", "}", str); + if (!m) + return [str]; + var pre = m.pre; + var post = m.post.length ? 
expand2(m.post, false) : [""]; + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + "{" + m.body + "}" + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(",") >= 0; + if (!isSequence && !isOptions) { + if (m.post.match(/,.*\}/)) { + str = m.pre + "{" + m.body + escClose + m.post; + return expand2(str); + } + return [str]; + } + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + n = expand2(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + var N; + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length); + var incr = n.length == 3 ? 
Math.abs(numeric(n[2])) : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + N = []; + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === "\\") + c = ""; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join("0"); + if (i < 0) + c = "-" + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand2(n[j], false)); + } + } + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + return expansions; + } + } +}); + +// node_modules/fast-write-atomic/index.js +var require_fast_write_atomic = __commonJS({ + "node_modules/fast-write-atomic/index.js"(exports2, module2) { + "use strict"; + var { open, write: write2, close, rename, fsync, unlink } = require("fs"); + var { join, dirname } = require("path"); + var counter = 0; + function cleanup(dest, err, cb) { + unlink(dest, function() { + cb(err); + }); + } + function closeAndCleanup(fd, dest, err, cb) { + close(fd, cleanup.bind(null, dest, err, cb)); + } + function writeLoop(fd, content, contentLength, offset, cb) { + write2(fd, content, offset, function(err, bytesWritten) { + if (err) { + cb(err); + return; + } + return bytesWritten < contentLength - offset ? writeLoop(fd, content, contentLength, offset + bytesWritten, cb) : cb(null); + }); + } + function openLoop(dest, cb) { + open(dest, "w", function(err, fd) { + if (err) { + return err.code === "EMFILE" ? openLoop(dest, cb) : cb(err); + } + cb(null, fd); + }); + } + function writeAtomic2(path6, content, cb) { + const tmp = join(dirname(path6), "." + process.pid + "." 
+ counter++); + openLoop(tmp, function(err, fd) { + if (err) { + cb(err); + return; + } + const contentLength = Buffer.byteLength(content); + writeLoop(fd, content, contentLength, 0, function(err2) { + if (err2) { + closeAndCleanup(fd, tmp, err2, cb); + return; + } + fsync(fd, function(err3) { + if (err3) { + closeAndCleanup(fd, tmp, err3, cb); + return; + } + close(fd, function(err4) { + if (err4) { + cleanup(tmp, err4, cb); + return; + } + rename(tmp, path6, (err5) => { + if (err5) { + cleanup(tmp, err5, cb); + return; + } + cb(null); + }); + }); + }); + }); + content = null; + }); + } + module2.exports = writeAtomic2; + } +}); + +// src/index.js +var src_exports3 = {}; +__export(src_exports3, { + BucketManager: () => bucketManager_default, + GatewayManager: () => gatewayManager_default, + NameManager: () => nameManager_default, + ObjectManager: () => objectManager_default, + PinManager: () => pinManager_default +}); +module.exports = __toCommonJS(src_exports3); + +// src/bucketManager.js +var import_client_s3 = require("@aws-sdk/client-s3"); +var BucketManager = class { + #DEFAULT_ENDPOINT = "https://s3.filebase.com"; + #DEFAULT_REGION = "us-east-1"; + #client; + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-bucket + * @example + * import { BucketManager } from "@filebase/sdk"; + * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = process.env.NODE_ENV === "test" ? 
process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = { + credentials: { + accessKeyId: clientKey, + secretAccessKey: clientSecret + }, + endpoint: clientEndpoint, + region: this.#DEFAULT_REGION, + forcePathStyle: true + }; + this.#client = new import_client_s3.S3Client(clientConfiguration); + } + /** + * @typedef {Object} bucket + * @property {string} Name The name of the bucket + * @property {date} Date the bucket was created + */ + /** + * @summary Creates a new bucket with the specified name. + * @param {string} name - The name of the bucket to create. + * @returns {Promise} - A promise that resolves when the bucket is created. + * @example + * // Create bucket with name of `create-bucket-example` + * await bucketManager.create(`create-bucket-example`); + */ + async create(name4) { + const command = new import_client_s3.CreateBucketCommand({ + Bucket: name4 + }); + return await this.#client.send(command); + } + /** + * @summary Lists the buckets in the client. + * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client. + * @example + * // List all buckets + * await bucketManager.list(); + */ + async list() { + const command = new import_client_s3.ListBucketsCommand({}), { Buckets } = await this.#client.send(command); + return Buckets; + } + /** + * @summary Deletes the specified bucket. + * @param {string} name - The name of the bucket to delete. + * @returns {Promise} - A promise that resolves when the bucket is deleted. + * @example + * // Delete bucket with name of `bucket-name-to-delete` + * await bucketManager.delete(`bucket-name-to-delete`); + */ + async delete(name4) { + const command = new import_client_s3.DeleteBucketCommand({ + Bucket: name4 + }); + await this.#client.send(command); + return true; + } + /** + * @summary Sets the privacy of a given bucket. + * @param {string} name - The name of the bucket to toggle. 
+ * @param {boolean} targetState - The new target state. [true=private,false=public] + * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled. + * @example + * // Toggle bucket with label of `toggle-bucket-example` + * await bucketManager.setPrivacy(`toggle-bucket-example`, true); // Enabled + * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled + */ + async setPrivacy(name4, targetState) { + const command = new import_client_s3.PutBucketAclCommand({ + Bucket: name4, + ACL: targetState ? "private" : "public-read" + }); + await this.#client.send(command); + return true; + } + /** + * @summary Gets the privacy of a given bucket + * @param {string} name - The name of the bucket to query. + * @returns {Promise} A promise that resolves to true if the bucket is private. + */ + async getPrivacy(name4) { + const command = new import_client_s3.GetBucketAclCommand({ + Bucket: name4 + }); + const response = await this.#client.send(command), readPermission = response.Grants.find((grant) => { + return grant.Grantee.Type === "Group" && grant.Permission === "READ"; + }); + return !(typeof readPermission !== "undefined"); + } +}; +var bucketManager_default = BucketManager; + +// src/gatewayManager.js +var import_axios2 = __toESM(require("axios")); + +// src/helpers.js +var import_axios = __toESM(require("axios")); +var GATEWAY_DEFAULT_TIMEOUT = 6e4; +async function downloadFromGateway(cid, options) { + if (typeof options.endpoint !== "string") { + throw new Error(`Default Gateway must be set`); + } + const downloadHeaders = {}; + if (options.token) { + downloadHeaders["x-filebase-gateway-token"] = options.token; + } + const downloadResponse = await import_axios.default.request({ + method: "GET", + baseURL: options.endpoint, + url: `/ipfs/${cid}`, + headers: downloadHeaders, + type: "stream", + timeout: (options == null ? 
void 0 : options.timeout) || GATEWAY_DEFAULT_TIMEOUT + }); + return downloadResponse.data; +} +function apiErrorHandler(err) { + var _a, _b, _c; + if ((err == null ? void 0 : err.response) && ((_a = err == null ? void 0 : err.response) == null ? void 0 : _a.status) && (err.response.status.toString()[0] === "4" || err.response.status.toString()[0] === "5")) { + throw new Error( + ((_b = err.response.data.error) == null ? void 0 : _b.details) || ((_c = err.response.data.error) == null ? void 0 : _c.reason) || err + ); + } + throw err; +} + +// src/gatewayManager.js +var GatewayManager = class { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 6e4; + #client; + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-gateway + * @example + * import { GatewayManager } from "@filebase/sdk"; + * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = process.env.NODE_ENV === "test" ? 
process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString( + "base64" + ), baseURL = `${clientEndpoint}/v1/gateways`; + this.#client = import_axios2.default.create({ + baseURL, + timeout: this.#DEFAULT_TIMEOUT, + headers: { Authorization: `Bearer ${encodedToken}` } + }); + } + /** + * @typedef {Object} gateway + * @property {string} name Name for the gateway + * @property {string} domain Custom Domain for the gateway + * @property {boolean} enabled Whether the gateway is enabled or not + * @property {string} private Whether the gateway is scoped to users content + * @property {date} created_at Date the gateway was created + * @property {date} updated_at Date the gateway was last updated + */ + /** + * @typedef {Object} gatewayOptions + * @property {boolean} [domain] Optional Domain to allow for using a Custom Domain + * @property {string} [enabled] Optional Toggle to use for enabling the gateway + * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private + */ + /** + * @summary Creates a gateway with the given name and options + * @param {string} name Unique name across entire platform for the gateway. Must be a valid subdomain name. + * @param {gatewayOptions} [options] + * @returns {Promise} - A promise that resolves to the value of a gateway. + * @example + * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`. + * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`. 
+ * await gatewayManager.create(`create-gateway-example`, { + * domain: `cname.mycustomdomain.com` + * }); + */ + async create(name4, options = {}) { + try { + let createOptions = { + name: name4 + }; + if (typeof options.domain === "string") { + createOptions.domain = options.domain; + } + if (typeof options.enabled === "boolean") { + createOptions.enabled = options.enabled; + } + if (typeof options.private === "boolean") { + createOptions.private = options.private; + } + const createResponse = await this.#client.request({ + method: "POST", + data: createOptions + }); + return createResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Deletes a gateway with the given name. + * @param {string} name - The name of the gateway to delete. + * @returns {Promise} - A promise that resolves to true if the gateway was successfully deleted. + * @example + * // Delete gateway with name of `delete-gateway-example` + * await gatewayManager.delete(`delete-name-example`); + */ + async delete(name4) { + try { + await this.#client.request({ + method: "DELETE", + url: `/${name4}`, + validateStatus: (status) => { + return status === 204; + } + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Returns the value of a gateway + * @param {string} name - Parameter representing the name to get. + * @returns {Promise} - A promise that resolves to the value of a gateway. + * @example + * // Get gateway with name of `gateway-get-example` + * await gatewayManager.get(`gateway-get-example`); + */ + async get(name4) { + try { + const getResponse = await this.#client.request({ + method: "GET", + url: `/${name4}`, + validateStatus: (status) => { + return status === 200 || status === 404; + } + }); + return getResponse.status === 200 ? getResponse.data : false; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Returns a list of gateways + * @returns {Promise>} - A promise that resolves to an array of gateways. 
+ * @example + * // List all gateways + * await gatewayManager.list(); + */ + async list() { + try { + const getResponse = await this.#client.request({ + method: "GET" + }); + return getResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Updates the specified gateway. + * @param {string} name - The name of the gateway to update. + * @param {gatewayOptions} options - The options for the update operation. + * + * @returns {Promise} - A Promise that resolves to true if the gateway was updated. + * @example + * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user. + * await gatewayManager.update(`update-gateway-example`, { + * private: true + * }); + */ + async update(name4, options) { + try { + const updateOptions = { + name: name4 + }; + if (options == null ? void 0 : options.domain) { + updateOptions.domain = String(options.private); + } + if (options == null ? void 0 : options.enabled) { + updateOptions.enabled = Boolean(options.enabled); + } + if (options == null ? void 0 : options.private) { + updateOptions.private = Boolean(options.private); + } + await this.#client.request({ + method: "PUT", + url: `/${name4}`, + data: updateOptions, + validateStatus: (status) => { + return status === 200; + } + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Toggles the enabled state of a given gateway. + * @param {string} name - The name of the gateway to toggle. + * @param {boolean} targetState - The new target state. + * @returns {Promise} A promise that resolves to true if the gateway was successfully toggled. 
+ * @example + * // Toggle gateway with label of `toggle-gateway-example` + * await gatewayManager.toggle(`toggle-gateway-example`, true); // Enabled + * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled + */ + async toggle(name4, targetState) { + try { + await this.#client.request({ + method: "PUT", + url: `/${name4}`, + data: { + enabled: Boolean(targetState) + }, + validateStatus: (status) => { + return status === 200; + } + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } +}; +var gatewayManager_default = GatewayManager; + +// src/nameManager.js +var import_axios3 = __toESM(require("axios")); +var NameManager = class { + #DEFAULT_ENDPOINT = "https://api.filebase.io"; + #DEFAULT_TIMEOUT = 6e4; + #client; + /** + * @summary Creates a new instance of the constructor. + * @param {string} clientKey - The access key ID for authentication. + * @param {string} clientSecret - The secret access key for authentication. + * @tutorial quickstart-name + * @example + * import { NameManager } from "@filebase/sdk"; + * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD"); + */ + constructor(clientKey, clientSecret) { + const clientEndpoint = process.env.NODE_ENV === "test" ? 
process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString( + "base64" + ), baseURL = `${clientEndpoint}/v1/names`; + this.#client = import_axios3.default.create({ + baseURL, + timeout: this.#DEFAULT_TIMEOUT, + headers: { Authorization: `Bearer ${encodedToken}` } + }); + } + /** + * @typedef {Object} name + * @property {string} label Descriptive label for the Key + * @property {string} network_key IPNS Key CID + * @property {string} cid Value that name Publishes + * @property {number} sequence Version Number for the name + * @property {boolean} enabled Whether the name is being Published or not + * @property {date} published_at Date the name was last published to the DHT + * @property {date} created_at Date the name was created + * @property {date} updated_at Date the name was last updated + */ + /** + * @typedef {Object} nameOptions + * @property {boolean} [enabled] Whether the name is enabled or not. + */ + /** + * @summary Creates a new IPNS name with the given name as the label and CID. + * @param {string} label - The label of the new IPNS name. + * @param {string} cid - The CID of the IPNS name. + * @param {nameOptions} [options] - Additional options for the IPNS name. + * @returns {Promise} - A Promise that resolves with the response JSON. + * @example + * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`); + */ + async create(label, cid, options = { + enabled: true + }) { + try { + const createResponse = await this.#client.request({ + method: "POST", + data: { + label, + cid, + enabled: (options == null ? void 0 : options.enabled) !== false + } + }); + return createResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Imports a user's IPNS private key. 
+ * @param {string} label - The label for the IPNS name. + * @param {string} cid - The CID (Content Identifier) of the data. + * @param {string} privateKey - The existing private key encoded in Base64. + * @param {nameOptions} [options] - Additional options for the IPNS name. + * @returns {Promise} - A Promise that resolves to the server response. + * @example + * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * // and a private key encoded with base64 + * await nameManager.import( + * `create-name-example`, + * `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm` + * `BASE64_ENCODED_PRIVATEKEY` + * ); + */ + async import(label, cid, privateKey, options = { + enabled: true + }) { + try { + const importResponse = await this.#client.request({ + method: "POST", + data: { + label, + cid, + network_private_key: privateKey, + enabled: (options == null ? void 0 : options.enabled) !== false + } + }); + return importResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Updates the specified name with the given CID. + * @param {string} label - The label of the name to update. + * @param {string} cid - The cid to associate with the name. + * @param {nameOptions} options - The options for the set operation. + * + * @returns {Promise} - A Promise that resolves to true if the IPNS name was updated. + * @example + * // Update name with label of `update-name-example` and set the value of the IPNS name. + * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`); + */ + async update(label, cid, options = {}) { + try { + const updateOptions = { + cid + }; + if (options == null ? 
void 0 : options.enabled) { + updateOptions.enabled = Boolean(options.enabled); + } + await this.#client.request({ + method: "PUT", + url: `/${label}`, + data: updateOptions, + validateStatus: (status) => { + return status === 200; + } + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Returns the value of an IPNS name + * @param {string} label - Parameter representing the label of the name to resolve. + * @returns {Promise} - A promise that resolves to the value of a name. + * @example + * // Get IPNS name with label of `list-name-example` + * await nameManager.get(`list-name-example`); + */ + async get(label) { + try { + const getResponse = await this.#client.request({ + method: "GET", + url: `/${label}`, + validateStatus: (status) => { + return status === 200 || status === 404; + } + }); + return getResponse.status === 200 ? getResponse.data : false; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Returns a list of IPNS names + * @returns {Promise>} - A promise that resolves to an array of names. + * @example + * // List all IPNS names + * await nameManager.list(); + */ + async list() { + try { + const listResponse = await this.#client.request({ + method: "GET" + }); + return listResponse.data; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Deletes an IPNS name with the given label. + * @param {string} label - The label of the IPNS name to delete. + * @returns {Promise} - A promise that resolves to true if the IPNS name was successfully deleted. + * @example + * // List IPNS name with label of `delete-name-example` + * await nameManager.delete(`delete-name-example`); + */ + async delete(label) { + try { + await this.#client.request({ + method: "DELETE", + url: `/${label}`, + validateStatus: (status) => { + return status === 204; + } + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } + /** + * @summary Toggles the enabled state of a given IPNS name. 
+ * @param {string} label - The label of the IPNS name to toggle. + * @param {boolean} targetState - The new target state. + * @returns {Promise} A promise that resolves to true if the IPNS name was successfully toggled. + * @example + * // Toggle IPNS name with label of `toggle-name-example` + * await nameManager.toggle(`toggle-name-example`, true); // Enabled + * await nameManager.toggle(`toggle-name-example`, false); // Disabled + */ + async toggle(label, targetState) { + try { + await this.#client.request({ + method: "PUT", + url: `/${label}`, + data: { + enabled: targetState + }, + validateStatus: (status) => { + return status === 200; + } + }); + return true; + } catch (err) { + apiErrorHandler(err); + } + } +}; +var nameManager_default = NameManager; + +// src/objectManager.js +var import_client_s32 = require("@aws-sdk/client-s3"); +var import_lib_storage = require("@aws-sdk/lib-storage"); + +// node_modules/@ipld/car/src/buffer-reader.js +var import_fs = __toESM(require("fs"), 1); + +// node_modules/cborg/lib/is.js +var typeofs = [ + "string", + "number", + "bigint", + "symbol" +]; +var objectTypeNames = [ + "Function", + "Generator", + "AsyncGenerator", + "GeneratorFunction", + "AsyncGeneratorFunction", + "AsyncFunction", + "Observable", + "Array", + "Buffer", + "Object", + "RegExp", + "Date", + "Error", + "Map", + "Set", + "WeakMap", + "WeakSet", + "ArrayBuffer", + "SharedArrayBuffer", + "DataView", + "Promise", + "URL", + "HTMLElement", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "BigInt64Array", + "BigUint64Array" +]; +function is(value) { + if (value === null) { + return "null"; + } + if (value === void 0) { + return "undefined"; + } + if (value === true || value === false) { + return "boolean"; + } + const typeOf = typeof value; + if (typeofs.includes(typeOf)) { + return typeOf; + } + if (typeOf === "function") { + return "Function"; + } + 
if (Array.isArray(value)) { + return "Array"; + } + if (isBuffer(value)) { + return "Buffer"; + } + const objectType = getObjectType(value); + if (objectType) { + return objectType; + } + return "Object"; +} +function isBuffer(value) { + return value && value.constructor && value.constructor.isBuffer && value.constructor.isBuffer.call(null, value); +} +function getObjectType(value) { + const objectTypeName = Object.prototype.toString.call(value).slice(8, -1); + if (objectTypeNames.includes(objectTypeName)) { + return objectTypeName; + } + return void 0; +} + +// node_modules/cborg/lib/token.js +var Type = class { + /** + * @param {number} major + * @param {string} name + * @param {boolean} terminal + */ + constructor(major, name4, terminal) { + this.major = major; + this.majorEncoded = major << 5; + this.name = name4; + this.terminal = terminal; + } + /* c8 ignore next 3 */ + toString() { + return `Type[${this.major}].${this.name}`; + } + /** + * @param {Type} typ + * @returns {number} + */ + compare(typ) { + return this.major < typ.major ? -1 : this.major > typ.major ? 
1 : 0; + } +}; +Type.uint = new Type(0, "uint", true); +Type.negint = new Type(1, "negint", true); +Type.bytes = new Type(2, "bytes", true); +Type.string = new Type(3, "string", true); +Type.array = new Type(4, "array", false); +Type.map = new Type(5, "map", false); +Type.tag = new Type(6, "tag", false); +Type.float = new Type(7, "float", true); +Type.false = new Type(7, "false", true); +Type.true = new Type(7, "true", true); +Type.null = new Type(7, "null", true); +Type.undefined = new Type(7, "undefined", true); +Type.break = new Type(7, "break", true); +var Token = class { + /** + * @param {Type} type + * @param {any} [value] + * @param {number} [encodedLength] + */ + constructor(type, value, encodedLength) { + this.type = type; + this.value = value; + this.encodedLength = encodedLength; + this.encodedBytes = void 0; + this.byteValue = void 0; + } + /* c8 ignore next 3 */ + toString() { + return `Token[${this.type}].${this.value}`; + } +}; + +// node_modules/cborg/lib/byte-utils.js +var useBuffer = globalThis.process && // @ts-ignore +!globalThis.process.browser && // @ts-ignore +globalThis.Buffer && // @ts-ignore +typeof globalThis.Buffer.isBuffer === "function"; +var textDecoder = new TextDecoder(); +var textEncoder = new TextEncoder(); +function isBuffer2(buf2) { + return useBuffer && globalThis.Buffer.isBuffer(buf2); +} +function asU8A(buf2) { + if (!(buf2 instanceof Uint8Array)) { + return Uint8Array.from(buf2); + } + return isBuffer2(buf2) ? new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength) : buf2; +} +var toString = useBuffer ? ( + // eslint-disable-line operator-linebreak + /** + * @param {Uint8Array} bytes + * @param {number} start + * @param {number} end + */ + (bytes, start, end) => { + return end - start > 64 ? 
( + // eslint-disable-line operator-linebreak + // @ts-ignore + globalThis.Buffer.from(bytes.subarray(start, end)).toString("utf8") + ) : utf8Slice(bytes, start, end); + } +) : ( + // eslint-disable-line operator-linebreak + /** + * @param {Uint8Array} bytes + * @param {number} start + * @param {number} end + */ + (bytes, start, end) => { + return end - start > 64 ? textDecoder.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end); + } +); +var fromString = useBuffer ? ( + // eslint-disable-line operator-linebreak + /** + * @param {string} string + */ + (string2) => { + return string2.length > 64 ? ( + // eslint-disable-line operator-linebreak + // @ts-ignore + globalThis.Buffer.from(string2) + ) : utf8ToBytes(string2); + } +) : ( + // eslint-disable-line operator-linebreak + /** + * @param {string} string + */ + (string2) => { + return string2.length > 64 ? textEncoder.encode(string2) : utf8ToBytes(string2); + } +); +var fromArray = (arr) => { + return Uint8Array.from(arr); +}; +var slice = useBuffer ? ( + // eslint-disable-line operator-linebreak + /** + * @param {Uint8Array} bytes + * @param {number} start + * @param {number} end + */ + (bytes, start, end) => { + if (isBuffer2(bytes)) { + return new Uint8Array(bytes.subarray(start, end)); + } + return bytes.slice(start, end); + } +) : ( + // eslint-disable-line operator-linebreak + /** + * @param {Uint8Array} bytes + * @param {number} start + * @param {number} end + */ + (bytes, start, end) => { + return bytes.slice(start, end); + } +); +var concat = useBuffer ? ( + // eslint-disable-line operator-linebreak + /** + * @param {Uint8Array[]} chunks + * @param {number} length + * @returns {Uint8Array} + */ + (chunks, length4) => { + chunks = chunks.map((c) => c instanceof Uint8Array ? 
c : ( + // eslint-disable-line operator-linebreak + // @ts-ignore + globalThis.Buffer.from(c) + )); + return asU8A(globalThis.Buffer.concat(chunks, length4)); + } +) : ( + // eslint-disable-line operator-linebreak + /** + * @param {Uint8Array[]} chunks + * @param {number} length + * @returns {Uint8Array} + */ + (chunks, length4) => { + const out = new Uint8Array(length4); + let off = 0; + for (let b of chunks) { + if (off + b.length > out.length) { + b = b.subarray(0, out.length - off); + } + out.set(b, off); + off += b.length; + } + return out; + } +); +var alloc = useBuffer ? ( + // eslint-disable-line operator-linebreak + /** + * @param {number} size + * @returns {Uint8Array} + */ + (size) => { + return globalThis.Buffer.allocUnsafe(size); + } +) : ( + // eslint-disable-line operator-linebreak + /** + * @param {number} size + * @returns {Uint8Array} + */ + (size) => { + return new Uint8Array(size); + } +); +function compare(b1, b2) { + if (isBuffer2(b1) && isBuffer2(b2)) { + return b1.compare(b2); + } + for (let i = 0; i < b1.length; i++) { + if (b1[i] === b2[i]) { + continue; + } + return b1[i] < b2[i] ? -1 : 1; + } + return 0; +} +function utf8ToBytes(str) { + const out = []; + let p = 0; + for (let i = 0; i < str.length; i++) { + let c = str.charCodeAt(i); + if (c < 128) { + out[p++] = c; + } else if (c < 2048) { + out[p++] = c >> 6 | 192; + out[p++] = c & 63 | 128; + } else if ((c & 64512) === 55296 && i + 1 < str.length && (str.charCodeAt(i + 1) & 64512) === 56320) { + c = 65536 + ((c & 1023) << 10) + (str.charCodeAt(++i) & 1023); + out[p++] = c >> 18 | 240; + out[p++] = c >> 12 & 63 | 128; + out[p++] = c >> 6 & 63 | 128; + out[p++] = c & 63 | 128; + } else { + out[p++] = c >> 12 | 224; + out[p++] = c >> 6 & 63 | 128; + out[p++] = c & 63 | 128; + } + } + return out; +} +function utf8Slice(buf2, offset, end) { + const res = []; + while (offset < end) { + const firstByte = buf2[offset]; + let codePoint = null; + let bytesPerSequence = firstByte > 239 ? 
4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1; + if (offset + bytesPerSequence <= end) { + let secondByte, thirdByte, fourthByte, tempCodePoint; + switch (bytesPerSequence) { + case 1: + if (firstByte < 128) { + codePoint = firstByte; + } + break; + case 2: + secondByte = buf2[offset + 1]; + if ((secondByte & 192) === 128) { + tempCodePoint = (firstByte & 31) << 6 | secondByte & 63; + if (tempCodePoint > 127) { + codePoint = tempCodePoint; + } + } + break; + case 3: + secondByte = buf2[offset + 1]; + thirdByte = buf2[offset + 2]; + if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) { + tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63; + if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) { + codePoint = tempCodePoint; + } + } + break; + case 4: + secondByte = buf2[offset + 1]; + thirdByte = buf2[offset + 2]; + fourthByte = buf2[offset + 3]; + if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) { + tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63; + if (tempCodePoint > 65535 && tempCodePoint < 1114112) { + codePoint = tempCodePoint; + } + } + } + } + if (codePoint === null) { + codePoint = 65533; + bytesPerSequence = 1; + } else if (codePoint > 65535) { + codePoint -= 65536; + res.push(codePoint >>> 10 & 1023 | 55296); + codePoint = 56320 | codePoint & 1023; + } + res.push(codePoint); + offset += bytesPerSequence; + } + return decodeCodePointsArray(res); +} +var MAX_ARGUMENTS_LENGTH = 4096; +function decodeCodePointsArray(codePoints) { + const len = codePoints.length; + if (len <= MAX_ARGUMENTS_LENGTH) { + return String.fromCharCode.apply(String, codePoints); + } + let res = ""; + let i = 0; + while (i < len) { + res += String.fromCharCode.apply( + String, + codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) + ); + } + return res; +} + +// node_modules/cborg/lib/bl.js +var defaultChunkSize = 256; +var Bl 
= class { + /** + * @param {number} [chunkSize] + */ + constructor(chunkSize = defaultChunkSize) { + this.chunkSize = chunkSize; + this.cursor = 0; + this.maxCursor = -1; + this.chunks = []; + this._initReuseChunk = null; + } + reset() { + this.cursor = 0; + this.maxCursor = -1; + if (this.chunks.length) { + this.chunks = []; + } + if (this._initReuseChunk !== null) { + this.chunks.push(this._initReuseChunk); + this.maxCursor = this._initReuseChunk.length - 1; + } + } + /** + * @param {Uint8Array|number[]} bytes + */ + push(bytes) { + let topChunk = this.chunks[this.chunks.length - 1]; + const newMax = this.cursor + bytes.length; + if (newMax <= this.maxCursor + 1) { + const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1; + topChunk.set(bytes, chunkPos); + } else { + if (topChunk) { + const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1; + if (chunkPos < topChunk.length) { + this.chunks[this.chunks.length - 1] = topChunk.subarray(0, chunkPos); + this.maxCursor = this.cursor - 1; + } + } + if (bytes.length < 64 && bytes.length < this.chunkSize) { + topChunk = alloc(this.chunkSize); + this.chunks.push(topChunk); + this.maxCursor += topChunk.length; + if (this._initReuseChunk === null) { + this._initReuseChunk = topChunk; + } + topChunk.set(bytes, 0); + } else { + this.chunks.push(bytes); + this.maxCursor += bytes.length; + } + } + this.cursor += bytes.length; + } + /** + * @param {boolean} [reset] + * @returns {Uint8Array} + */ + toBytes(reset = false) { + let byts; + if (this.chunks.length === 1) { + const chunk = this.chunks[0]; + if (reset && this.cursor > chunk.length / 2) { + byts = this.cursor === chunk.length ? 
chunk : chunk.subarray(0, this.cursor); + this._initReuseChunk = null; + this.chunks = []; + } else { + byts = slice(chunk, 0, this.cursor); + } + } else { + byts = concat(this.chunks, this.cursor); + } + if (reset) { + this.reset(); + } + return byts; + } +}; + +// node_modules/cborg/lib/common.js +var decodeErrPrefix = "CBOR decode error:"; +var encodeErrPrefix = "CBOR encode error:"; +var uintMinorPrefixBytes = []; +uintMinorPrefixBytes[23] = 1; +uintMinorPrefixBytes[24] = 2; +uintMinorPrefixBytes[25] = 3; +uintMinorPrefixBytes[26] = 5; +uintMinorPrefixBytes[27] = 9; +function assertEnoughData(data, pos, need) { + if (data.length - pos < need) { + throw new Error(`${decodeErrPrefix} not enough data for type`); + } +} + +// node_modules/cborg/lib/0uint.js +var uintBoundaries = [24, 256, 65536, 4294967296, BigInt("18446744073709551616")]; +function readUint8(data, offset, options) { + assertEnoughData(data, offset, 1); + const value = data[offset]; + if (options.strict === true && value < uintBoundaries[0]) { + throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`); + } + return value; +} +function readUint16(data, offset, options) { + assertEnoughData(data, offset, 2); + const value = data[offset] << 8 | data[offset + 1]; + if (options.strict === true && value < uintBoundaries[1]) { + throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`); + } + return value; +} +function readUint32(data, offset, options) { + assertEnoughData(data, offset, 4); + const value = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3]; + if (options.strict === true && value < uintBoundaries[2]) { + throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`); + } + return value; +} +function readUint64(data, offset, options) { + assertEnoughData(data, offset, 8); + const hi = data[offset] * 16777216 + (data[offset + 1] 
<< 16) + (data[offset + 2] << 8) + data[offset + 3]; + const lo = data[offset + 4] * 16777216 + (data[offset + 5] << 16) + (data[offset + 6] << 8) + data[offset + 7]; + const value = (BigInt(hi) << BigInt(32)) + BigInt(lo); + if (options.strict === true && value < uintBoundaries[3]) { + throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`); + } + if (value <= Number.MAX_SAFE_INTEGER) { + return Number(value); + } + if (options.allowBigInt === true) { + return value; + } + throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`); +} +function decodeUint8(data, pos, _minor, options) { + return new Token(Type.uint, readUint8(data, pos + 1, options), 2); +} +function decodeUint16(data, pos, _minor, options) { + return new Token(Type.uint, readUint16(data, pos + 1, options), 3); +} +function decodeUint32(data, pos, _minor, options) { + return new Token(Type.uint, readUint32(data, pos + 1, options), 5); +} +function decodeUint64(data, pos, _minor, options) { + return new Token(Type.uint, readUint64(data, pos + 1, options), 9); +} +function encodeUint(buf2, token) { + return encodeUintValue(buf2, 0, token.value); +} +function encodeUintValue(buf2, major, uint) { + if (uint < uintBoundaries[0]) { + const nuint = Number(uint); + buf2.push([major | nuint]); + } else if (uint < uintBoundaries[1]) { + const nuint = Number(uint); + buf2.push([major | 24, nuint]); + } else if (uint < uintBoundaries[2]) { + const nuint = Number(uint); + buf2.push([major | 25, nuint >>> 8, nuint & 255]); + } else if (uint < uintBoundaries[3]) { + const nuint = Number(uint); + buf2.push([major | 26, nuint >>> 24 & 255, nuint >>> 16 & 255, nuint >>> 8 & 255, nuint & 255]); + } else { + const buint = BigInt(uint); + if (buint < uintBoundaries[4]) { + const set = [major | 27, 0, 0, 0, 0, 0, 0, 0]; + let lo = Number(buint & BigInt(4294967295)); + let hi = Number(buint >> BigInt(32) & BigInt(4294967295)); + set[8] = 
lo & 255; + lo = lo >> 8; + set[7] = lo & 255; + lo = lo >> 8; + set[6] = lo & 255; + lo = lo >> 8; + set[5] = lo & 255; + set[4] = hi & 255; + hi = hi >> 8; + set[3] = hi & 255; + hi = hi >> 8; + set[2] = hi & 255; + hi = hi >> 8; + set[1] = hi & 255; + buf2.push(set); + } else { + throw new Error(`${decodeErrPrefix} encountered BigInt larger than allowable range`); + } + } +} +encodeUint.encodedSize = function encodedSize(token) { + return encodeUintValue.encodedSize(token.value); +}; +encodeUintValue.encodedSize = function encodedSize2(uint) { + if (uint < uintBoundaries[0]) { + return 1; + } + if (uint < uintBoundaries[1]) { + return 2; + } + if (uint < uintBoundaries[2]) { + return 3; + } + if (uint < uintBoundaries[3]) { + return 5; + } + return 9; +}; +encodeUint.compareTokens = function compareTokens(tok1, tok2) { + return tok1.value < tok2.value ? -1 : tok1.value > tok2.value ? 1 : ( + /* c8 ignore next */ + 0 + ); +}; + +// node_modules/cborg/lib/1negint.js +function decodeNegint8(data, pos, _minor, options) { + return new Token(Type.negint, -1 - readUint8(data, pos + 1, options), 2); +} +function decodeNegint16(data, pos, _minor, options) { + return new Token(Type.negint, -1 - readUint16(data, pos + 1, options), 3); +} +function decodeNegint32(data, pos, _minor, options) { + return new Token(Type.negint, -1 - readUint32(data, pos + 1, options), 5); +} +var neg1b = BigInt(-1); +var pos1b = BigInt(1); +function decodeNegint64(data, pos, _minor, options) { + const int = readUint64(data, pos + 1, options); + if (typeof int !== "bigint") { + const value = -1 - int; + if (value >= Number.MIN_SAFE_INTEGER) { + return new Token(Type.negint, value, 9); + } + } + if (options.allowBigInt !== true) { + throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`); + } + return new Token(Type.negint, neg1b - BigInt(int), 9); +} +function encodeNegint(buf2, token) { + const negint = token.value; + const unsigned = typeof negint === 
"bigint" ? negint * neg1b - pos1b : negint * -1 - 1; + encodeUintValue(buf2, token.type.majorEncoded, unsigned); +} +encodeNegint.encodedSize = function encodedSize3(token) { + const negint = token.value; + const unsigned = typeof negint === "bigint" ? negint * neg1b - pos1b : negint * -1 - 1; + if (unsigned < uintBoundaries[0]) { + return 1; + } + if (unsigned < uintBoundaries[1]) { + return 2; + } + if (unsigned < uintBoundaries[2]) { + return 3; + } + if (unsigned < uintBoundaries[3]) { + return 5; + } + return 9; +}; +encodeNegint.compareTokens = function compareTokens2(tok1, tok2) { + return tok1.value < tok2.value ? 1 : tok1.value > tok2.value ? -1 : ( + /* c8 ignore next */ + 0 + ); +}; + +// node_modules/cborg/lib/2bytes.js +function toToken(data, pos, prefix, length4) { + assertEnoughData(data, pos, prefix + length4); + const buf2 = slice(data, pos + prefix, pos + prefix + length4); + return new Token(Type.bytes, buf2, prefix + length4); +} +function decodeBytesCompact(data, pos, minor, _options) { + return toToken(data, pos, 1, minor); +} +function decodeBytes8(data, pos, _minor, options) { + return toToken(data, pos, 2, readUint8(data, pos + 1, options)); +} +function decodeBytes16(data, pos, _minor, options) { + return toToken(data, pos, 3, readUint16(data, pos + 1, options)); +} +function decodeBytes32(data, pos, _minor, options) { + return toToken(data, pos, 5, readUint32(data, pos + 1, options)); +} +function decodeBytes64(data, pos, _minor, options) { + const l = readUint64(data, pos + 1, options); + if (typeof l === "bigint") { + throw new Error(`${decodeErrPrefix} 64-bit integer bytes lengths not supported`); + } + return toToken(data, pos, 9, l); +} +function tokenBytes(token) { + if (token.encodedBytes === void 0) { + token.encodedBytes = token.type === Type.string ? 
fromString(token.value) : token.value; + } + return token.encodedBytes; +} +function encodeBytes(buf2, token) { + const bytes = tokenBytes(token); + encodeUintValue(buf2, token.type.majorEncoded, bytes.length); + buf2.push(bytes); +} +encodeBytes.encodedSize = function encodedSize4(token) { + const bytes = tokenBytes(token); + return encodeUintValue.encodedSize(bytes.length) + bytes.length; +}; +encodeBytes.compareTokens = function compareTokens3(tok1, tok2) { + return compareBytes(tokenBytes(tok1), tokenBytes(tok2)); +}; +function compareBytes(b1, b2) { + return b1.length < b2.length ? -1 : b1.length > b2.length ? 1 : compare(b1, b2); +} + +// node_modules/cborg/lib/3string.js +function toToken2(data, pos, prefix, length4, options) { + const totLength = prefix + length4; + assertEnoughData(data, pos, totLength); + const tok = new Token(Type.string, toString(data, pos + prefix, pos + totLength), totLength); + if (options.retainStringBytes === true) { + tok.byteValue = slice(data, pos + prefix, pos + totLength); + } + return tok; +} +function decodeStringCompact(data, pos, minor, options) { + return toToken2(data, pos, 1, minor, options); +} +function decodeString8(data, pos, _minor, options) { + return toToken2(data, pos, 2, readUint8(data, pos + 1, options), options); +} +function decodeString16(data, pos, _minor, options) { + return toToken2(data, pos, 3, readUint16(data, pos + 1, options), options); +} +function decodeString32(data, pos, _minor, options) { + return toToken2(data, pos, 5, readUint32(data, pos + 1, options), options); +} +function decodeString64(data, pos, _minor, options) { + const l = readUint64(data, pos + 1, options); + if (typeof l === "bigint") { + throw new Error(`${decodeErrPrefix} 64-bit integer string lengths not supported`); + } + return toToken2(data, pos, 9, l, options); +} +var encodeString = encodeBytes; + +// node_modules/cborg/lib/4array.js +function toToken3(_data, _pos, prefix, length4) { + return new Token(Type.array, length4, 
prefix); +} +function decodeArrayCompact(data, pos, minor, _options) { + return toToken3(data, pos, 1, minor); +} +function decodeArray8(data, pos, _minor, options) { + return toToken3(data, pos, 2, readUint8(data, pos + 1, options)); +} +function decodeArray16(data, pos, _minor, options) { + return toToken3(data, pos, 3, readUint16(data, pos + 1, options)); +} +function decodeArray32(data, pos, _minor, options) { + return toToken3(data, pos, 5, readUint32(data, pos + 1, options)); +} +function decodeArray64(data, pos, _minor, options) { + const l = readUint64(data, pos + 1, options); + if (typeof l === "bigint") { + throw new Error(`${decodeErrPrefix} 64-bit integer array lengths not supported`); + } + return toToken3(data, pos, 9, l); +} +function decodeArrayIndefinite(data, pos, _minor, options) { + if (options.allowIndefinite === false) { + throw new Error(`${decodeErrPrefix} indefinite length items not allowed`); + } + return toToken3(data, pos, 1, Infinity); +} +function encodeArray(buf2, token) { + encodeUintValue(buf2, Type.array.majorEncoded, token.value); +} +encodeArray.compareTokens = encodeUint.compareTokens; +encodeArray.encodedSize = function encodedSize5(token) { + return encodeUintValue.encodedSize(token.value); +}; + +// node_modules/cborg/lib/5map.js +function toToken4(_data, _pos, prefix, length4) { + return new Token(Type.map, length4, prefix); +} +function decodeMapCompact(data, pos, minor, _options) { + return toToken4(data, pos, 1, minor); +} +function decodeMap8(data, pos, _minor, options) { + return toToken4(data, pos, 2, readUint8(data, pos + 1, options)); +} +function decodeMap16(data, pos, _minor, options) { + return toToken4(data, pos, 3, readUint16(data, pos + 1, options)); +} +function decodeMap32(data, pos, _minor, options) { + return toToken4(data, pos, 5, readUint32(data, pos + 1, options)); +} +function decodeMap64(data, pos, _minor, options) { + const l = readUint64(data, pos + 1, options); + if (typeof l === "bigint") { + throw 
new Error(`${decodeErrPrefix} 64-bit integer map lengths not supported`); + } + return toToken4(data, pos, 9, l); +} +function decodeMapIndefinite(data, pos, _minor, options) { + if (options.allowIndefinite === false) { + throw new Error(`${decodeErrPrefix} indefinite length items not allowed`); + } + return toToken4(data, pos, 1, Infinity); +} +function encodeMap(buf2, token) { + encodeUintValue(buf2, Type.map.majorEncoded, token.value); +} +encodeMap.compareTokens = encodeUint.compareTokens; +encodeMap.encodedSize = function encodedSize6(token) { + return encodeUintValue.encodedSize(token.value); +}; + +// node_modules/cborg/lib/6tag.js +function decodeTagCompact(_data, _pos, minor, _options) { + return new Token(Type.tag, minor, 1); +} +function decodeTag8(data, pos, _minor, options) { + return new Token(Type.tag, readUint8(data, pos + 1, options), 2); +} +function decodeTag16(data, pos, _minor, options) { + return new Token(Type.tag, readUint16(data, pos + 1, options), 3); +} +function decodeTag32(data, pos, _minor, options) { + return new Token(Type.tag, readUint32(data, pos + 1, options), 5); +} +function decodeTag64(data, pos, _minor, options) { + return new Token(Type.tag, readUint64(data, pos + 1, options), 9); +} +function encodeTag(buf2, token) { + encodeUintValue(buf2, Type.tag.majorEncoded, token.value); +} +encodeTag.compareTokens = encodeUint.compareTokens; +encodeTag.encodedSize = function encodedSize7(token) { + return encodeUintValue.encodedSize(token.value); +}; + +// node_modules/cborg/lib/7float.js +var MINOR_FALSE = 20; +var MINOR_TRUE = 21; +var MINOR_NULL = 22; +var MINOR_UNDEFINED = 23; +function decodeUndefined(_data, _pos, _minor, options) { + if (options.allowUndefined === false) { + throw new Error(`${decodeErrPrefix} undefined values are not supported`); + } else if (options.coerceUndefinedToNull === true) { + return new Token(Type.null, null, 1); + } + return new Token(Type.undefined, void 0, 1); +} +function decodeBreak(_data, _pos, 
_minor, options) { + if (options.allowIndefinite === false) { + throw new Error(`${decodeErrPrefix} indefinite length items not allowed`); + } + return new Token(Type.break, void 0, 1); +} +function createToken(value, bytes, options) { + if (options) { + if (options.allowNaN === false && Number.isNaN(value)) { + throw new Error(`${decodeErrPrefix} NaN values are not supported`); + } + if (options.allowInfinity === false && (value === Infinity || value === -Infinity)) { + throw new Error(`${decodeErrPrefix} Infinity values are not supported`); + } + } + return new Token(Type.float, value, bytes); +} +function decodeFloat16(data, pos, _minor, options) { + return createToken(readFloat16(data, pos + 1), 3, options); +} +function decodeFloat32(data, pos, _minor, options) { + return createToken(readFloat32(data, pos + 1), 5, options); +} +function decodeFloat64(data, pos, _minor, options) { + return createToken(readFloat64(data, pos + 1), 9, options); +} +function encodeFloat(buf2, token, options) { + const float = token.value; + if (float === false) { + buf2.push([Type.float.majorEncoded | MINOR_FALSE]); + } else if (float === true) { + buf2.push([Type.float.majorEncoded | MINOR_TRUE]); + } else if (float === null) { + buf2.push([Type.float.majorEncoded | MINOR_NULL]); + } else if (float === void 0) { + buf2.push([Type.float.majorEncoded | MINOR_UNDEFINED]); + } else { + let decoded; + let success = false; + if (!options || options.float64 !== true) { + encodeFloat16(float); + decoded = readFloat16(ui8a, 1); + if (float === decoded || Number.isNaN(float)) { + ui8a[0] = 249; + buf2.push(ui8a.slice(0, 3)); + success = true; + } else { + encodeFloat32(float); + decoded = readFloat32(ui8a, 1); + if (float === decoded) { + ui8a[0] = 250; + buf2.push(ui8a.slice(0, 5)); + success = true; + } + } + } + if (!success) { + encodeFloat64(float); + decoded = readFloat64(ui8a, 1); + ui8a[0] = 251; + buf2.push(ui8a.slice(0, 9)); + } + } +} +encodeFloat.encodedSize = function 
encodedSize8(token, options) { + const float = token.value; + if (float === false || float === true || float === null || float === void 0) { + return 1; + } + if (!options || options.float64 !== true) { + encodeFloat16(float); + let decoded = readFloat16(ui8a, 1); + if (float === decoded || Number.isNaN(float)) { + return 3; + } + encodeFloat32(float); + decoded = readFloat32(ui8a, 1); + if (float === decoded) { + return 5; + } + } + return 9; +}; +var buffer = new ArrayBuffer(9); +var dataView = new DataView(buffer, 1); +var ui8a = new Uint8Array(buffer, 0); +function encodeFloat16(inp) { + if (inp === Infinity) { + dataView.setUint16(0, 31744, false); + } else if (inp === -Infinity) { + dataView.setUint16(0, 64512, false); + } else if (Number.isNaN(inp)) { + dataView.setUint16(0, 32256, false); + } else { + dataView.setFloat32(0, inp); + const valu32 = dataView.getUint32(0); + const exponent = (valu32 & 2139095040) >> 23; + const mantissa = valu32 & 8388607; + if (exponent === 255) { + dataView.setUint16(0, 31744, false); + } else if (exponent === 0) { + dataView.setUint16(0, (inp & 2147483648) >> 16 | mantissa >> 13, false); + } else { + const logicalExponent = exponent - 127; + if (logicalExponent < -24) { + dataView.setUint16(0, 0); + } else if (logicalExponent < -14) { + dataView.setUint16(0, (valu32 & 2147483648) >> 16 | /* sign bit */ + 1 << 24 + logicalExponent, false); + } else { + dataView.setUint16(0, (valu32 & 2147483648) >> 16 | logicalExponent + 15 << 10 | mantissa >> 13, false); + } + } + } +} +function readFloat16(ui8a2, pos) { + if (ui8a2.length - pos < 2) { + throw new Error(`${decodeErrPrefix} not enough data for float16`); + } + const half = (ui8a2[pos] << 8) + ui8a2[pos + 1]; + if (half === 31744) { + return Infinity; + } + if (half === 64512) { + return -Infinity; + } + if (half === 32256) { + return NaN; + } + const exp = half >> 10 & 31; + const mant = half & 1023; + let val; + if (exp === 0) { + val = mant * 2 ** -24; + } else if (exp !== 
31) { + val = (mant + 1024) * 2 ** (exp - 25); + } else { + val = mant === 0 ? Infinity : NaN; + } + return half & 32768 ? -val : val; +} +function encodeFloat32(inp) { + dataView.setFloat32(0, inp, false); +} +function readFloat32(ui8a2, pos) { + if (ui8a2.length - pos < 4) { + throw new Error(`${decodeErrPrefix} not enough data for float32`); + } + const offset = (ui8a2.byteOffset || 0) + pos; + return new DataView(ui8a2.buffer, offset, 4).getFloat32(0, false); +} +function encodeFloat64(inp) { + dataView.setFloat64(0, inp, false); +} +function readFloat64(ui8a2, pos) { + if (ui8a2.length - pos < 8) { + throw new Error(`${decodeErrPrefix} not enough data for float64`); + } + const offset = (ui8a2.byteOffset || 0) + pos; + return new DataView(ui8a2.buffer, offset, 8).getFloat64(0, false); +} +encodeFloat.compareTokens = encodeUint.compareTokens; + +// node_modules/cborg/lib/jump.js +function invalidMinor(data, pos, minor) { + throw new Error(`${decodeErrPrefix} encountered invalid minor (${minor}) for major ${data[pos] >>> 5}`); +} +function errorer(msg) { + return () => { + throw new Error(`${decodeErrPrefix} ${msg}`); + }; +} +var jump = []; +for (let i = 0; i <= 23; i++) { + jump[i] = invalidMinor; +} +jump[24] = decodeUint8; +jump[25] = decodeUint16; +jump[26] = decodeUint32; +jump[27] = decodeUint64; +jump[28] = invalidMinor; +jump[29] = invalidMinor; +jump[30] = invalidMinor; +jump[31] = invalidMinor; +for (let i = 32; i <= 55; i++) { + jump[i] = invalidMinor; +} +jump[56] = decodeNegint8; +jump[57] = decodeNegint16; +jump[58] = decodeNegint32; +jump[59] = decodeNegint64; +jump[60] = invalidMinor; +jump[61] = invalidMinor; +jump[62] = invalidMinor; +jump[63] = invalidMinor; +for (let i = 64; i <= 87; i++) { + jump[i] = decodeBytesCompact; +} +jump[88] = decodeBytes8; +jump[89] = decodeBytes16; +jump[90] = decodeBytes32; +jump[91] = decodeBytes64; +jump[92] = invalidMinor; +jump[93] = invalidMinor; +jump[94] = invalidMinor; +jump[95] = errorer("indefinite 
length bytes/strings are not supported"); +for (let i = 96; i <= 119; i++) { + jump[i] = decodeStringCompact; +} +jump[120] = decodeString8; +jump[121] = decodeString16; +jump[122] = decodeString32; +jump[123] = decodeString64; +jump[124] = invalidMinor; +jump[125] = invalidMinor; +jump[126] = invalidMinor; +jump[127] = errorer("indefinite length bytes/strings are not supported"); +for (let i = 128; i <= 151; i++) { + jump[i] = decodeArrayCompact; +} +jump[152] = decodeArray8; +jump[153] = decodeArray16; +jump[154] = decodeArray32; +jump[155] = decodeArray64; +jump[156] = invalidMinor; +jump[157] = invalidMinor; +jump[158] = invalidMinor; +jump[159] = decodeArrayIndefinite; +for (let i = 160; i <= 183; i++) { + jump[i] = decodeMapCompact; +} +jump[184] = decodeMap8; +jump[185] = decodeMap16; +jump[186] = decodeMap32; +jump[187] = decodeMap64; +jump[188] = invalidMinor; +jump[189] = invalidMinor; +jump[190] = invalidMinor; +jump[191] = decodeMapIndefinite; +for (let i = 192; i <= 215; i++) { + jump[i] = decodeTagCompact; +} +jump[216] = decodeTag8; +jump[217] = decodeTag16; +jump[218] = decodeTag32; +jump[219] = decodeTag64; +jump[220] = invalidMinor; +jump[221] = invalidMinor; +jump[222] = invalidMinor; +jump[223] = invalidMinor; +for (let i = 224; i <= 243; i++) { + jump[i] = errorer("simple values are not supported"); +} +jump[244] = invalidMinor; +jump[245] = invalidMinor; +jump[246] = invalidMinor; +jump[247] = decodeUndefined; +jump[248] = errorer("simple values are not supported"); +jump[249] = decodeFloat16; +jump[250] = decodeFloat32; +jump[251] = decodeFloat64; +jump[252] = invalidMinor; +jump[253] = invalidMinor; +jump[254] = invalidMinor; +jump[255] = decodeBreak; +var quick = []; +for (let i = 0; i < 24; i++) { + quick[i] = new Token(Type.uint, i, 1); +} +for (let i = -1; i >= -24; i--) { + quick[31 - i] = new Token(Type.negint, i, 1); +} +quick[64] = new Token(Type.bytes, new Uint8Array(0), 1); +quick[96] = new Token(Type.string, "", 1); +quick[128] = 
new Token(Type.array, 0, 1); +quick[160] = new Token(Type.map, 0, 1); +quick[244] = new Token(Type.false, false, 1); +quick[245] = new Token(Type.true, true, 1); +quick[246] = new Token(Type.null, null, 1); +function quickEncodeToken(token) { + switch (token.type) { + case Type.false: + return fromArray([244]); + case Type.true: + return fromArray([245]); + case Type.null: + return fromArray([246]); + case Type.bytes: + if (!token.value.length) { + return fromArray([64]); + } + return; + case Type.string: + if (token.value === "") { + return fromArray([96]); + } + return; + case Type.array: + if (token.value === 0) { + return fromArray([128]); + } + return; + case Type.map: + if (token.value === 0) { + return fromArray([160]); + } + return; + case Type.uint: + if (token.value < 24) { + return fromArray([Number(token.value)]); + } + return; + case Type.negint: + if (token.value >= -24) { + return fromArray([31 - Number(token.value)]); + } + } +} + +// node_modules/cborg/lib/encode.js +var defaultEncodeOptions = { + float64: false, + mapSorter, + quickEncodeToken +}; +function makeCborEncoders() { + const encoders = []; + encoders[Type.uint.major] = encodeUint; + encoders[Type.negint.major] = encodeNegint; + encoders[Type.bytes.major] = encodeBytes; + encoders[Type.string.major] = encodeString; + encoders[Type.array.major] = encodeArray; + encoders[Type.map.major] = encodeMap; + encoders[Type.tag.major] = encodeTag; + encoders[Type.float.major] = encodeFloat; + return encoders; +} +var cborEncoders = makeCborEncoders(); +var buf = new Bl(); +var Ref = class _Ref { + /** + * @param {object|any[]} obj + * @param {Reference|undefined} parent + */ + constructor(obj, parent) { + this.obj = obj; + this.parent = parent; + } + /** + * @param {object|any[]} obj + * @returns {boolean} + */ + includes(obj) { + let p = this; + do { + if (p.obj === obj) { + return true; + } + } while (p = p.parent); + return false; + } + /** + * @param {Reference|undefined} stack + * @param 
{object|any[]} obj + * @returns {Reference} + */ + static createCheck(stack, obj) { + if (stack && stack.includes(obj)) { + throw new Error(`${encodeErrPrefix} object contains circular references`); + } + return new _Ref(obj, stack); + } +}; +var simpleTokens = { + null: new Token(Type.null, null), + undefined: new Token(Type.undefined, void 0), + true: new Token(Type.true, true), + false: new Token(Type.false, false), + emptyArray: new Token(Type.array, 0), + emptyMap: new Token(Type.map, 0) +}; +var typeEncoders = { + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + number(obj, _typ, _options, _refStack) { + if (!Number.isInteger(obj) || !Number.isSafeInteger(obj)) { + return new Token(Type.float, obj); + } else if (obj >= 0) { + return new Token(Type.uint, obj); + } else { + return new Token(Type.negint, obj); + } + }, + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + bigint(obj, _typ, _options, _refStack) { + if (obj >= BigInt(0)) { + return new Token(Type.uint, obj); + } else { + return new Token(Type.negint, obj); + } + }, + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + Uint8Array(obj, _typ, _options, _refStack) { + return new Token(Type.bytes, obj); + }, + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + string(obj, _typ, _options, _refStack) { + return new Token(Type.string, obj); + }, + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + boolean(obj, _typ, _options, _refStack) { + return obj ? 
simpleTokens.true : simpleTokens.false; + }, + /** + * @param {any} _obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + null(_obj, _typ, _options, _refStack) { + return simpleTokens.null; + }, + /** + * @param {any} _obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + undefined(_obj, _typ, _options, _refStack) { + return simpleTokens.undefined; + }, + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + ArrayBuffer(obj, _typ, _options, _refStack) { + return new Token(Type.bytes, new Uint8Array(obj)); + }, + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} _options + * @param {Reference} [_refStack] + * @returns {TokenOrNestedTokens} + */ + DataView(obj, _typ, _options, _refStack) { + return new Token(Type.bytes, new Uint8Array(obj.buffer, obj.byteOffset, obj.byteLength)); + }, + /** + * @param {any} obj + * @param {string} _typ + * @param {EncodeOptions} options + * @param {Reference} [refStack] + * @returns {TokenOrNestedTokens} + */ + Array(obj, _typ, options, refStack) { + if (!obj.length) { + if (options.addBreakTokens === true) { + return [simpleTokens.emptyArray, new Token(Type.break)]; + } + return simpleTokens.emptyArray; + } + refStack = Ref.createCheck(refStack, obj); + const entries = []; + let i = 0; + for (const e of obj) { + entries[i++] = objectToTokens(e, options, refStack); + } + if (options.addBreakTokens) { + return [new Token(Type.array, obj.length), entries, new Token(Type.break)]; + } + return [new Token(Type.array, obj.length), entries]; + }, + /** + * @param {any} obj + * @param {string} typ + * @param {EncodeOptions} options + * @param {Reference} [refStack] + * @returns {TokenOrNestedTokens} + */ + Object(obj, typ, options, 
refStack) { + const isMap = typ !== "Object"; + const keys = isMap ? obj.keys() : Object.keys(obj); + const length4 = isMap ? obj.size : keys.length; + if (!length4) { + if (options.addBreakTokens === true) { + return [simpleTokens.emptyMap, new Token(Type.break)]; + } + return simpleTokens.emptyMap; + } + refStack = Ref.createCheck(refStack, obj); + const entries = []; + let i = 0; + for (const key of keys) { + entries[i++] = [ + objectToTokens(key, options, refStack), + objectToTokens(isMap ? obj.get(key) : obj[key], options, refStack) + ]; + } + sortMapEntries(entries, options); + if (options.addBreakTokens) { + return [new Token(Type.map, length4), entries, new Token(Type.break)]; + } + return [new Token(Type.map, length4), entries]; + } +}; +typeEncoders.Map = typeEncoders.Object; +typeEncoders.Buffer = typeEncoders.Uint8Array; +for (const typ of "Uint8Clamped Uint16 Uint32 Int8 Int16 Int32 BigUint64 BigInt64 Float32 Float64".split(" ")) { + typeEncoders[`${typ}Array`] = typeEncoders.DataView; +} +function objectToTokens(obj, options = {}, refStack) { + const typ = is(obj); + const customTypeEncoder = options && options.typeEncoders && /** @type {OptionalTypeEncoder} */ + options.typeEncoders[typ] || typeEncoders[typ]; + if (typeof customTypeEncoder === "function") { + const tokens = customTypeEncoder(obj, typ, options, refStack); + if (tokens != null) { + return tokens; + } + } + const typeEncoder = typeEncoders[typ]; + if (!typeEncoder) { + throw new Error(`${encodeErrPrefix} unsupported type: ${typ}`); + } + return typeEncoder(obj, typ, options, refStack); +} +function sortMapEntries(entries, options) { + if (options.mapSorter) { + entries.sort(options.mapSorter); + } +} +function mapSorter(e1, e2) { + const keyToken1 = Array.isArray(e1[0]) ? e1[0][0] : e1[0]; + const keyToken2 = Array.isArray(e2[0]) ? 
e2[0][0] : e2[0]; + if (keyToken1.type !== keyToken2.type) { + return keyToken1.type.compare(keyToken2.type); + } + const major = keyToken1.type.major; + const tcmp = cborEncoders[major].compareTokens(keyToken1, keyToken2); + if (tcmp === 0) { + console.warn("WARNING: complex key types used, CBOR key sorting guarantees are gone"); + } + return tcmp; +} +function tokensToEncoded(buf2, tokens, encoders, options) { + if (Array.isArray(tokens)) { + for (const token of tokens) { + tokensToEncoded(buf2, token, encoders, options); + } + } else { + encoders[tokens.type.major](buf2, tokens, options); + } +} +function encodeCustom(data, encoders, options) { + const tokens = objectToTokens(data, options); + if (!Array.isArray(tokens) && options.quickEncodeToken) { + const quickBytes = options.quickEncodeToken(tokens); + if (quickBytes) { + return quickBytes; + } + const encoder = encoders[tokens.type.major]; + if (encoder.encodedSize) { + const size = encoder.encodedSize(tokens, options); + const buf2 = new Bl(size); + encoder(buf2, tokens, options); + if (buf2.chunks.length !== 1) { + throw new Error(`Unexpected error: pre-calculated length for ${tokens} was wrong`); + } + return asU8A(buf2.chunks[0]); + } + } + buf.reset(); + tokensToEncoded(buf, tokens, encoders, options); + return buf.toBytes(true); +} +function encode(data, options) { + options = Object.assign({}, defaultEncodeOptions, options); + return encodeCustom(data, cborEncoders, options); +} + +// node_modules/cborg/lib/decode.js +var defaultDecodeOptions = { + strict: false, + allowIndefinite: true, + allowUndefined: true, + allowBigInt: true +}; +var Tokeniser = class { + /** + * @param {Uint8Array} data + * @param {DecodeOptions} options + */ + constructor(data, options = {}) { + this._pos = 0; + this.data = data; + this.options = options; + } + pos() { + return this._pos; + } + done() { + return this._pos >= this.data.length; + } + next() { + const byt = this.data[this._pos]; + let token = quick[byt]; + if 
(token === void 0) { + const decoder = jump[byt]; + if (!decoder) { + throw new Error(`${decodeErrPrefix} no decoder for major type ${byt >>> 5} (byte 0x${byt.toString(16).padStart(2, "0")})`); + } + const minor = byt & 31; + token = decoder(this.data, this._pos, minor, this.options); + } + this._pos += token.encodedLength; + return token; + } +}; +var DONE = Symbol.for("DONE"); +var BREAK = Symbol.for("BREAK"); +function tokenToArray(token, tokeniser, options) { + const arr = []; + for (let i = 0; i < token.value; i++) { + const value = tokensToObject(tokeniser, options); + if (value === BREAK) { + if (token.value === Infinity) { + break; + } + throw new Error(`${decodeErrPrefix} got unexpected break to lengthed array`); + } + if (value === DONE) { + throw new Error(`${decodeErrPrefix} found array but not enough entries (got ${i}, expected ${token.value})`); + } + arr[i] = value; + } + return arr; +} +function tokenToMap(token, tokeniser, options) { + const useMaps = options.useMaps === true; + const obj = useMaps ? void 0 : {}; + const m = useMaps ? 
/* @__PURE__ */ new Map() : void 0; + for (let i = 0; i < token.value; i++) { + const key = tokensToObject(tokeniser, options); + if (key === BREAK) { + if (token.value === Infinity) { + break; + } + throw new Error(`${decodeErrPrefix} got unexpected break to lengthed map`); + } + if (key === DONE) { + throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no key], expected ${token.value})`); + } + if (useMaps !== true && typeof key !== "string") { + throw new Error(`${decodeErrPrefix} non-string keys not supported (got ${typeof key})`); + } + if (options.rejectDuplicateMapKeys === true) { + if (useMaps && m.has(key) || !useMaps && key in obj) { + throw new Error(`${decodeErrPrefix} found repeat map key "${key}"`); + } + } + const value = tokensToObject(tokeniser, options); + if (value === DONE) { + throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no value], expected ${token.value})`); + } + if (useMaps) { + m.set(key, value); + } else { + obj[key] = value; + } + } + return useMaps ? 
m : obj; +} +function tokensToObject(tokeniser, options) { + if (tokeniser.done()) { + return DONE; + } + const token = tokeniser.next(); + if (token.type === Type.break) { + return BREAK; + } + if (token.type.terminal) { + return token.value; + } + if (token.type === Type.array) { + return tokenToArray(token, tokeniser, options); + } + if (token.type === Type.map) { + return tokenToMap(token, tokeniser, options); + } + if (token.type === Type.tag) { + if (options.tags && typeof options.tags[token.value] === "function") { + const tagged = tokensToObject(tokeniser, options); + return options.tags[token.value](tagged); + } + throw new Error(`${decodeErrPrefix} tag not supported (${token.value})`); + } + throw new Error("unsupported"); +} +function decodeFirst(data, options) { + if (!(data instanceof Uint8Array)) { + throw new Error(`${decodeErrPrefix} data to decode must be a Uint8Array`); + } + options = Object.assign({}, defaultDecodeOptions, options); + const tokeniser = options.tokenizer || new Tokeniser(data, options); + const decoded = tokensToObject(tokeniser, options); + if (decoded === DONE) { + throw new Error(`${decodeErrPrefix} did not find any content to decode`); + } + if (decoded === BREAK) { + throw new Error(`${decodeErrPrefix} got unexpected break`); + } + return [decoded, data.subarray(tokeniser.pos())]; +} +function decode(data, options) { + const [decoded, remainder] = decodeFirst(data, options); + if (remainder.length > 0) { + throw new Error(`${decodeErrPrefix} too many terminals, data makes no sense`); + } + return decoded; +} + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bytes.js +var empty = new Uint8Array(0); +function equals(aa, bb) { + if (aa === bb) + return true; + if (aa.byteLength !== bb.byteLength) { + return false; + } + for (let ii = 0; ii < aa.byteLength; ii++) { + if (aa[ii] !== bb[ii]) { + return false; + } + } + return true; +} +function coerce(o) { + if (o instanceof Uint8Array && o.constructor.name === 
"Uint8Array") + return o; + if (o instanceof ArrayBuffer) + return new Uint8Array(o); + if (ArrayBuffer.isView(o)) { + return new Uint8Array(o.buffer, o.byteOffset, o.byteLength); + } + throw new Error("Unknown type, must be binary type"); +} + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/base-x.js +function base(ALPHABET, name4) { + if (ALPHABET.length >= 255) { + throw new TypeError("Alphabet too long"); + } + var BASE_MAP = new Uint8Array(256); + for (var j = 0; j < BASE_MAP.length; j++) { + BASE_MAP[j] = 255; + } + for (var i = 0; i < ALPHABET.length; i++) { + var x = ALPHABET.charAt(i); + var xc = x.charCodeAt(0); + if (BASE_MAP[xc] !== 255) { + throw new TypeError(x + " is ambiguous"); + } + BASE_MAP[xc] = i; + } + var BASE = ALPHABET.length; + var LEADER = ALPHABET.charAt(0); + var FACTOR = Math.log(BASE) / Math.log(256); + var iFACTOR = Math.log(256) / Math.log(BASE); + function encode12(source) { + if (source instanceof Uint8Array) + ; + else if (ArrayBuffer.isView(source)) { + source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength); + } else if (Array.isArray(source)) { + source = Uint8Array.from(source); + } + if (!(source instanceof Uint8Array)) { + throw new TypeError("Expected Uint8Array"); + } + if (source.length === 0) { + return ""; + } + var zeroes = 0; + var length4 = 0; + var pbegin = 0; + var pend = source.length; + while (pbegin !== pend && source[pbegin] === 0) { + pbegin++; + zeroes++; + } + var size = (pend - pbegin) * iFACTOR + 1 >>> 0; + var b58 = new Uint8Array(size); + while (pbegin !== pend) { + var carry = source[pbegin]; + var i2 = 0; + for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) { + carry += 256 * b58[it1] >>> 0; + b58[it1] = carry % BASE >>> 0; + carry = carry / BASE >>> 0; + } + if (carry !== 0) { + throw new Error("Non-zero carry"); + } + length4 = i2; + pbegin++; + } + var it2 = size - length4; + while (it2 !== size && b58[it2] === 0) { + 
it2++; + } + var str = LEADER.repeat(zeroes); + for (; it2 < size; ++it2) { + str += ALPHABET.charAt(b58[it2]); + } + return str; + } + function decodeUnsafe(source) { + if (typeof source !== "string") { + throw new TypeError("Expected String"); + } + if (source.length === 0) { + return new Uint8Array(); + } + var psz = 0; + if (source[psz] === " ") { + return; + } + var zeroes = 0; + var length4 = 0; + while (source[psz] === LEADER) { + zeroes++; + psz++; + } + var size = (source.length - psz) * FACTOR + 1 >>> 0; + var b256 = new Uint8Array(size); + while (source[psz]) { + var carry = BASE_MAP[source.charCodeAt(psz)]; + if (carry === 255) { + return; + } + var i2 = 0; + for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) { + carry += BASE * b256[it3] >>> 0; + b256[it3] = carry % 256 >>> 0; + carry = carry / 256 >>> 0; + } + if (carry !== 0) { + throw new Error("Non-zero carry"); + } + length4 = i2; + psz++; + } + if (source[psz] === " ") { + return; + } + var it4 = size - length4; + while (it4 !== size && b256[it4] === 0) { + it4++; + } + var vch = new Uint8Array(zeroes + (size - it4)); + var j2 = zeroes; + while (it4 !== size) { + vch[j2++] = b256[it4++]; + } + return vch; + } + function decode15(string2) { + var buffer2 = decodeUnsafe(string2); + if (buffer2) { + return buffer2; + } + throw new Error(`Non-${name4} character`); + } + return { + encode: encode12, + decodeUnsafe, + decode: decode15 + }; +} +var src = base; +var _brrp__multiformats_scope_baseX = src; +var base_x_default = _brrp__multiformats_scope_baseX; + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base.js +var Encoder = class { + name; + prefix; + baseEncode; + constructor(name4, prefix, baseEncode) { + this.name = name4; + this.prefix = prefix; + this.baseEncode = baseEncode; + } + encode(bytes) { + if (bytes instanceof Uint8Array) { + return `${this.prefix}${this.baseEncode(bytes)}`; + } else { + throw Error("Unknown type, must be 
binary type"); + } + } +}; +var Decoder = class { + name; + prefix; + baseDecode; + prefixCodePoint; + constructor(name4, prefix, baseDecode) { + this.name = name4; + this.prefix = prefix; + if (prefix.codePointAt(0) === void 0) { + throw new Error("Invalid prefix character"); + } + this.prefixCodePoint = prefix.codePointAt(0); + this.baseDecode = baseDecode; + } + decode(text) { + if (typeof text === "string") { + if (text.codePointAt(0) !== this.prefixCodePoint) { + throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`); + } + return this.baseDecode(text.slice(this.prefix.length)); + } else { + throw Error("Can only multibase decode strings"); + } + } + or(decoder) { + return or(this, decoder); + } +}; +var ComposedDecoder = class { + decoders; + constructor(decoders) { + this.decoders = decoders; + } + or(decoder) { + return or(this, decoder); + } + decode(input) { + const prefix = input[0]; + const decoder = this.decoders[prefix]; + if (decoder != null) { + return decoder.decode(input); + } else { + throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`); + } + } +}; +function or(left, right) { + return new ComposedDecoder({ + ...left.decoders ?? { [left.prefix]: left }, + ...right.decoders ?? 
{ [right.prefix]: right } + }); +} +var Codec = class { + name; + prefix; + baseEncode; + baseDecode; + encoder; + decoder; + constructor(name4, prefix, baseEncode, baseDecode) { + this.name = name4; + this.prefix = prefix; + this.baseEncode = baseEncode; + this.baseDecode = baseDecode; + this.encoder = new Encoder(name4, prefix, baseEncode); + this.decoder = new Decoder(name4, prefix, baseDecode); + } + encode(input) { + return this.encoder.encode(input); + } + decode(input) { + return this.decoder.decode(input); + } +}; +function from({ name: name4, prefix, encode: encode12, decode: decode15 }) { + return new Codec(name4, prefix, encode12, decode15); +} +function baseX({ name: name4, prefix, alphabet: alphabet2 }) { + const { encode: encode12, decode: decode15 } = base_x_default(alphabet2, name4); + return from({ + prefix, + name: name4, + encode: encode12, + decode: (text) => coerce(decode15(text)) + }); +} +function decode2(string2, alphabet2, bitsPerChar, name4) { + const codes = {}; + for (let i = 0; i < alphabet2.length; ++i) { + codes[alphabet2[i]] = i; + } + let end = string2.length; + while (string2[end - 1] === "=") { + --end; + } + const out = new Uint8Array(end * bitsPerChar / 8 | 0); + let bits = 0; + let buffer2 = 0; + let written = 0; + for (let i = 0; i < end; ++i) { + const value = codes[string2[i]]; + if (value === void 0) { + throw new SyntaxError(`Non-${name4} character`); + } + buffer2 = buffer2 << bitsPerChar | value; + bits += bitsPerChar; + if (bits >= 8) { + bits -= 8; + out[written++] = 255 & buffer2 >> bits; + } + } + if (bits >= bitsPerChar || (255 & buffer2 << 8 - bits) !== 0) { + throw new SyntaxError("Unexpected end of data"); + } + return out; +} +function encode2(data, alphabet2, bitsPerChar) { + const pad = alphabet2[alphabet2.length - 1] === "="; + const mask = (1 << bitsPerChar) - 1; + let out = ""; + let bits = 0; + let buffer2 = 0; + for (let i = 0; i < data.length; ++i) { + buffer2 = buffer2 << 8 | data[i]; + bits += 8; + 
while (bits > bitsPerChar) { + bits -= bitsPerChar; + out += alphabet2[mask & buffer2 >> bits]; + } + } + if (bits !== 0) { + out += alphabet2[mask & buffer2 << bitsPerChar - bits]; + } + if (pad) { + while ((out.length * bitsPerChar & 7) !== 0) { + out += "="; + } + } + return out; +} +function rfc4648({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) { + return from({ + prefix, + name: name4, + encode(input) { + return encode2(input, alphabet2, bitsPerChar); + }, + decode(input) { + return decode2(input, alphabet2, bitsPerChar, name4); + } + }); +} + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base32.js +var base32 = rfc4648({ + prefix: "b", + name: "base32", + alphabet: "abcdefghijklmnopqrstuvwxyz234567", + bitsPerChar: 5 +}); +var base32upper = rfc4648({ + prefix: "B", + name: "base32upper", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", + bitsPerChar: 5 +}); +var base32pad = rfc4648({ + prefix: "c", + name: "base32pad", + alphabet: "abcdefghijklmnopqrstuvwxyz234567=", + bitsPerChar: 5 +}); +var base32padupper = rfc4648({ + prefix: "C", + name: "base32padupper", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=", + bitsPerChar: 5 +}); +var base32hex = rfc4648({ + prefix: "v", + name: "base32hex", + alphabet: "0123456789abcdefghijklmnopqrstuv", + bitsPerChar: 5 +}); +var base32hexupper = rfc4648({ + prefix: "V", + name: "base32hexupper", + alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV", + bitsPerChar: 5 +}); +var base32hexpad = rfc4648({ + prefix: "t", + name: "base32hexpad", + alphabet: "0123456789abcdefghijklmnopqrstuv=", + bitsPerChar: 5 +}); +var base32hexpadupper = rfc4648({ + prefix: "T", + name: "base32hexpadupper", + alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=", + bitsPerChar: 5 +}); +var base32z = rfc4648({ + prefix: "h", + name: "base32z", + alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769", + bitsPerChar: 5 +}); + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base58.js +var base58btc = baseX({ + 
name: "base58btc", + prefix: "z", + alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" +}); +var base58flickr = baseX({ + name: "base58flickr", + prefix: "Z", + alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ" +}); + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/varint.js +var encode_1 = encode3; +var MSB = 128; +var REST = 127; +var MSBALL = ~REST; +var INT = Math.pow(2, 31); +function encode3(num, out, offset) { + out = out || []; + offset = offset || 0; + var oldOffset = offset; + while (num >= INT) { + out[offset++] = num & 255 | MSB; + num /= 128; + } + while (num & MSBALL) { + out[offset++] = num & 255 | MSB; + num >>>= 7; + } + out[offset] = num | 0; + encode3.bytes = offset - oldOffset + 1; + return out; +} +var decode3 = read; +var MSB$1 = 128; +var REST$1 = 127; +function read(buf2, offset) { + var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length; + do { + if (counter >= l) { + read.bytes = 0; + throw new RangeError("Could not decode varint"); + } + b = buf2[counter++]; + res += shift < 28 ? (b & REST$1) << shift : (b & REST$1) * Math.pow(2, shift); + shift += 7; + } while (b >= MSB$1); + read.bytes = counter - offset; + return res; +} +var N1 = Math.pow(2, 7); +var N2 = Math.pow(2, 14); +var N3 = Math.pow(2, 21); +var N4 = Math.pow(2, 28); +var N5 = Math.pow(2, 35); +var N6 = Math.pow(2, 42); +var N7 = Math.pow(2, 49); +var N8 = Math.pow(2, 56); +var N9 = Math.pow(2, 63); +var length = function(value) { + return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 
9 : 10; +}; +var varint = { + encode: encode_1, + decode: decode3, + encodingLength: length +}; +var _brrp_varint = varint; +var varint_default = _brrp_varint; + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/varint.js +function decode4(data, offset = 0) { + const code5 = varint_default.decode(data, offset); + return [code5, varint_default.decode.bytes]; +} +function encodeTo(int, target, offset = 0) { + varint_default.encode(int, target, offset); + return target; +} +function encodingLength(int) { + return varint_default.encodingLength(int); +} + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/hashes/digest.js +function create(code5, digest2) { + const size = digest2.byteLength; + const sizeOffset = encodingLength(code5); + const digestOffset = sizeOffset + encodingLength(size); + const bytes = new Uint8Array(digestOffset + size); + encodeTo(code5, bytes, 0); + encodeTo(size, bytes, sizeOffset); + bytes.set(digest2, digestOffset); + return new Digest(code5, size, digest2, bytes); +} +function decode5(multihash) { + const bytes = coerce(multihash); + const [code5, sizeOffset] = decode4(bytes); + const [size, digestOffset] = decode4(bytes.subarray(sizeOffset)); + const digest2 = bytes.subarray(sizeOffset + digestOffset); + if (digest2.byteLength !== size) { + throw new Error("Incorrect length"); + } + return new Digest(code5, size, digest2, bytes); +} +function equals2(a, b) { + if (a === b) { + return true; + } else { + const data = b; + return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals(a.bytes, data.bytes); + } +} +var Digest = class { + code; + size; + digest; + bytes; + /** + * Creates a multihash digest. 
+ */ + constructor(code5, size, digest2, bytes) { + this.code = code5; + this.size = size; + this.digest = digest2; + this.bytes = bytes; + } +}; + +// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/cid.js +function format(link, base3) { + const { bytes, version } = link; + switch (version) { + case 0: + return toStringV0(bytes, baseCache(link), base3 ?? base58btc.encoder); + default: + return toStringV1(bytes, baseCache(link), base3 ?? base32.encoder); + } +} +var cache = /* @__PURE__ */ new WeakMap(); +function baseCache(cid) { + const baseCache3 = cache.get(cid); + if (baseCache3 == null) { + const baseCache4 = /* @__PURE__ */ new Map(); + cache.set(cid, baseCache4); + return baseCache4; + } + return baseCache3; +} +var CID = class _CID { + code; + version; + multihash; + bytes; + "/"; + /** + * @param version - Version of the CID + * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv + * @param multihash - (Multi)hash of the of the content. 
+ */ + constructor(version, code5, multihash, bytes) { + this.code = code5; + this.version = version; + this.multihash = multihash; + this.bytes = bytes; + this["/"] = bytes; + } + /** + * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes` + * please either use `CID.asCID(cid)` or switch to new signalling mechanism + * + * @deprecated + */ + get asCID() { + return this; + } + // ArrayBufferView + get byteOffset() { + return this.bytes.byteOffset; + } + // ArrayBufferView + get byteLength() { + return this.bytes.byteLength; + } + toV0() { + switch (this.version) { + case 0: { + return this; + } + case 1: { + const { code: code5, multihash } = this; + if (code5 !== DAG_PB_CODE) { + throw new Error("Cannot convert a non dag-pb CID to CIDv0"); + } + if (multihash.code !== SHA_256_CODE) { + throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0"); + } + return _CID.createV0(multihash); + } + default: { + throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`); + } + } + } + toV1() { + switch (this.version) { + case 0: { + const { code: code5, digest: digest2 } = this.multihash; + const multihash = create(code5, digest2); + return _CID.createV1(this.code, multihash); + } + case 1: { + return this; + } + default: { + throw Error(`Can not convert CID version ${this.version} to version 1. 
This is a bug please report`); + } + } + } + equals(other) { + return _CID.equals(this, other); + } + static equals(self, other) { + const unknown = other; + return unknown != null && self.code === unknown.code && self.version === unknown.version && equals2(self.multihash, unknown.multihash); + } + toString(base3) { + return format(this, base3); + } + toJSON() { + return { "/": format(this) }; + } + link() { + return this; + } + [Symbol.toStringTag] = "CID"; + // Legacy + [Symbol.for("nodejs.util.inspect.custom")]() { + return `CID(${this.toString()})`; + } + /** + * Takes any input `value` and returns a `CID` instance if it was + * a `CID` otherwise returns `null`. If `value` is instanceof `CID` + * it will return value back. If `value` is not instance of this CID + * class, but is compatible CID it will return new instance of this + * `CID` class. Otherwise returns null. + * + * This allows two different incompatible versions of CID library to + * co-exist and interop as long as binary interface is compatible. + */ + static asCID(input) { + if (input == null) { + return null; + } + const value = input; + if (value instanceof _CID) { + return value; + } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) { + const { version, code: code5, multihash, bytes } = value; + return new _CID(version, code5, multihash, bytes ?? encodeCID(version, code5, multihash.bytes)); + } else if (value[cidSymbol] === true) { + const { version, multihash, code: code5 } = value; + const digest2 = decode5(multihash); + return _CID.create(version, code5, digest2); + } else { + return null; + } + } + /** + * @param version - Version of the CID + * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv + * @param digest - (Multi)hash of the of the content. 
+ */ + static create(version, code5, digest2) { + if (typeof code5 !== "number") { + throw new Error("String codecs are no longer supported"); + } + if (!(digest2.bytes instanceof Uint8Array)) { + throw new Error("Invalid digest"); + } + switch (version) { + case 0: { + if (code5 !== DAG_PB_CODE) { + throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`); + } else { + return new _CID(version, code5, digest2, digest2.bytes); + } + } + case 1: { + const bytes = encodeCID(version, code5, digest2.bytes); + return new _CID(version, code5, digest2, bytes); + } + default: { + throw new Error("Invalid version"); + } + } + } + /** + * Simplified version of `create` for CIDv0. + */ + static createV0(digest2) { + return _CID.create(0, DAG_PB_CODE, digest2); + } + /** + * Simplified version of `create` for CIDv1. + * + * @param code - Content encoding format code. + * @param digest - Multihash of the content. + */ + static createV1(code5, digest2) { + return _CID.create(1, code5, digest2); + } + /** + * Decoded a CID from its binary representation. The byte array must contain + * only the CID with no additional bytes. + * + * An error will be thrown if the bytes provided do not contain a valid + * binary representation of a CID. + */ + static decode(bytes) { + const [cid, remainder] = _CID.decodeFirst(bytes); + if (remainder.length !== 0) { + throw new Error("Incorrect length"); + } + return cid; + } + /** + * Decoded a CID from its binary representation at the beginning of a byte + * array. + * + * Returns an array with the first element containing the CID and the second + * element containing the remainder of the original byte array. The remainder + * will be a zero-length byte array if the provided bytes only contained a + * binary CID representation. 
+ */ + static decodeFirst(bytes) { + const specs = _CID.inspectBytes(bytes); + const prefixSize = specs.size - specs.multihashSize; + const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize)); + if (multihashBytes.byteLength !== specs.multihashSize) { + throw new Error("Incorrect length"); + } + const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize); + const digest2 = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes); + const cid = specs.version === 0 ? _CID.createV0(digest2) : _CID.createV1(specs.codec, digest2); + return [cid, bytes.subarray(specs.size)]; + } + /** + * Inspect the initial bytes of a CID to determine its properties. + * + * Involves decoding up to 4 varints. Typically this will require only 4 to 6 + * bytes but for larger multicodec code values and larger multihash digest + * lengths these varints can be quite large. It is recommended that at least + * 10 bytes be made available in the `initialBytes` argument for a complete + * inspection. + */ + static inspectBytes(initialBytes) { + let offset = 0; + const next = () => { + const [i, length4] = decode4(initialBytes.subarray(offset)); + offset += length4; + return i; + }; + let version = next(); + let codec = DAG_PB_CODE; + if (version === 18) { + version = 0; + offset = 0; + } else { + codec = next(); + } + if (version !== 0 && version !== 1) { + throw new RangeError(`Invalid CID version ${version}`); + } + const prefixSize = offset; + const multihashCode = next(); + const digestSize = next(); + const size = offset + digestSize; + const multihashSize = size - prefixSize; + return { version, codec, multihashCode, digestSize, multihashSize, size }; + } + /** + * Takes cid in a string representation and creates an instance. If `base` + * decoder is not provided will use a default from the configuration. 
It will + * throw an error if encoding of the CID is not compatible with supplied (or + * a default decoder). + */ + static parse(source, base3) { + const [prefix, bytes] = parseCIDtoBytes(source, base3); + const cid = _CID.decode(bytes); + if (cid.version === 0 && source[0] !== "Q") { + throw Error("Version 0 CID string must not include multibase prefix"); + } + baseCache(cid).set(prefix, source); + return cid; + } +}; +function parseCIDtoBytes(source, base3) { + switch (source[0]) { + case "Q": { + const decoder = base3 ?? base58btc; + return [ + base58btc.prefix, + decoder.decode(`${base58btc.prefix}${source}`) + ]; + } + case base58btc.prefix: { + const decoder = base3 ?? base58btc; + return [base58btc.prefix, decoder.decode(source)]; + } + case base32.prefix: { + const decoder = base3 ?? base32; + return [base32.prefix, decoder.decode(source)]; + } + default: { + if (base3 == null) { + throw Error("To parse non base32 or base58btc encoded CID multibase decoder must be provided"); + } + return [source[0], base3.decode(source)]; + } + } +} +function toStringV0(bytes, cache3, base3) { + const { prefix } = base3; + if (prefix !== base58btc.prefix) { + throw Error(`Cannot string encode V0 in ${base3.name} encoding`); + } + const cid = cache3.get(prefix); + if (cid == null) { + const cid2 = base3.encode(bytes).slice(1); + cache3.set(prefix, cid2); + return cid2; + } else { + return cid; + } +} +function toStringV1(bytes, cache3, base3) { + const { prefix } = base3; + const cid = cache3.get(prefix); + if (cid == null) { + const cid2 = base3.encode(bytes); + cache3.set(prefix, cid2); + return cid2; + } else { + return cid; + } +} +var DAG_PB_CODE = 112; +var SHA_256_CODE = 18; +function encodeCID(version, code5, multihash) { + const codeOffset = encodingLength(version); + const hashOffset = codeOffset + encodingLength(code5); + const bytes = new Uint8Array(hashOffset + multihash.byteLength); + encodeTo(version, bytes, 0); + encodeTo(code5, bytes, codeOffset); + 
bytes.set(multihash, hashOffset); + return bytes; +} +var cidSymbol = Symbol.for("@ipld/js-cid/CID"); + +// node_modules/@ipld/dag-cbor/src/index.js +var CID_CBOR_TAG = 42; +function cidEncoder(obj) { + if (obj.asCID !== obj && obj["/"] !== obj.bytes) { + return null; + } + const cid = CID.asCID(obj); + if (!cid) { + return null; + } + const bytes = new Uint8Array(cid.bytes.byteLength + 1); + bytes.set(cid.bytes, 1); + return [ + new Token(Type.tag, CID_CBOR_TAG), + new Token(Type.bytes, bytes) + ]; +} +function undefinedEncoder() { + throw new Error("`undefined` is not supported by the IPLD Data Model and cannot be encoded"); +} +function numberEncoder(num) { + if (Number.isNaN(num)) { + throw new Error("`NaN` is not supported by the IPLD Data Model and cannot be encoded"); + } + if (num === Infinity || num === -Infinity) { + throw new Error("`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded"); + } + return null; +} +var _encodeOptions = { + float64: true, + typeEncoders: { + Object: cidEncoder, + undefined: undefinedEncoder, + number: numberEncoder + } +}; +var encodeOptions = { + ..._encodeOptions, + typeEncoders: { + ..._encodeOptions.typeEncoders + } +}; +function cidDecoder(bytes) { + if (bytes[0] !== 0) { + throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00"); + } + return CID.decode(bytes.subarray(1)); +} +var _decodeOptions = { + allowIndefinite: false, + coerceUndefinedToNull: true, + allowNaN: false, + allowInfinity: false, + allowBigInt: true, + // this will lead to BigInt for ints outside of + // safe-integer range, which may surprise users + strict: true, + useMaps: false, + rejectDuplicateMapKeys: true, + /** @type {import('cborg').TagDecoder[]} */ + tags: [] +}; +_decodeOptions.tags[CID_CBOR_TAG] = cidDecoder; +var decodeOptions = { + ..._decodeOptions, + tags: _decodeOptions.tags.slice() +}; +var code = 113; +var encode4 = (node) => encode(node, _encodeOptions); +var decode6 = (data) => 
decode(data, _decodeOptions); + +// node_modules/multiformats/src/bases/base32.js +var base32_exports = {}; +__export(base32_exports, { + base32: () => base322, + base32hex: () => base32hex2, + base32hexpad: () => base32hexpad2, + base32hexpadupper: () => base32hexpadupper2, + base32hexupper: () => base32hexupper2, + base32pad: () => base32pad2, + base32padupper: () => base32padupper2, + base32upper: () => base32upper2, + base32z: () => base32z2 +}); + +// node_modules/multiformats/vendor/base-x.js +function base2(ALPHABET, name4) { + if (ALPHABET.length >= 255) { + throw new TypeError("Alphabet too long"); + } + var BASE_MAP = new Uint8Array(256); + for (var j = 0; j < BASE_MAP.length; j++) { + BASE_MAP[j] = 255; + } + for (var i = 0; i < ALPHABET.length; i++) { + var x = ALPHABET.charAt(i); + var xc = x.charCodeAt(0); + if (BASE_MAP[xc] !== 255) { + throw new TypeError(x + " is ambiguous"); + } + BASE_MAP[xc] = i; + } + var BASE = ALPHABET.length; + var LEADER = ALPHABET.charAt(0); + var FACTOR = Math.log(BASE) / Math.log(256); + var iFACTOR = Math.log(256) / Math.log(BASE); + function encode12(source) { + if (source instanceof Uint8Array) + ; + else if (ArrayBuffer.isView(source)) { + source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength); + } else if (Array.isArray(source)) { + source = Uint8Array.from(source); + } + if (!(source instanceof Uint8Array)) { + throw new TypeError("Expected Uint8Array"); + } + if (source.length === 0) { + return ""; + } + var zeroes = 0; + var length4 = 0; + var pbegin = 0; + var pend = source.length; + while (pbegin !== pend && source[pbegin] === 0) { + pbegin++; + zeroes++; + } + var size = (pend - pbegin) * iFACTOR + 1 >>> 0; + var b58 = new Uint8Array(size); + while (pbegin !== pend) { + var carry = source[pbegin]; + var i2 = 0; + for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) { + carry += 256 * b58[it1] >>> 0; + b58[it1] = carry % BASE >>> 0; + carry = carry / BASE >>> 
0; + } + if (carry !== 0) { + throw new Error("Non-zero carry"); + } + length4 = i2; + pbegin++; + } + var it2 = size - length4; + while (it2 !== size && b58[it2] === 0) { + it2++; + } + var str = LEADER.repeat(zeroes); + for (; it2 < size; ++it2) { + str += ALPHABET.charAt(b58[it2]); + } + return str; + } + function decodeUnsafe(source) { + if (typeof source !== "string") { + throw new TypeError("Expected String"); + } + if (source.length === 0) { + return new Uint8Array(); + } + var psz = 0; + if (source[psz] === " ") { + return; + } + var zeroes = 0; + var length4 = 0; + while (source[psz] === LEADER) { + zeroes++; + psz++; + } + var size = (source.length - psz) * FACTOR + 1 >>> 0; + var b256 = new Uint8Array(size); + while (source[psz]) { + var carry = BASE_MAP[source.charCodeAt(psz)]; + if (carry === 255) { + return; + } + var i2 = 0; + for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) { + carry += BASE * b256[it3] >>> 0; + b256[it3] = carry % 256 >>> 0; + carry = carry / 256 >>> 0; + } + if (carry !== 0) { + throw new Error("Non-zero carry"); + } + length4 = i2; + psz++; + } + if (source[psz] === " ") { + return; + } + var it4 = size - length4; + while (it4 !== size && b256[it4] === 0) { + it4++; + } + var vch = new Uint8Array(zeroes + (size - it4)); + var j2 = zeroes; + while (it4 !== size) { + vch[j2++] = b256[it4++]; + } + return vch; + } + function decode15(string2) { + var buffer2 = decodeUnsafe(string2); + if (buffer2) { + return buffer2; + } + throw new Error(`Non-${name4} character`); + } + return { + encode: encode12, + decodeUnsafe, + decode: decode15 + }; +} +var src2 = base2; +var _brrp__multiformats_scope_baseX2 = src2; +var base_x_default2 = _brrp__multiformats_scope_baseX2; + +// node_modules/multiformats/src/bytes.js +var bytes_exports2 = {}; +__export(bytes_exports2, { + coerce: () => coerce2, + empty: () => empty2, + equals: () => equals3, + fromHex: () => fromHex, + fromString: () => fromString2, + isBinary: 
() => isBinary, + toHex: () => toHex, + toString: () => toString2 +}); +var empty2 = new Uint8Array(0); +var toHex = (d) => d.reduce((hex, byte) => hex + byte.toString(16).padStart(2, "0"), ""); +var fromHex = (hex) => { + const hexes = hex.match(/../g); + return hexes ? new Uint8Array(hexes.map((b) => parseInt(b, 16))) : empty2; +}; +var equals3 = (aa, bb) => { + if (aa === bb) + return true; + if (aa.byteLength !== bb.byteLength) { + return false; + } + for (let ii = 0; ii < aa.byteLength; ii++) { + if (aa[ii] !== bb[ii]) { + return false; + } + } + return true; +}; +var coerce2 = (o) => { + if (o instanceof Uint8Array && o.constructor.name === "Uint8Array") + return o; + if (o instanceof ArrayBuffer) + return new Uint8Array(o); + if (ArrayBuffer.isView(o)) { + return new Uint8Array(o.buffer, o.byteOffset, o.byteLength); + } + throw new Error("Unknown type, must be binary type"); +}; +var isBinary = (o) => o instanceof ArrayBuffer || ArrayBuffer.isView(o); +var fromString2 = (str) => new TextEncoder().encode(str); +var toString2 = (b) => new TextDecoder().decode(b); + +// node_modules/multiformats/src/bases/base.js +var Encoder2 = class { + /** + * @param {Base} name + * @param {Prefix} prefix + * @param {(bytes:Uint8Array) => string} baseEncode + */ + constructor(name4, prefix, baseEncode) { + this.name = name4; + this.prefix = prefix; + this.baseEncode = baseEncode; + } + /** + * @param {Uint8Array} bytes + * @returns {API.Multibase} + */ + encode(bytes) { + if (bytes instanceof Uint8Array) { + return `${this.prefix}${this.baseEncode(bytes)}`; + } else { + throw Error("Unknown type, must be binary type"); + } + } +}; +var Decoder2 = class { + /** + * @param {Base} name + * @param {Prefix} prefix + * @param {(text:string) => Uint8Array} baseDecode + */ + constructor(name4, prefix, baseDecode) { + this.name = name4; + this.prefix = prefix; + if (prefix.codePointAt(0) === void 0) { + throw new Error("Invalid prefix character"); + } + this.prefixCodePoint = /** 
@type {number} */ + prefix.codePointAt(0); + this.baseDecode = baseDecode; + } + /** + * @param {string} text + */ + decode(text) { + if (typeof text === "string") { + if (text.codePointAt(0) !== this.prefixCodePoint) { + throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`); + } + return this.baseDecode(text.slice(this.prefix.length)); + } else { + throw Error("Can only multibase decode strings"); + } + } + /** + * @template {string} OtherPrefix + * @param {API.UnibaseDecoder|ComposedDecoder} decoder + * @returns {ComposedDecoder} + */ + or(decoder) { + return or2(this, decoder); + } +}; +var ComposedDecoder2 = class { + /** + * @param {Decoders} decoders + */ + constructor(decoders) { + this.decoders = decoders; + } + /** + * @template {string} OtherPrefix + * @param {API.UnibaseDecoder|ComposedDecoder} decoder + * @returns {ComposedDecoder} + */ + or(decoder) { + return or2(this, decoder); + } + /** + * @param {string} input + * @returns {Uint8Array} + */ + decode(input) { + const prefix = ( + /** @type {Prefix} */ + input[0] + ); + const decoder = this.decoders[prefix]; + if (decoder) { + return decoder.decode(input); + } else { + throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`); + } + } +}; +var or2 = (left, right) => new ComposedDecoder2( + /** @type {Decoders} */ + { + ...left.decoders || { [ + /** @type API.UnibaseDecoder */ + left.prefix + ]: left }, + ...right.decoders || { [ + /** @type API.UnibaseDecoder */ + right.prefix + ]: right } + } +); +var Codec2 = class { + /** + * @param {Base} name + * @param {Prefix} prefix + * @param {(bytes:Uint8Array) => string} baseEncode + * @param {(text:string) => Uint8Array} baseDecode + */ + constructor(name4, prefix, baseEncode, baseDecode) { + this.name = name4; + this.prefix = prefix; + this.baseEncode = baseEncode; + 
this.baseDecode = baseDecode; + this.encoder = new Encoder2(name4, prefix, baseEncode); + this.decoder = new Decoder2(name4, prefix, baseDecode); + } + /** + * @param {Uint8Array} input + */ + encode(input) { + return this.encoder.encode(input); + } + /** + * @param {string} input + */ + decode(input) { + return this.decoder.decode(input); + } +}; +var from2 = ({ name: name4, prefix, encode: encode12, decode: decode15 }) => new Codec2(name4, prefix, encode12, decode15); +var baseX2 = ({ prefix, name: name4, alphabet: alphabet2 }) => { + const { encode: encode12, decode: decode15 } = base_x_default2(alphabet2, name4); + return from2({ + prefix, + name: name4, + encode: encode12, + /** + * @param {string} text + */ + decode: (text) => coerce2(decode15(text)) + }); +}; +var decode7 = (string2, alphabet2, bitsPerChar, name4) => { + const codes = {}; + for (let i = 0; i < alphabet2.length; ++i) { + codes[alphabet2[i]] = i; + } + let end = string2.length; + while (string2[end - 1] === "=") { + --end; + } + const out = new Uint8Array(end * bitsPerChar / 8 | 0); + let bits = 0; + let buffer2 = 0; + let written = 0; + for (let i = 0; i < end; ++i) { + const value = codes[string2[i]]; + if (value === void 0) { + throw new SyntaxError(`Non-${name4} character`); + } + buffer2 = buffer2 << bitsPerChar | value; + bits += bitsPerChar; + if (bits >= 8) { + bits -= 8; + out[written++] = 255 & buffer2 >> bits; + } + } + if (bits >= bitsPerChar || 255 & buffer2 << 8 - bits) { + throw new SyntaxError("Unexpected end of data"); + } + return out; +}; +var encode5 = (data, alphabet2, bitsPerChar) => { + const pad = alphabet2[alphabet2.length - 1] === "="; + const mask = (1 << bitsPerChar) - 1; + let out = ""; + let bits = 0; + let buffer2 = 0; + for (let i = 0; i < data.length; ++i) { + buffer2 = buffer2 << 8 | data[i]; + bits += 8; + while (bits > bitsPerChar) { + bits -= bitsPerChar; + out += alphabet2[mask & buffer2 >> bits]; + } + } + if (bits) { + out += alphabet2[mask & buffer2 << 
bitsPerChar - bits]; + } + if (pad) { + while (out.length * bitsPerChar & 7) { + out += "="; + } + } + return out; +}; +var rfc46482 = ({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) => { + return from2({ + prefix, + name: name4, + encode(input) { + return encode5(input, alphabet2, bitsPerChar); + }, + decode(input) { + return decode7(input, alphabet2, bitsPerChar, name4); + } + }); +}; + +// node_modules/multiformats/src/bases/base32.js +var base322 = rfc46482({ + prefix: "b", + name: "base32", + alphabet: "abcdefghijklmnopqrstuvwxyz234567", + bitsPerChar: 5 +}); +var base32upper2 = rfc46482({ + prefix: "B", + name: "base32upper", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", + bitsPerChar: 5 +}); +var base32pad2 = rfc46482({ + prefix: "c", + name: "base32pad", + alphabet: "abcdefghijklmnopqrstuvwxyz234567=", + bitsPerChar: 5 +}); +var base32padupper2 = rfc46482({ + prefix: "C", + name: "base32padupper", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=", + bitsPerChar: 5 +}); +var base32hex2 = rfc46482({ + prefix: "v", + name: "base32hex", + alphabet: "0123456789abcdefghijklmnopqrstuv", + bitsPerChar: 5 +}); +var base32hexupper2 = rfc46482({ + prefix: "V", + name: "base32hexupper", + alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV", + bitsPerChar: 5 +}); +var base32hexpad2 = rfc46482({ + prefix: "t", + name: "base32hexpad", + alphabet: "0123456789abcdefghijklmnopqrstuv=", + bitsPerChar: 5 +}); +var base32hexpadupper2 = rfc46482({ + prefix: "T", + name: "base32hexpadupper", + alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=", + bitsPerChar: 5 +}); +var base32z2 = rfc46482({ + prefix: "h", + name: "base32z", + alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769", + bitsPerChar: 5 +}); + +// node_modules/multiformats/src/bases/base58.js +var base58_exports = {}; +__export(base58_exports, { + base58btc: () => base58btc2, + base58flickr: () => base58flickr2 +}); +var base58btc2 = baseX2({ + name: "base58btc", + prefix: "z", + alphabet: 
"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" +}); +var base58flickr2 = baseX2({ + name: "base58flickr", + prefix: "Z", + alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ" +}); + +// node_modules/multiformats/vendor/varint.js +var encode_12 = encode6; +var MSB2 = 128; +var REST2 = 127; +var MSBALL2 = ~REST2; +var INT2 = Math.pow(2, 31); +function encode6(num, out, offset) { + out = out || []; + offset = offset || 0; + var oldOffset = offset; + while (num >= INT2) { + out[offset++] = num & 255 | MSB2; + num /= 128; + } + while (num & MSBALL2) { + out[offset++] = num & 255 | MSB2; + num >>>= 7; + } + out[offset] = num | 0; + encode6.bytes = offset - oldOffset + 1; + return out; +} +var decode8 = read2; +var MSB$12 = 128; +var REST$12 = 127; +function read2(buf2, offset) { + var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length; + do { + if (counter >= l) { + read2.bytes = 0; + throw new RangeError("Could not decode varint"); + } + b = buf2[counter++]; + res += shift < 28 ? (b & REST$12) << shift : (b & REST$12) * Math.pow(2, shift); + shift += 7; + } while (b >= MSB$12); + read2.bytes = counter - offset; + return res; +} +var N12 = Math.pow(2, 7); +var N22 = Math.pow(2, 14); +var N32 = Math.pow(2, 21); +var N42 = Math.pow(2, 28); +var N52 = Math.pow(2, 35); +var N62 = Math.pow(2, 42); +var N72 = Math.pow(2, 49); +var N82 = Math.pow(2, 56); +var N92 = Math.pow(2, 63); +var length2 = function(value) { + return value < N12 ? 1 : value < N22 ? 2 : value < N32 ? 3 : value < N42 ? 4 : value < N52 ? 5 : value < N62 ? 6 : value < N72 ? 7 : value < N82 ? 8 : value < N92 ? 
9 : 10; +}; +var varint2 = { + encode: encode_12, + decode: decode8, + encodingLength: length2 +}; +var _brrp_varint2 = varint2; +var varint_default2 = _brrp_varint2; + +// node_modules/multiformats/src/varint.js +var decode9 = (data, offset = 0) => { + const code5 = varint_default2.decode(data, offset); + return [code5, varint_default2.decode.bytes]; +}; +var encodeTo2 = (int, target, offset = 0) => { + varint_default2.encode(int, target, offset); + return target; +}; +var encodingLength2 = (int) => { + return varint_default2.encodingLength(int); +}; + +// node_modules/multiformats/src/hashes/digest.js +var create2 = (code5, digest2) => { + const size = digest2.byteLength; + const sizeOffset = encodingLength2(code5); + const digestOffset = sizeOffset + encodingLength2(size); + const bytes = new Uint8Array(digestOffset + size); + encodeTo2(code5, bytes, 0); + encodeTo2(size, bytes, sizeOffset); + bytes.set(digest2, digestOffset); + return new Digest2(code5, size, digest2, bytes); +}; +var decode10 = (multihash) => { + const bytes = coerce2(multihash); + const [code5, sizeOffset] = decode9(bytes); + const [size, digestOffset] = decode9(bytes.subarray(sizeOffset)); + const digest2 = bytes.subarray(sizeOffset + digestOffset); + if (digest2.byteLength !== size) { + throw new Error("Incorrect length"); + } + return new Digest2(code5, size, digest2, bytes); +}; +var equals4 = (a, b) => { + if (a === b) { + return true; + } else { + const data = ( + /** @type {{code?:unknown, size?:unknown, bytes?:unknown}} */ + b + ); + return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals3(a.bytes, data.bytes); + } +}; +var Digest2 = class { + /** + * Creates a multihash digest. 
+ * + * @param {Code} code + * @param {Size} size + * @param {Uint8Array} digest + * @param {Uint8Array} bytes + */ + constructor(code5, size, digest2, bytes) { + this.code = code5; + this.size = size; + this.digest = digest2; + this.bytes = bytes; + } +}; + +// node_modules/multiformats/src/cid.js +var format2 = (link, base3) => { + const { bytes, version } = link; + switch (version) { + case 0: + return toStringV02( + bytes, + baseCache2(link), + /** @type {API.MultibaseEncoder<"z">} */ + base3 || base58btc2.encoder + ); + default: + return toStringV12( + bytes, + baseCache2(link), + /** @type {API.MultibaseEncoder} */ + base3 || base322.encoder + ); + } +}; +var cache2 = /* @__PURE__ */ new WeakMap(); +var baseCache2 = (cid) => { + const baseCache3 = cache2.get(cid); + if (baseCache3 == null) { + const baseCache4 = /* @__PURE__ */ new Map(); + cache2.set(cid, baseCache4); + return baseCache4; + } + return baseCache3; +}; +var CID2 = class _CID { + /** + * @param {Version} version - Version of the CID + * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv + * @param {API.MultihashDigest} multihash - (Multi)hash of the of the content. 
+ * @param {Uint8Array} bytes + */ + constructor(version, code5, multihash, bytes) { + this.code = code5; + this.version = version; + this.multihash = multihash; + this.bytes = bytes; + this["/"] = bytes; + } + /** + * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes` + * please either use `CID.asCID(cid)` or switch to new signalling mechanism + * + * @deprecated + */ + get asCID() { + return this; + } + // ArrayBufferView + get byteOffset() { + return this.bytes.byteOffset; + } + // ArrayBufferView + get byteLength() { + return this.bytes.byteLength; + } + /** + * @returns {CID} + */ + toV0() { + switch (this.version) { + case 0: { + return ( + /** @type {CID} */ + this + ); + } + case 1: { + const { code: code5, multihash } = this; + if (code5 !== DAG_PB_CODE2) { + throw new Error("Cannot convert a non dag-pb CID to CIDv0"); + } + if (multihash.code !== SHA_256_CODE2) { + throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0"); + } + return ( + /** @type {CID} */ + _CID.createV0( + /** @type {API.MultihashDigest} */ + multihash + ) + ); + } + default: { + throw Error( + `Can not convert CID version ${this.version} to version 0. This is a bug please report` + ); + } + } + } + /** + * @returns {CID} + */ + toV1() { + switch (this.version) { + case 0: { + const { code: code5, digest: digest2 } = this.multihash; + const multihash = create2(code5, digest2); + return ( + /** @type {CID} */ + _CID.createV1(this.code, multihash) + ); + } + case 1: { + return ( + /** @type {CID} */ + this + ); + } + default: { + throw Error( + `Can not convert CID version ${this.version} to version 1. 
This is a bug please report` + ); + } + } + } + /** + * @param {unknown} other + * @returns {other is CID} + */ + equals(other) { + return _CID.equals(this, other); + } + /** + * @template {unknown} Data + * @template {number} Format + * @template {number} Alg + * @template {API.Version} Version + * @param {API.Link} self + * @param {unknown} other + * @returns {other is CID} + */ + static equals(self, other) { + const unknown = ( + /** @type {{code?:unknown, version?:unknown, multihash?:unknown}} */ + other + ); + return unknown && self.code === unknown.code && self.version === unknown.version && equals4(self.multihash, unknown.multihash); + } + /** + * @param {API.MultibaseEncoder} [base] + * @returns {string} + */ + toString(base3) { + return format2(this, base3); + } + /** + * @returns {API.LinkJSON} + */ + toJSON() { + return { "/": format2(this) }; + } + link() { + return this; + } + get [Symbol.toStringTag]() { + return "CID"; + } + // Legacy + [Symbol.for("nodejs.util.inspect.custom")]() { + return `CID(${this.toString()})`; + } + /** + * Takes any input `value` and returns a `CID` instance if it was + * a `CID` otherwise returns `null`. If `value` is instanceof `CID` + * it will return value back. If `value` is not instance of this CID + * class, but is compatible CID it will return new instance of this + * `CID` class. Otherwise returns null. + * + * This allows two different incompatible versions of CID library to + * co-exist and interop as long as binary interface is compatible. 
+ * + * @template {unknown} Data + * @template {number} Format + * @template {number} Alg + * @template {API.Version} Version + * @template {unknown} U + * @param {API.Link|U} input + * @returns {CID|null} + */ + static asCID(input) { + if (input == null) { + return null; + } + const value = ( + /** @type {any} */ + input + ); + if (value instanceof _CID) { + return value; + } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) { + const { version, code: code5, multihash, bytes } = value; + return new _CID( + version, + code5, + /** @type {API.MultihashDigest} */ + multihash, + bytes || encodeCID2(version, code5, multihash.bytes) + ); + } else if (value[cidSymbol2] === true) { + const { version, multihash, code: code5 } = value; + const digest2 = ( + /** @type {API.MultihashDigest} */ + decode10(multihash) + ); + return _CID.create(version, code5, digest2); + } else { + return null; + } + } + /** + * + * @template {unknown} Data + * @template {number} Format + * @template {number} Alg + * @template {API.Version} Version + * @param {Version} version - Version of the CID + * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv + * @param {API.MultihashDigest} digest - (Multi)hash of the of the content. 
+ * @returns {CID} + */ + static create(version, code5, digest2) { + if (typeof code5 !== "number") { + throw new Error("String codecs are no longer supported"); + } + if (!(digest2.bytes instanceof Uint8Array)) { + throw new Error("Invalid digest"); + } + switch (version) { + case 0: { + if (code5 !== DAG_PB_CODE2) { + throw new Error( + `Version 0 CID must use dag-pb (code: ${DAG_PB_CODE2}) block encoding` + ); + } else { + return new _CID(version, code5, digest2, digest2.bytes); + } + } + case 1: { + const bytes = encodeCID2(version, code5, digest2.bytes); + return new _CID(version, code5, digest2, bytes); + } + default: { + throw new Error("Invalid version"); + } + } + } + /** + * Simplified version of `create` for CIDv0. + * + * @template {unknown} [T=unknown] + * @param {API.MultihashDigest} digest - Multihash. + * @returns {CID} + */ + static createV0(digest2) { + return _CID.create(0, DAG_PB_CODE2, digest2); + } + /** + * Simplified version of `create` for CIDv1. + * + * @template {unknown} Data + * @template {number} Code + * @template {number} Alg + * @param {Code} code - Content encoding format code. + * @param {API.MultihashDigest} digest - Miltihash of the content. + * @returns {CID} + */ + static createV1(code5, digest2) { + return _CID.create(1, code5, digest2); + } + /** + * Decoded a CID from its binary representation. The byte array must contain + * only the CID with no additional bytes. + * + * An error will be thrown if the bytes provided do not contain a valid + * binary representation of a CID. + * + * @template {unknown} Data + * @template {number} Code + * @template {number} Alg + * @template {API.Version} Ver + * @param {API.ByteView>} bytes + * @returns {CID} + */ + static decode(bytes) { + const [cid, remainder] = _CID.decodeFirst(bytes); + if (remainder.length) { + throw new Error("Incorrect length"); + } + return cid; + } + /** + * Decoded a CID from its binary representation at the beginning of a byte + * array. 
+ * + * Returns an array with the first element containing the CID and the second + * element containing the remainder of the original byte array. The remainder + * will be a zero-length byte array if the provided bytes only contained a + * binary CID representation. + * + * @template {unknown} T + * @template {number} C + * @template {number} A + * @template {API.Version} V + * @param {API.ByteView>} bytes + * @returns {[CID, Uint8Array]} + */ + static decodeFirst(bytes) { + const specs = _CID.inspectBytes(bytes); + const prefixSize = specs.size - specs.multihashSize; + const multihashBytes = coerce2( + bytes.subarray(prefixSize, prefixSize + specs.multihashSize) + ); + if (multihashBytes.byteLength !== specs.multihashSize) { + throw new Error("Incorrect length"); + } + const digestBytes = multihashBytes.subarray( + specs.multihashSize - specs.digestSize + ); + const digest2 = new Digest2( + specs.multihashCode, + specs.digestSize, + digestBytes, + multihashBytes + ); + const cid = specs.version === 0 ? _CID.createV0( + /** @type {API.MultihashDigest} */ + digest2 + ) : _CID.createV1(specs.codec, digest2); + return [ + /** @type {CID} */ + cid, + bytes.subarray(specs.size) + ]; + } + /** + * Inspect the initial bytes of a CID to determine its properties. + * + * Involves decoding up to 4 varints. Typically this will require only 4 to 6 + * bytes but for larger multicodec code values and larger multihash digest + * lengths these varints can be quite large. It is recommended that at least + * 10 bytes be made available in the `initialBytes` argument for a complete + * inspection. 
+ * + * @template {unknown} T + * @template {number} C + * @template {number} A + * @template {API.Version} V + * @param {API.ByteView>} initialBytes + * @returns {{ version:V, codec:C, multihashCode:A, digestSize:number, multihashSize:number, size:number }} + */ + static inspectBytes(initialBytes) { + let offset = 0; + const next = () => { + const [i, length4] = decode9(initialBytes.subarray(offset)); + offset += length4; + return i; + }; + let version = ( + /** @type {V} */ + next() + ); + let codec = ( + /** @type {C} */ + DAG_PB_CODE2 + ); + if ( + /** @type {number} */ + version === 18 + ) { + version = /** @type {V} */ + 0; + offset = 0; + } else { + codec = /** @type {C} */ + next(); + } + if (version !== 0 && version !== 1) { + throw new RangeError(`Invalid CID version ${version}`); + } + const prefixSize = offset; + const multihashCode = ( + /** @type {A} */ + next() + ); + const digestSize = next(); + const size = offset + digestSize; + const multihashSize = size - prefixSize; + return { version, codec, multihashCode, digestSize, multihashSize, size }; + } + /** + * Takes cid in a string representation and creates an instance. If `base` + * decoder is not provided will use a default from the configuration. It will + * throw an error if encoding of the CID is not compatible with supplied (or + * a default decoder). 
+ * + * @template {string} Prefix + * @template {unknown} Data + * @template {number} Code + * @template {number} Alg + * @template {API.Version} Ver + * @param {API.ToString, Prefix>} source + * @param {API.MultibaseDecoder} [base] + * @returns {CID} + */ + static parse(source, base3) { + const [prefix, bytes] = parseCIDtoBytes2(source, base3); + const cid = _CID.decode(bytes); + if (cid.version === 0 && source[0] !== "Q") { + throw Error("Version 0 CID string must not include multibase prefix"); + } + baseCache2(cid).set(prefix, source); + return cid; + } +}; +var parseCIDtoBytes2 = (source, base3) => { + switch (source[0]) { + case "Q": { + const decoder = base3 || base58btc2; + return [ + /** @type {Prefix} */ + base58btc2.prefix, + decoder.decode(`${base58btc2.prefix}${source}`) + ]; + } + case base58btc2.prefix: { + const decoder = base3 || base58btc2; + return [ + /** @type {Prefix} */ + base58btc2.prefix, + decoder.decode(source) + ]; + } + case base322.prefix: { + const decoder = base3 || base322; + return [ + /** @type {Prefix} */ + base322.prefix, + decoder.decode(source) + ]; + } + default: { + if (base3 == null) { + throw Error( + "To parse non base32 or base58btc encoded CID multibase decoder must be provided" + ); + } + return [ + /** @type {Prefix} */ + source[0], + base3.decode(source) + ]; + } + } +}; +var toStringV02 = (bytes, cache3, base3) => { + const { prefix } = base3; + if (prefix !== base58btc2.prefix) { + throw Error(`Cannot string encode V0 in ${base3.name} encoding`); + } + const cid = cache3.get(prefix); + if (cid == null) { + const cid2 = base3.encode(bytes).slice(1); + cache3.set(prefix, cid2); + return cid2; + } else { + return cid; + } +}; +var toStringV12 = (bytes, cache3, base3) => { + const { prefix } = base3; + const cid = cache3.get(prefix); + if (cid == null) { + const cid2 = base3.encode(bytes); + cache3.set(prefix, cid2); + return cid2; + } else { + return cid; + } +}; +var DAG_PB_CODE2 = 112; +var SHA_256_CODE2 = 18; +var 
encodeCID2 = (version, code5, multihash) => { + const codeOffset = encodingLength2(version); + const hashOffset = codeOffset + encodingLength2(code5); + const bytes = new Uint8Array(hashOffset + multihash.byteLength); + encodeTo2(version, bytes, 0); + encodeTo2(code5, bytes, codeOffset); + bytes.set(multihash, hashOffset); + return bytes; +}; +var cidSymbol2 = Symbol.for("@ipld/js-cid/CID"); + +// node_modules/@ipld/car/src/decoder-common.js +var import_varint3 = __toESM(require_varint(), 1); +var V2_HEADER_LENGTH = ( + /* characteristics */ + 16 + 8 + 8 + 8 +); +function decodeVarint(bytes, seeker) { + if (!bytes.length) { + throw new Error("Unexpected end of data"); + } + const i = import_varint3.default.decode(bytes); + seeker.seek( + /** @type {number} */ + import_varint3.default.decode.bytes + ); + return i; +} +function decodeV2Header(bytes) { + const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength); + let offset = 0; + const header = { + version: 2, + /** @type {[bigint, bigint]} */ + characteristics: [ + dv.getBigUint64(offset, true), + dv.getBigUint64(offset += 8, true) + ], + dataOffset: Number(dv.getBigUint64(offset += 8, true)), + dataSize: Number(dv.getBigUint64(offset += 8, true)), + indexOffset: Number(dv.getBigUint64(offset += 8, true)) + }; + return header; +} + +// node_modules/@ipld/car/src/header-validator.js +var Kinds = { + Null: ( + /** @returns {undefined|null} */ + (obj) => obj === null ? obj : void 0 + ), + Int: ( + /** @returns {undefined|number} */ + (obj) => Number.isInteger(obj) ? obj : void 0 + ), + Float: ( + /** @returns {undefined|number} */ + (obj) => typeof obj === "number" && Number.isFinite(obj) ? obj : void 0 + ), + String: ( + /** @returns {undefined|string} */ + (obj) => typeof obj === "string" ? obj : void 0 + ), + Bool: ( + /** @returns {undefined|boolean} */ + (obj) => typeof obj === "boolean" ? obj : void 0 + ), + Bytes: ( + /** @returns {undefined|Uint8Array} */ + (obj) => obj instanceof Uint8Array ? 
obj : void 0 + ), + Link: ( + /** @returns {undefined|object} */ + (obj) => obj !== null && typeof obj === "object" && obj.asCID === obj ? obj : void 0 + ), + List: ( + /** @returns {undefined|Array} */ + (obj) => Array.isArray(obj) ? obj : void 0 + ), + Map: ( + /** @returns {undefined|object} */ + (obj) => obj !== null && typeof obj === "object" && obj.asCID !== obj && !Array.isArray(obj) && !(obj instanceof Uint8Array) ? obj : void 0 + ) +}; +var Types = { + "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link, + "CarV1HeaderOrV2Pragma > roots (anon)": ( + /** @returns {undefined|any} */ + (obj) => { + if (Kinds.List(obj) === void 0) { + return void 0; + } + for (let i = 0; i < obj.length; i++) { + let v = obj[i]; + v = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v); + if (v === void 0) { + return void 0; + } + if (v !== obj[i]) { + const ret = obj.slice(0, i); + for (let j = i; j < obj.length; j++) { + let v2 = obj[j]; + v2 = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2); + if (v2 === void 0) { + return void 0; + } + ret.push(v2); + } + return ret; + } + } + return obj; + } + ), + Int: Kinds.Int, + CarV1HeaderOrV2Pragma: ( + /** @returns {undefined|any} */ + (obj) => { + if (Kinds.Map(obj) === void 0) { + return void 0; + } + const entries = Object.entries(obj); + let ret = obj; + let requiredCount = 1; + for (let i = 0; i < entries.length; i++) { + const [key, value] = entries[i]; + switch (key) { + case "roots": + { + const v = Types["CarV1HeaderOrV2Pragma > roots (anon)"](obj[key]); + if (v === void 0) { + return void 0; + } + if (v !== value || ret !== obj) { + if (ret === obj) { + ret = {}; + for (let j = 0; j < i; j++) { + ret[entries[j][0]] = entries[j][1]; + } + } + ret.roots = v; + } + } + break; + case "version": + { + requiredCount--; + const v = Types.Int(obj[key]); + if (v === void 0) { + return void 0; + } + if (v !== value || ret !== obj) { + if (ret === obj) { + ret = {}; + for (let j 
= 0; j < i; j++) { + ret[entries[j][0]] = entries[j][1]; + } + } + ret.version = v; + } + } + break; + default: + return void 0; + } + } + if (requiredCount > 0) { + return void 0; + } + return ret; + } + ) +}; +var Reprs = { + "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link, + "CarV1HeaderOrV2Pragma > roots (anon)": ( + /** @returns {undefined|any} */ + (obj) => { + if (Kinds.List(obj) === void 0) { + return void 0; + } + for (let i = 0; i < obj.length; i++) { + let v = obj[i]; + v = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v); + if (v === void 0) { + return void 0; + } + if (v !== obj[i]) { + const ret = obj.slice(0, i); + for (let j = i; j < obj.length; j++) { + let v2 = obj[j]; + v2 = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2); + if (v2 === void 0) { + return void 0; + } + ret.push(v2); + } + return ret; + } + } + return obj; + } + ), + Int: Kinds.Int, + CarV1HeaderOrV2Pragma: ( + /** @returns {undefined|any} */ + (obj) => { + if (Kinds.Map(obj) === void 0) { + return void 0; + } + const entries = Object.entries(obj); + let ret = obj; + let requiredCount = 1; + for (let i = 0; i < entries.length; i++) { + const [key, value] = entries[i]; + switch (key) { + case "roots": + { + const v = Reprs["CarV1HeaderOrV2Pragma > roots (anon)"](value); + if (v === void 0) { + return void 0; + } + if (v !== value || ret !== obj) { + if (ret === obj) { + ret = {}; + for (let j = 0; j < i; j++) { + ret[entries[j][0]] = entries[j][1]; + } + } + ret.roots = v; + } + } + break; + case "version": + { + requiredCount--; + const v = Reprs.Int(value); + if (v === void 0) { + return void 0; + } + if (v !== value || ret !== obj) { + if (ret === obj) { + ret = {}; + for (let j = 0; j < i; j++) { + ret[entries[j][0]] = entries[j][1]; + } + } + ret.version = v; + } + } + break; + default: + return void 0; + } + } + if (requiredCount > 0) { + return void 0; + } + return ret; + } + ) +}; +var CarV1HeaderOrV2Pragma = { + 
toTyped: Types.CarV1HeaderOrV2Pragma, + toRepresentation: Reprs.CarV1HeaderOrV2Pragma +}; + +// node_modules/@ipld/car/src/buffer-reader.js +var fsread = import_fs.default.readSync; + +// node_modules/cborg/lib/length.js +var cborEncoders2 = makeCborEncoders(); + +// node_modules/@ipld/car/src/buffer-writer.js +var import_varint4 = __toESM(require_varint(), 1); +var headerPreludeTokens = [ + new Token(Type.map, 2), + new Token(Type.string, "version"), + new Token(Type.uint, 1), + new Token(Type.string, "roots") +]; +var CID_TAG = new Token(Type.tag, 42); + +// node_modules/@ipld/car/src/decoder.js +async function readHeader(reader, strictVersion) { + const length4 = decodeVarint(await reader.upTo(8), reader); + if (length4 === 0) { + throw new Error("Invalid CAR header (zero length)"); + } + const header = await reader.exactly(length4, true); + const block = decode6(header); + if (CarV1HeaderOrV2Pragma.toTyped(block) === void 0) { + throw new Error("Invalid CAR header format"); + } + if (block.version !== 1 && block.version !== 2 || strictVersion !== void 0 && block.version !== strictVersion) { + throw new Error(`Invalid CAR version: ${block.version}${strictVersion !== void 0 ? 
` (expected ${strictVersion})` : ""}`); + } + if (block.version === 1) { + if (!Array.isArray(block.roots)) { + throw new Error("Invalid CAR header format"); + } + return block; + } + if (block.roots !== void 0) { + throw new Error("Invalid CAR header format"); + } + const v2Header = decodeV2Header(await reader.exactly(V2_HEADER_LENGTH, true)); + reader.seek(v2Header.dataOffset - reader.pos); + const v1Header = await readHeader(reader, 1); + return Object.assign(v1Header, v2Header); +} +function bytesReader(bytes) { + let pos = 0; + return { + async upTo(length4) { + const out = bytes.subarray(pos, pos + Math.min(length4, bytes.length - pos)); + return out; + }, + async exactly(length4, seek = false) { + if (length4 > bytes.length - pos) { + throw new Error("Unexpected end of data"); + } + const out = bytes.subarray(pos, pos + length4); + if (seek) { + pos += length4; + } + return out; + }, + seek(length4) { + pos += length4; + }, + get pos() { + return pos; + } + }; +} +function chunkReader(readChunk) { + let pos = 0; + let have = 0; + let offset = 0; + let currentChunk = new Uint8Array(0); + const read4 = async (length4) => { + have = currentChunk.length - offset; + const bufa = [currentChunk.subarray(offset)]; + while (have < length4) { + const chunk = await readChunk(); + if (chunk == null) { + break; + } + if (have < 0) { + if (chunk.length > have) { + bufa.push(chunk.subarray(-have)); + } + } else { + bufa.push(chunk); + } + have += chunk.length; + } + currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0)); + let off = 0; + for (const b of bufa) { + currentChunk.set(b, off); + off += b.length; + } + offset = 0; + }; + return { + async upTo(length4) { + if (currentChunk.length - offset < length4) { + await read4(length4); + } + return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length4)); + }, + async exactly(length4, seek = false) { + if (currentChunk.length - offset < length4) { + await read4(length4); + } + 
if (currentChunk.length - offset < length4) { + throw new Error("Unexpected end of data"); + } + const out = currentChunk.subarray(offset, offset + length4); + if (seek) { + pos += length4; + offset += length4; + } + return out; + }, + seek(length4) { + pos += length4; + offset += length4; + }, + get pos() { + return pos; + } + }; +} + +// node_modules/@ipld/car/src/reader.js +var import_fs2 = __toESM(require("fs"), 1); +var import_util = require("util"); +var fsread2 = (0, import_util.promisify)(import_fs2.default.read); + +// node_modules/@ipld/car/src/writer.js +var import_fs3 = __toESM(require("fs"), 1); +var import_util2 = require("util"); + +// node_modules/@ipld/car/src/encoder.js +var import_varint5 = __toESM(require_varint(), 1); +function createHeader(roots) { + const headerBytes = encode4({ version: 1, roots }); + const varintBytes = import_varint5.default.encode(headerBytes.length); + const header = new Uint8Array(varintBytes.length + headerBytes.length); + header.set(varintBytes, 0); + header.set(headerBytes, varintBytes.length); + return header; +} +function createEncoder(writer) { + return { + /** + * @param {CID[]} roots + * @returns {Promise} + */ + async setRoots(roots) { + const bytes = createHeader(roots); + await writer.write(bytes); + }, + /** + * @param {Block} block + * @returns {Promise} + */ + async writeBlock(block) { + const { cid, bytes } = block; + await writer.write(new Uint8Array(import_varint5.default.encode(cid.bytes.length + bytes.length))); + await writer.write(cid.bytes); + if (bytes.length) { + await writer.write(bytes); + } + }, + /** + * @returns {Promise} + */ + async close() { + await writer.end(); + } + }; +} + +// node_modules/@ipld/car/src/iterator-channel.js +function noop() { +} +function create3() { + const chunkQueue = []; + let drainer = null; + let drainerResolver = noop; + let ended = false; + let outWait = null; + let outWaitResolver = noop; + const makeDrainer = () => { + if (!drainer) { + drainer = new 
Promise((resolve6) => { + drainerResolver = () => { + drainer = null; + drainerResolver = noop; + resolve6(); + }; + }); + } + return drainer; + }; + const writer = { + /** + * @param {T} chunk + * @returns {Promise} + */ + write(chunk) { + chunkQueue.push(chunk); + const drainer2 = makeDrainer(); + outWaitResolver(); + return drainer2; + }, + async end() { + ended = true; + const drainer2 = makeDrainer(); + outWaitResolver(); + await drainer2; + } + }; + const iterator = { + /** @returns {Promise>} */ + async next() { + const chunk = chunkQueue.shift(); + if (chunk) { + if (chunkQueue.length === 0) { + drainerResolver(); + } + return { done: false, value: chunk }; + } + if (ended) { + drainerResolver(); + return { done: true, value: void 0 }; + } + if (!outWait) { + outWait = new Promise((resolve6) => { + outWaitResolver = () => { + outWait = null; + outWaitResolver = noop; + return resolve6(iterator.next()); + }; + }); + } + return outWait; + } + }; + return { writer, iterator }; +} + +// node_modules/@ipld/car/src/writer-browser.js +var CarWriter = class _CarWriter { + /** + * @param {CID[]} roots + * @param {CarEncoder} encoder + */ + constructor(roots, encoder) { + this._encoder = encoder; + this._mutex = encoder.setRoots(roots); + this._ended = false; + } + /** + * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive. + * + * @function + * @memberof CarWriter + * @instance + * @async + * @param {Block} block - A `{ cid:CID, bytes:Uint8Array }` pair. + * @returns {Promise} The returned promise will only resolve once the + * bytes this block generates are written to the `out` iterable. 
+ */ + async put(block) { + if (!(block.bytes instanceof Uint8Array) || !block.cid) { + throw new TypeError("Can only write {cid, bytes} objects"); + } + if (this._ended) { + throw new Error("Already closed"); + } + const cid = CID2.asCID(block.cid); + if (!cid) { + throw new TypeError("Can only write {cid, bytes} objects"); + } + this._mutex = this._mutex.then(() => this._encoder.writeBlock({ cid, bytes: block.bytes })); + return this._mutex; + } + /** + * Finalise the CAR archive and signal that the `out` iterable should end once + * any remaining bytes are written. + * + * @function + * @memberof CarWriter + * @instance + * @async + * @returns {Promise} + */ + async close() { + if (this._ended) { + throw new Error("Already closed"); + } + await this._mutex; + this._ended = true; + return this._encoder.close(); + } + /** + * Create a new CAR writer "channel" which consists of a + * `{ writer:CarWriter, out:AsyncIterable }` pair. + * + * @async + * @static + * @memberof CarWriter + * @param {CID[] | CID | void} roots + * @returns {WriterChannel} The channel takes the form of + * `{ writer:CarWriter, out:AsyncIterable }`. + */ + static create(roots) { + roots = toRoots(roots); + const { encoder, iterator } = encodeWriter(); + const writer = new _CarWriter(roots, encoder); + const out = new CarWriterOut(iterator); + return { writer, out }; + } + /** + * Create a new CAR appender "channel" which consists of a + * `{ writer:CarWriter, out:AsyncIterable }` pair. + * This appender does not consider roots and does not produce a CAR header. + * It is designed to append blocks to an _existing_ CAR archive. It is + * expected that `out` will be concatenated onto the end of an existing + * archive that already has a properly formatted header. + * + * @async + * @static + * @memberof CarWriter + * @returns {WriterChannel} The channel takes the form of + * `{ writer:CarWriter, out:AsyncIterable }`. 
+ */ + static createAppender() { + const { encoder, iterator } = encodeWriter(); + encoder.setRoots = () => Promise.resolve(); + const writer = new _CarWriter([], encoder); + const out = new CarWriterOut(iterator); + return { writer, out }; + } + /** + * Update the list of roots in the header of an existing CAR as represented + * in a Uint8Array. + * + * This operation is an _overwrite_, the total length of the CAR will not be + * modified. A rejection will occur if the new header will not be the same + * length as the existing header, in which case the CAR will not be modified. + * It is the responsibility of the user to ensure that the roots being + * replaced encode as the same length as the new roots. + * + * The byte array passed in an argument will be modified and also returned + * upon successful modification. + * + * @async + * @static + * @memberof CarWriter + * @param {Uint8Array} bytes + * @param {CID[]} roots - A new list of roots to replace the existing list in + * the CAR header. The new header must take up the same number of bytes as the + * existing header, so the roots should collectively be the same byte length + * as the existing roots. 
+ * @returns {Promise} + */ + static async updateRootsInBytes(bytes, roots) { + const reader = bytesReader(bytes); + await readHeader(reader); + const newHeader = createHeader(roots); + if (Number(reader.pos) !== newHeader.length) { + throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${reader.pos} bytes, new header is ${newHeader.length} bytes)`); + } + bytes.set(newHeader, 0); + return bytes; + } +}; +var CarWriterOut = class { + /** + * @param {AsyncIterator} iterator + */ + constructor(iterator) { + this._iterator = iterator; + } + [Symbol.asyncIterator]() { + if (this._iterating) { + throw new Error("Multiple iterator not supported"); + } + this._iterating = true; + return this._iterator; + } +}; +function encodeWriter() { + const iw = create3(); + const { writer, iterator } = iw; + const encoder = createEncoder(writer); + return { encoder, iterator }; +} +function toRoots(roots) { + if (roots === void 0) { + return []; + } + if (!Array.isArray(roots)) { + const cid = CID2.asCID(roots); + if (!cid) { + throw new TypeError("roots must be a single CID or an array of CIDs"); + } + return [cid]; + } + const _roots = []; + for (const root of roots) { + const _root = CID2.asCID(root); + if (!_root) { + throw new TypeError("roots must be a single CID or an array of CIDs"); + } + _roots.push(_root); + } + return _roots; +} + +// node_modules/@ipld/car/src/writer.js +var fsread3 = (0, import_util2.promisify)(import_fs3.default.read); +var fswrite = (0, import_util2.promisify)(import_fs3.default.write); +var CarWriter2 = class extends CarWriter { + /** + * Update the list of roots in the header of an existing CAR file. The first + * argument must be a file descriptor for CAR file that is open in read and + * write mode (not append), e.g. `fs.open` or `fs.promises.open` with `'r+'` + * mode. + * + * This operation is an _overwrite_, the total length of the CAR will not be + * modified. 
A rejection will occur if the new header will not be the same + * length as the existing header, in which case the CAR will not be modified. + * It is the responsibility of the user to ensure that the roots being + * replaced encode as the same length as the new roots. + * + * This function is **only available in Node.js** and not a browser + * environment. + * + * @async + * @static + * @memberof CarWriter + * @param {fs.promises.FileHandle | number} fd - A file descriptor from the + * Node.js `fs` module. Either an integer, from `fs.open()` or a `FileHandle` + * from `fs.promises.open()`. + * @param {CID[]} roots - A new list of roots to replace the existing list in + * the CAR header. The new header must take up the same number of bytes as the + * existing header, so the roots should collectively be the same byte length + * as the existing roots. + * @returns {Promise} + */ + static async updateRootsInFile(fd, roots) { + const chunkSize = 256; + let bytes; + let offset = 0; + let readChunk; + if (typeof fd === "number") { + readChunk = async () => (await fsread3(fd, bytes, 0, chunkSize, offset)).bytesRead; + } else if (typeof fd === "object" && typeof fd.read === "function") { + readChunk = async () => (await fd.read(bytes, 0, chunkSize, offset)).bytesRead; + } else { + throw new TypeError("Bad fd"); + } + const fdReader = chunkReader(async () => { + bytes = new Uint8Array(chunkSize); + const read4 = await readChunk(); + offset += read4; + return read4 < chunkSize ? 
bytes.subarray(0, read4) : bytes; + }); + await readHeader(fdReader); + const newHeader = createHeader(roots); + if (fdReader.pos !== newHeader.length) { + throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${fdReader.pos} bytes, new header is ${newHeader.length} bytes)`); + } + if (typeof fd === "number") { + await fswrite(fd, newHeader, 0, newHeader.length, 0); + } else if (typeof fd === "object" && typeof fd.read === "function") { + await fd.write(newHeader, 0, newHeader.length, 0); + } + } +}; + +// node_modules/it-drain/dist/src/index.js +function isAsyncIterable(thing) { + return thing[Symbol.asyncIterator] != null; +} +function drain(source) { + if (isAsyncIterable(source)) { + return (async () => { + for await (const _ of source) { + } + })(); + } else { + for (const _ of source) { + } + } +} +var src_default = drain; + +// node_modules/it-peekable/dist/src/index.js +function peekable(iterable) { + const [iterator, symbol2] = iterable[Symbol.asyncIterator] != null ? 
[iterable[Symbol.asyncIterator](), Symbol.asyncIterator] : [iterable[Symbol.iterator](), Symbol.iterator]; + const queue = []; + return { + peek: () => { + return iterator.next(); + }, + push: (value) => { + queue.push(value); + }, + next: () => { + if (queue.length > 0) { + return { + done: false, + value: queue.shift() + }; + } + return iterator.next(); + }, + [symbol2]() { + return this; + } + }; +} +var src_default2 = peekable; + +// node_modules/it-map/dist/src/index.js +function isAsyncIterable2(thing) { + return thing[Symbol.asyncIterator] != null; +} +function map(source, func) { + if (isAsyncIterable2(source)) { + return async function* () { + for await (const val of source) { + yield func(val); + } + }(); + } + const peekable2 = src_default2(source); + const { value, done } = peekable2.next(); + if (done === true) { + return function* () { + }(); + } + const res = func(value); + if (typeof res.then === "function") { + return async function* () { + yield await res; + for await (const val of peekable2) { + yield func(val); + } + }(); + } + const fn = func; + return function* () { + yield res; + for (const val of peekable2) { + yield fn(val); + } + }(); +} +var src_default3 = map; + +// node_modules/p-defer/index.js +function pDefer() { + const deferred = {}; + deferred.promise = new Promise((resolve6, reject) => { + deferred.resolve = resolve6; + deferred.reject = reject; + }); + return deferred; +} + +// node_modules/eventemitter3/index.mjs +var import_index = __toESM(require_eventemitter3(), 1); + +// node_modules/p-timeout/index.js +var TimeoutError = class extends Error { + constructor(message2) { + super(message2); + this.name = "TimeoutError"; + } +}; +var AbortError = class extends Error { + constructor(message2) { + super(); + this.name = "AbortError"; + this.message = message2; + } +}; +var getDOMException = (errorMessage) => globalThis.DOMException === void 0 ? 
new AbortError(errorMessage) : new DOMException(errorMessage); +var getAbortedReason = (signal) => { + const reason = signal.reason === void 0 ? getDOMException("This operation was aborted.") : signal.reason; + return reason instanceof Error ? reason : getDOMException(reason); +}; +function pTimeout(promise, milliseconds, fallback, options) { + let timer; + const cancelablePromise = new Promise((resolve6, reject) => { + if (typeof milliseconds !== "number" || Math.sign(milliseconds) !== 1) { + throw new TypeError(`Expected \`milliseconds\` to be a positive number, got \`${milliseconds}\``); + } + if (milliseconds === Number.POSITIVE_INFINITY) { + resolve6(promise); + return; + } + options = { + customTimers: { setTimeout, clearTimeout }, + ...options + }; + if (options.signal) { + const { signal } = options; + if (signal.aborted) { + reject(getAbortedReason(signal)); + } + signal.addEventListener("abort", () => { + reject(getAbortedReason(signal)); + }); + } + timer = options.customTimers.setTimeout.call(void 0, () => { + if (typeof fallback === "function") { + try { + resolve6(fallback()); + } catch (error) { + reject(error); + } + return; + } + const message2 = typeof fallback === "string" ? fallback : `Promise timed out after ${milliseconds} milliseconds`; + const timeoutError = fallback instanceof Error ? 
fallback : new TimeoutError(message2); + if (typeof promise.cancel === "function") { + promise.cancel(); + } + reject(timeoutError); + }, milliseconds); + (async () => { + try { + resolve6(await promise); + } catch (error) { + reject(error); + } finally { + options.customTimers.clearTimeout.call(void 0, timer); + } + })(); + }); + cancelablePromise.clear = () => { + clearTimeout(timer); + timer = void 0; + }; + return cancelablePromise; +} + +// node_modules/p-queue/dist/lower-bound.js +function lowerBound(array, value, comparator) { + let first2 = 0; + let count = array.length; + while (count > 0) { + const step = Math.trunc(count / 2); + let it = first2 + step; + if (comparator(array[it], value) <= 0) { + first2 = ++it; + count -= step + 1; + } else { + count = step; + } + } + return first2; +} + +// node_modules/p-queue/dist/priority-queue.js +var __classPrivateFieldGet = function(receiver, state, kind, f) { + if (kind === "a" && !f) + throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) + throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _PriorityQueue_queue; +var PriorityQueue = class { + constructor() { + _PriorityQueue_queue.set(this, []); + } + enqueue(run, options) { + options = { + priority: 0, + ...options + }; + const element = { + priority: options.priority, + run + }; + if (this.size && __classPrivateFieldGet(this, _PriorityQueue_queue, "f")[this.size - 1].priority >= options.priority) { + __classPrivateFieldGet(this, _PriorityQueue_queue, "f").push(element); + return; + } + const index = lowerBound(__classPrivateFieldGet(this, _PriorityQueue_queue, "f"), element, (a, b) => b.priority - a.priority); + __classPrivateFieldGet(this, _PriorityQueue_queue, "f").splice(index, 0, element); + } + dequeue() { + const item = __classPrivateFieldGet(this, _PriorityQueue_queue, "f").shift(); + return item === null || item === void 0 ? void 0 : item.run; + } + filter(options) { + return __classPrivateFieldGet(this, _PriorityQueue_queue, "f").filter((element) => element.priority === options.priority).map((element) => element.run); + } + get size() { + return __classPrivateFieldGet(this, _PriorityQueue_queue, "f").length; + } +}; +_PriorityQueue_queue = /* @__PURE__ */ new WeakMap(); +var priority_queue_default = PriorityQueue; + +// node_modules/p-queue/dist/index.js +var __classPrivateFieldSet = function(receiver, state, value, kind, f) { + if (kind === "m") + throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) + throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) + throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value), value; +}; +var __classPrivateFieldGet2 = function(receiver, state, kind, f) { + if (kind === "a" && !f) + throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) + throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _PQueue_instances; +var _PQueue_carryoverConcurrencyCount; +var _PQueue_isIntervalIgnored; +var _PQueue_intervalCount; +var _PQueue_intervalCap; +var _PQueue_interval; +var _PQueue_intervalEnd; +var _PQueue_intervalId; +var _PQueue_timeoutId; +var _PQueue_queue; +var _PQueue_queueClass; +var _PQueue_pending; +var _PQueue_concurrency; +var _PQueue_isPaused; +var _PQueue_throwOnTimeout; +var _PQueue_doesIntervalAllowAnother_get; +var _PQueue_doesConcurrentAllowAnother_get; +var _PQueue_next; +var _PQueue_onResumeInterval; +var _PQueue_isIntervalPaused_get; +var _PQueue_tryToStartAnother; +var _PQueue_initializeIntervalIfNeeded; +var _PQueue_onInterval; +var _PQueue_processQueue; +var _PQueue_throwOnAbort; +var _PQueue_onEvent; +var AbortError2 = class extends Error { +}; +var PQueue = class extends import_index.default { + // TODO: The `throwOnTimeout` option should affect the return types of `add()` and `addAll()` + constructor(options) { + var _a, _b, _c, _d; + super(); + _PQueue_instances.add(this); + _PQueue_carryoverConcurrencyCount.set(this, void 0); + _PQueue_isIntervalIgnored.set(this, void 0); + _PQueue_intervalCount.set(this, 0); + _PQueue_intervalCap.set(this, void 0); + _PQueue_interval.set(this, void 0); + _PQueue_intervalEnd.set(this, 0); + _PQueue_intervalId.set(this, void 0); + _PQueue_timeoutId.set(this, void 0); + _PQueue_queue.set(this, void 0); + _PQueue_queueClass.set(this, void 0); + _PQueue_pending.set(this, 0); + 
_PQueue_concurrency.set(this, void 0); + _PQueue_isPaused.set(this, void 0); + _PQueue_throwOnTimeout.set(this, void 0); + Object.defineProperty(this, "timeout", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + options = { + carryoverConcurrencyCount: false, + intervalCap: Number.POSITIVE_INFINITY, + interval: 0, + concurrency: Number.POSITIVE_INFINITY, + autoStart: true, + queueClass: priority_queue_default, + ...options + }; + if (!(typeof options.intervalCap === "number" && options.intervalCap >= 1)) { + throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${(_b = (_a = options.intervalCap) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : ""}\` (${typeof options.intervalCap})`); + } + if (options.interval === void 0 || !(Number.isFinite(options.interval) && options.interval >= 0)) { + throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${(_d = (_c = options.interval) === null || _c === void 0 ? void 0 : _c.toString()) !== null && _d !== void 0 ? 
_d : ""}\` (${typeof options.interval})`); + } + __classPrivateFieldSet(this, _PQueue_carryoverConcurrencyCount, options.carryoverConcurrencyCount, "f"); + __classPrivateFieldSet(this, _PQueue_isIntervalIgnored, options.intervalCap === Number.POSITIVE_INFINITY || options.interval === 0, "f"); + __classPrivateFieldSet(this, _PQueue_intervalCap, options.intervalCap, "f"); + __classPrivateFieldSet(this, _PQueue_interval, options.interval, "f"); + __classPrivateFieldSet(this, _PQueue_queue, new options.queueClass(), "f"); + __classPrivateFieldSet(this, _PQueue_queueClass, options.queueClass, "f"); + this.concurrency = options.concurrency; + this.timeout = options.timeout; + __classPrivateFieldSet(this, _PQueue_throwOnTimeout, options.throwOnTimeout === true, "f"); + __classPrivateFieldSet(this, _PQueue_isPaused, options.autoStart === false, "f"); + } + get concurrency() { + return __classPrivateFieldGet2(this, _PQueue_concurrency, "f"); + } + set concurrency(newConcurrency) { + if (!(typeof newConcurrency === "number" && newConcurrency >= 1)) { + throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`); + } + __classPrivateFieldSet(this, _PQueue_concurrency, newConcurrency, "f"); + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this); + } + async add(function_, options = {}) { + options = { + timeout: this.timeout, + throwOnTimeout: __classPrivateFieldGet2(this, _PQueue_throwOnTimeout, "f"), + ...options + }; + return new Promise((resolve6, reject) => { + __classPrivateFieldGet2(this, _PQueue_queue, "f").enqueue(async () => { + var _a; + var _b, _c; + __classPrivateFieldSet(this, _PQueue_pending, (_b = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _b++, _b), "f"); + __classPrivateFieldSet(this, _PQueue_intervalCount, (_c = __classPrivateFieldGet2(this, _PQueue_intervalCount, "f"), _c++, _c), "f"); + try { + if ((_a = options.signal) === null || _a === void 
0 ? void 0 : _a.aborted) { + throw new AbortError2("The task was aborted."); + } + let operation = function_({ signal: options.signal }); + if (options.timeout) { + operation = pTimeout(Promise.resolve(operation), options.timeout); + } + if (options.signal) { + operation = Promise.race([operation, __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_throwOnAbort).call(this, options.signal)]); + } + const result = await operation; + resolve6(result); + this.emit("completed", result); + } catch (error) { + if (error instanceof TimeoutError && !options.throwOnTimeout) { + resolve6(); + return; + } + reject(error); + this.emit("error", error); + } finally { + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_next).call(this); + } + }, options); + this.emit("add"); + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this); + }); + } + async addAll(functions, options) { + return Promise.all(functions.map(async (function_) => this.add(function_, options))); + } + /** + Start (or resume) executing enqueued tasks within concurrency limit. No need to call this if queue is not paused (via `options.autoStart = false` or by `.pause()` method.) + */ + start() { + if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) { + return this; + } + __classPrivateFieldSet(this, _PQueue_isPaused, false, "f"); + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this); + return this; + } + /** + Put queue execution on hold. + */ + pause() { + __classPrivateFieldSet(this, _PQueue_isPaused, true, "f"); + } + /** + Clear the queue. + */ + clear() { + __classPrivateFieldSet(this, _PQueue_queue, new (__classPrivateFieldGet2(this, _PQueue_queueClass, "f"))(), "f"); + } + /** + Can be called multiple times. Useful if you for example add additional items at a later time. + + @returns A promise that settles when the queue becomes empty. 
+ */ + async onEmpty() { + if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) { + return; + } + await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "empty"); + } + /** + @returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`. + + If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item. + + Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation. + */ + async onSizeLessThan(limit) { + if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit) { + return; + } + await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "next", () => __classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit); + } + /** + The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet. + + @returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`. + */ + async onIdle() { + if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) { + return; + } + await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "idle"); + } + /** + Size of the queue, the number of queued items waiting to run. + */ + get size() { + return __classPrivateFieldGet2(this, _PQueue_queue, "f").size; + } + /** + Size of the queue, filtered by the given options. + + For example, this can be used to find the number of items remaining in the queue with a specific priority level. 
+ */ + sizeBy(options) { + return __classPrivateFieldGet2(this, _PQueue_queue, "f").filter(options).length; + } + /** + Number of running items (no longer in the queue). + */ + get pending() { + return __classPrivateFieldGet2(this, _PQueue_pending, "f"); + } + /** + Whether the queue is currently paused. + */ + get isPaused() { + return __classPrivateFieldGet2(this, _PQueue_isPaused, "f"); + } +}; +_PQueue_carryoverConcurrencyCount = /* @__PURE__ */ new WeakMap(), _PQueue_isIntervalIgnored = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCount = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCap = /* @__PURE__ */ new WeakMap(), _PQueue_interval = /* @__PURE__ */ new WeakMap(), _PQueue_intervalEnd = /* @__PURE__ */ new WeakMap(), _PQueue_intervalId = /* @__PURE__ */ new WeakMap(), _PQueue_timeoutId = /* @__PURE__ */ new WeakMap(), _PQueue_queue = /* @__PURE__ */ new WeakMap(), _PQueue_queueClass = /* @__PURE__ */ new WeakMap(), _PQueue_pending = /* @__PURE__ */ new WeakMap(), _PQueue_concurrency = /* @__PURE__ */ new WeakMap(), _PQueue_isPaused = /* @__PURE__ */ new WeakMap(), _PQueue_throwOnTimeout = /* @__PURE__ */ new WeakMap(), _PQueue_instances = /* @__PURE__ */ new WeakSet(), _PQueue_doesIntervalAllowAnother_get = function _PQueue_doesIntervalAllowAnother_get2() { + return __classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalCount, "f") < __classPrivateFieldGet2(this, _PQueue_intervalCap, "f"); +}, _PQueue_doesConcurrentAllowAnother_get = function _PQueue_doesConcurrentAllowAnother_get2() { + return __classPrivateFieldGet2(this, _PQueue_pending, "f") < __classPrivateFieldGet2(this, _PQueue_concurrency, "f"); +}, _PQueue_next = function _PQueue_next2() { + var _a; + __classPrivateFieldSet(this, _PQueue_pending, (_a = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _a--, _a), "f"); + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this); + this.emit("next"); +}, 
_PQueue_onResumeInterval = function _PQueue_onResumeInterval2() { + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this); + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this); + __classPrivateFieldSet(this, _PQueue_timeoutId, void 0, "f"); +}, _PQueue_isIntervalPaused_get = function _PQueue_isIntervalPaused_get2() { + const now = Date.now(); + if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f") === void 0) { + const delay = __classPrivateFieldGet2(this, _PQueue_intervalEnd, "f") - now; + if (delay < 0) { + __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? __classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f"); + } else { + if (__classPrivateFieldGet2(this, _PQueue_timeoutId, "f") === void 0) { + __classPrivateFieldSet(this, _PQueue_timeoutId, setTimeout(() => { + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onResumeInterval).call(this); + }, delay), "f"); + } + return true; + } + } + return false; +}, _PQueue_tryToStartAnother = function _PQueue_tryToStartAnother2() { + if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) { + if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f")) { + clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f")); + } + __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f"); + this.emit("empty"); + if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0) { + this.emit("idle"); + } + return false; + } + if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) { + const canInitializeInterval = !__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_isIntervalPaused_get); + if (__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesIntervalAllowAnother_get) && __classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesConcurrentAllowAnother_get)) { + const job = 
__classPrivateFieldGet2(this, _PQueue_queue, "f").dequeue(); + if (!job) { + return false; + } + this.emit("active"); + job(); + if (canInitializeInterval) { + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this); + } + return true; + } + } + return false; +}, _PQueue_initializeIntervalIfNeeded = function _PQueue_initializeIntervalIfNeeded2() { + if (__classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalId, "f") !== void 0) { + return; + } + __classPrivateFieldSet(this, _PQueue_intervalId, setInterval(() => { + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this); + }, __classPrivateFieldGet2(this, _PQueue_interval, "f")), "f"); + __classPrivateFieldSet(this, _PQueue_intervalEnd, Date.now() + __classPrivateFieldGet2(this, _PQueue_interval, "f"), "f"); +}, _PQueue_onInterval = function _PQueue_onInterval2() { + if (__classPrivateFieldGet2(this, _PQueue_intervalCount, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_intervalId, "f")) { + clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f")); + __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f"); + } + __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? 
__classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f"); + __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this); +}, _PQueue_processQueue = function _PQueue_processQueue2() { + while (__classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this)) { + } +}, _PQueue_throwOnAbort = async function _PQueue_throwOnAbort2(signal) { + return new Promise((_resolve, reject) => { + signal.addEventListener("abort", () => { + reject(new AbortError2("The task was aborted.")); + }, { once: true }); + }); +}, _PQueue_onEvent = async function _PQueue_onEvent2(event, filter3) { + return new Promise((resolve6) => { + const listener = () => { + if (filter3 && !filter3()) { + return; + } + this.off(event, listener); + resolve6(); + }; + this.on(event, listener); + }); +}; +var dist_default = PQueue; + +// node_modules/@ipld/dag-pb/src/index.js +var src_exports2 = {}; +__export(src_exports2, { + code: () => code2, + createLink: () => createLink, + createNode: () => createNode, + decode: () => decode11, + encode: () => encode7, + name: () => name, + prepare: () => prepare, + validate: () => validate +}); + +// node_modules/@ipld/dag-pb/src/pb-decode.js +var textDecoder2 = new TextDecoder(); +function decodeVarint2(bytes, offset) { + let v = 0; + for (let shift = 0; ; shift += 7) { + if (shift >= 64) { + throw new Error("protobuf: varint overflow"); + } + if (offset >= bytes.length) { + throw new Error("protobuf: unexpected end of data"); + } + const b = bytes[offset++]; + v += shift < 28 ? 
(b & 127) << shift : (b & 127) * 2 ** shift; + if (b < 128) { + break; + } + } + return [v, offset]; +} +function decodeBytes(bytes, offset) { + let byteLen; + [byteLen, offset] = decodeVarint2(bytes, offset); + const postOffset = offset + byteLen; + if (byteLen < 0 || postOffset < 0) { + throw new Error("protobuf: invalid length"); + } + if (postOffset > bytes.length) { + throw new Error("protobuf: unexpected end of data"); + } + return [bytes.subarray(offset, postOffset), postOffset]; +} +function decodeKey(bytes, index) { + let wire; + [wire, index] = decodeVarint2(bytes, index); + return [wire & 7, wire >> 3, index]; +} +function decodeLink(bytes) { + const link = {}; + const l = bytes.length; + let index = 0; + while (index < l) { + let wireType, fieldNum; + [wireType, fieldNum, index] = decodeKey(bytes, index); + if (fieldNum === 1) { + if (link.Hash) { + throw new Error("protobuf: (PBLink) duplicate Hash section"); + } + if (wireType !== 2) { + throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Hash`); + } + if (link.Name !== void 0) { + throw new Error("protobuf: (PBLink) invalid order, found Name before Hash"); + } + if (link.Tsize !== void 0) { + throw new Error("protobuf: (PBLink) invalid order, found Tsize before Hash"); + } + [link.Hash, index] = decodeBytes(bytes, index); + } else if (fieldNum === 2) { + if (link.Name !== void 0) { + throw new Error("protobuf: (PBLink) duplicate Name section"); + } + if (wireType !== 2) { + throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Name`); + } + if (link.Tsize !== void 0) { + throw new Error("protobuf: (PBLink) invalid order, found Tsize before Name"); + } + let byts; + [byts, index] = decodeBytes(bytes, index); + link.Name = textDecoder2.decode(byts); + } else if (fieldNum === 3) { + if (link.Tsize !== void 0) { + throw new Error("protobuf: (PBLink) duplicate Tsize section"); + } + if (wireType !== 0) { + throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for 
Tsize`); + } + [link.Tsize, index] = decodeVarint2(bytes, index); + } else { + throw new Error(`protobuf: (PBLink) invalid fieldNumber, expected 1, 2 or 3, got ${fieldNum}`); + } + } + if (index > l) { + throw new Error("protobuf: (PBLink) unexpected end of data"); + } + return link; +} +function decodeNode(bytes) { + const l = bytes.length; + let index = 0; + let links = void 0; + let linksBeforeData = false; + let data = void 0; + while (index < l) { + let wireType, fieldNum; + [wireType, fieldNum, index] = decodeKey(bytes, index); + if (wireType !== 2) { + throw new Error(`protobuf: (PBNode) invalid wireType, expected 2, got ${wireType}`); + } + if (fieldNum === 1) { + if (data) { + throw new Error("protobuf: (PBNode) duplicate Data section"); + } + [data, index] = decodeBytes(bytes, index); + if (links) { + linksBeforeData = true; + } + } else if (fieldNum === 2) { + if (linksBeforeData) { + throw new Error("protobuf: (PBNode) duplicate Links section"); + } else if (!links) { + links = []; + } + let byts; + [byts, index] = decodeBytes(bytes, index); + links.push(decodeLink(byts)); + } else { + throw new Error(`protobuf: (PBNode) invalid fieldNumber, expected 1 or 2, got ${fieldNum}`); + } + } + if (index > l) { + throw new Error("protobuf: (PBNode) unexpected end of data"); + } + const node = {}; + if (data) { + node.Data = data; + } + node.Links = links || []; + return node; +} + +// node_modules/@ipld/dag-pb/src/pb-encode.js +var textEncoder2 = new TextEncoder(); +var maxInt32 = 2 ** 32; +var maxUInt32 = 2 ** 31; +function encodeLink(link, bytes) { + let i = bytes.length; + if (typeof link.Tsize === "number") { + if (link.Tsize < 0) { + throw new Error("Tsize cannot be negative"); + } + if (!Number.isSafeInteger(link.Tsize)) { + throw new Error("Tsize too large for encoding"); + } + i = encodeVarint(bytes, i, link.Tsize) - 1; + bytes[i] = 24; + } + if (typeof link.Name === "string") { + const nameBytes = textEncoder2.encode(link.Name); + i -= 
nameBytes.length; + bytes.set(nameBytes, i); + i = encodeVarint(bytes, i, nameBytes.length) - 1; + bytes[i] = 18; + } + if (link.Hash) { + i -= link.Hash.length; + bytes.set(link.Hash, i); + i = encodeVarint(bytes, i, link.Hash.length) - 1; + bytes[i] = 10; + } + return bytes.length - i; +} +function encodeNode(node) { + const size = sizeNode(node); + const bytes = new Uint8Array(size); + let i = size; + if (node.Data) { + i -= node.Data.length; + bytes.set(node.Data, i); + i = encodeVarint(bytes, i, node.Data.length) - 1; + bytes[i] = 10; + } + if (node.Links) { + for (let index = node.Links.length - 1; index >= 0; index--) { + const size2 = encodeLink(node.Links[index], bytes.subarray(0, i)); + i -= size2; + i = encodeVarint(bytes, i, size2) - 1; + bytes[i] = 18; + } + } + return bytes; +} +function sizeLink(link) { + let n = 0; + if (link.Hash) { + const l = link.Hash.length; + n += 1 + l + sov(l); + } + if (typeof link.Name === "string") { + const l = textEncoder2.encode(link.Name).length; + n += 1 + l + sov(l); + } + if (typeof link.Tsize === "number") { + n += 1 + sov(link.Tsize); + } + return n; +} +function sizeNode(node) { + let n = 0; + if (node.Data) { + const l = node.Data.length; + n += 1 + l + sov(l); + } + if (node.Links) { + for (const link of node.Links) { + const l = sizeLink(link); + n += 1 + l + sov(l); + } + } + return n; +} +function encodeVarint(bytes, offset, v) { + offset -= sov(v); + const base3 = offset; + while (v >= maxUInt32) { + bytes[offset++] = v & 127 | 128; + v /= 128; + } + while (v >= 128) { + bytes[offset++] = v & 127 | 128; + v >>>= 7; + } + bytes[offset] = v; + return base3; +} +function sov(x) { + if (x % 2 === 0) { + x++; + } + return Math.floor((len64(x) + 6) / 7); +} +function len64(x) { + let n = 0; + if (x >= maxInt32) { + x = Math.floor(x / maxInt32); + n = 32; + } + if (x >= 1 << 16) { + x >>>= 16; + n += 16; + } + if (x >= 1 << 8) { + x >>>= 8; + n += 8; + } + return n + len8tab[x]; +} +var len8tab = [ + 0, + 1, + 2, 
+ 2, + 3, + 3, + 3, + 3, + 4, + 4, + 4, + 4, + 4, + 4, + 4, + 4, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 5, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 6, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8 +]; + +// node_modules/@ipld/dag-pb/src/util.js +var pbNodeProperties = ["Data", "Links"]; +var pbLinkProperties = ["Hash", "Name", "Tsize"]; +var textEncoder3 = new TextEncoder(); +function linkComparator(a, b) { + if (a === b) { + return 0; + } + const abuf = a.Name ? textEncoder3.encode(a.Name) : []; + const bbuf = b.Name ? textEncoder3.encode(b.Name) : []; + let x = abuf.length; + let y = bbuf.length; + for (let i = 0, len = Math.min(x, y); i < len; ++i) { + if (abuf[i] !== bbuf[i]) { + x = abuf[i]; + y = bbuf[i]; + break; + } + } + return x < y ? -1 : y < x ? 
1 : 0; +} +function hasOnlyProperties(node, properties) { + return !Object.keys(node).some((p) => !properties.includes(p)); +} +function asLink(link) { + if (typeof link.asCID === "object") { + const Hash = CID2.asCID(link); + if (!Hash) { + throw new TypeError("Invalid DAG-PB form"); + } + return { Hash }; + } + if (typeof link !== "object" || Array.isArray(link)) { + throw new TypeError("Invalid DAG-PB form"); + } + const pbl = {}; + if (link.Hash) { + let cid = CID2.asCID(link.Hash); + try { + if (!cid) { + if (typeof link.Hash === "string") { + cid = CID2.parse(link.Hash); + } else if (link.Hash instanceof Uint8Array) { + cid = CID2.decode(link.Hash); + } + } + } catch (e) { + throw new TypeError(`Invalid DAG-PB form: ${e.message}`); + } + if (cid) { + pbl.Hash = cid; + } + } + if (!pbl.Hash) { + throw new TypeError("Invalid DAG-PB form"); + } + if (typeof link.Name === "string") { + pbl.Name = link.Name; + } + if (typeof link.Tsize === "number") { + pbl.Tsize = link.Tsize; + } + return pbl; +} +function prepare(node) { + if (node instanceof Uint8Array || typeof node === "string") { + node = { Data: node }; + } + if (typeof node !== "object" || Array.isArray(node)) { + throw new TypeError("Invalid DAG-PB form"); + } + const pbn = {}; + if (node.Data !== void 0) { + if (typeof node.Data === "string") { + pbn.Data = textEncoder3.encode(node.Data); + } else if (node.Data instanceof Uint8Array) { + pbn.Data = node.Data; + } else { + throw new TypeError("Invalid DAG-PB form"); + } + } + if (node.Links !== void 0) { + if (Array.isArray(node.Links)) { + pbn.Links = node.Links.map(asLink); + pbn.Links.sort(linkComparator); + } else { + throw new TypeError("Invalid DAG-PB form"); + } + } else { + pbn.Links = []; + } + return pbn; +} +function validate(node) { + if (!node || typeof node !== "object" || Array.isArray(node) || node instanceof Uint8Array || node["/"] && node["/"] === node.bytes) { + throw new TypeError("Invalid DAG-PB form"); + } + if 
(!hasOnlyProperties(node, pbNodeProperties)) { + throw new TypeError("Invalid DAG-PB form (extraneous properties)"); + } + if (node.Data !== void 0 && !(node.Data instanceof Uint8Array)) { + throw new TypeError("Invalid DAG-PB form (Data must be bytes)"); + } + if (!Array.isArray(node.Links)) { + throw new TypeError("Invalid DAG-PB form (Links must be a list)"); + } + for (let i = 0; i < node.Links.length; i++) { + const link = node.Links[i]; + if (!link || typeof link !== "object" || Array.isArray(link) || link instanceof Uint8Array || link["/"] && link["/"] === link.bytes) { + throw new TypeError("Invalid DAG-PB form (bad link)"); + } + if (!hasOnlyProperties(link, pbLinkProperties)) { + throw new TypeError("Invalid DAG-PB form (extraneous properties on link)"); + } + if (link.Hash === void 0) { + throw new TypeError("Invalid DAG-PB form (link must have a Hash)"); + } + if (link.Hash == null || !link.Hash["/"] || link.Hash["/"] !== link.Hash.bytes) { + throw new TypeError("Invalid DAG-PB form (link Hash must be a CID)"); + } + if (link.Name !== void 0 && typeof link.Name !== "string") { + throw new TypeError("Invalid DAG-PB form (link Name must be a string)"); + } + if (link.Tsize !== void 0) { + if (typeof link.Tsize !== "number" || link.Tsize % 1 !== 0) { + throw new TypeError("Invalid DAG-PB form (link Tsize must be an integer)"); + } + if (link.Tsize < 0) { + throw new TypeError("Invalid DAG-PB form (link Tsize cannot be negative)"); + } + } + if (i > 0 && linkComparator(link, node.Links[i - 1]) === -1) { + throw new TypeError("Invalid DAG-PB form (links must be sorted by Name bytes)"); + } + } +} +function createNode(data, links = []) { + return prepare({ Data: data, Links: links }); +} +function createLink(name4, size, cid) { + return asLink({ Hash: cid, Name: name4, Tsize: size }); +} + +// node_modules/@ipld/dag-pb/src/index.js +var name = "dag-pb"; +var code2 = 112; +function encode7(node) { + validate(node); + const pbn = {}; + if (node.Links) { + 
pbn.Links = node.Links.map((l) => { + const link = {}; + if (l.Hash) { + link.Hash = l.Hash.bytes; + } + if (l.Name !== void 0) { + link.Name = l.Name; + } + if (l.Tsize !== void 0) { + link.Tsize = l.Tsize; + } + return link; + }); + } + if (node.Data) { + pbn.Data = node.Data; + } + return encodeNode(pbn); +} +function decode11(bytes) { + const pbn = decodeNode(bytes); + const node = {}; + if (pbn.Data) { + node.Data = pbn.Data; + } + if (pbn.Links) { + node.Links = pbn.Links.map((l) => { + const link = {}; + try { + link.Hash = CID2.decode(l.Hash); + } catch (e) { + } + if (!link.Hash) { + throw new Error("Invalid Hash field found in link, expected CID"); + } + if (l.Name !== void 0) { + link.Name = l.Name; + } + if (l.Tsize !== void 0) { + link.Tsize = l.Tsize; + } + return link; + }); + } + return node; +} + +// node_modules/cborg/lib/json/encode.js +var JSONEncoder = class extends Array { + constructor() { + super(); + this.inRecursive = []; + } + /** + * @param {Bl} buf + */ + prefix(buf2) { + const recurs = this.inRecursive[this.inRecursive.length - 1]; + if (recurs) { + if (recurs.type === Type.array) { + recurs.elements++; + if (recurs.elements !== 1) { + buf2.push([44]); + } + } + if (recurs.type === Type.map) { + recurs.elements++; + if (recurs.elements !== 1) { + if (recurs.elements % 2 === 1) { + buf2.push([44]); + } else { + buf2.push([58]); + } + } + } + } + } + /** + * @param {Bl} buf + * @param {Token} token + */ + [Type.uint.major](buf2, token) { + this.prefix(buf2); + const is2 = String(token.value); + const isa = []; + for (let i = 0; i < is2.length; i++) { + isa[i] = is2.charCodeAt(i); + } + buf2.push(isa); + } + /** + * @param {Bl} buf + * @param {Token} token + */ + [Type.negint.major](buf2, token) { + this[Type.uint.major](buf2, token); + } + /** + * @param {Bl} _buf + * @param {Token} _token + */ + [Type.bytes.major](_buf, _token) { + throw new Error(`${encodeErrPrefix} unsupported type: Uint8Array`); + } + /** + * @param {Bl} buf + * 
@param {Token} token + */ + [Type.string.major](buf2, token) { + this.prefix(buf2); + const byts = fromString(JSON.stringify(token.value)); + buf2.push(byts.length > 32 ? asU8A(byts) : byts); + } + /** + * @param {Bl} buf + * @param {Token} _token + */ + [Type.array.major](buf2, _token) { + this.prefix(buf2); + this.inRecursive.push({ type: Type.array, elements: 0 }); + buf2.push([91]); + } + /** + * @param {Bl} buf + * @param {Token} _token + */ + [Type.map.major](buf2, _token) { + this.prefix(buf2); + this.inRecursive.push({ type: Type.map, elements: 0 }); + buf2.push([123]); + } + /** + * @param {Bl} _buf + * @param {Token} _token + */ + [Type.tag.major](_buf, _token) { + } + /** + * @param {Bl} buf + * @param {Token} token + */ + [Type.float.major](buf2, token) { + if (token.type.name === "break") { + const recurs = this.inRecursive.pop(); + if (recurs) { + if (recurs.type === Type.array) { + buf2.push([93]); + } else if (recurs.type === Type.map) { + buf2.push([125]); + } else { + throw new Error("Unexpected recursive type; this should not happen!"); + } + return; + } + throw new Error("Unexpected break; this should not happen!"); + } + if (token.value === void 0) { + throw new Error(`${encodeErrPrefix} unsupported type: undefined`); + } + this.prefix(buf2); + if (token.type.name === "true") { + buf2.push([116, 114, 117, 101]); + return; + } else if (token.type.name === "false") { + buf2.push([102, 97, 108, 115, 101]); + return; + } else if (token.type.name === "null") { + buf2.push([110, 117, 108, 108]); + return; + } + const is2 = String(token.value); + const isa = []; + let dp = false; + for (let i = 0; i < is2.length; i++) { + isa[i] = is2.charCodeAt(i); + if (!dp && (isa[i] === 46 || isa[i] === 101 || isa[i] === 69)) { + dp = true; + } + } + if (!dp) { + isa.push(46); + isa.push(48); + } + buf2.push(isa); + } +}; + +// node_modules/cborg/lib/json/decode.js +var Tokenizer = class { + /** + * @param {Uint8Array} data + * @param {DecodeOptions} options + */ 
+ constructor(data, options = {}) { + this._pos = 0; + this.data = data; + this.options = options; + this.modeStack = ["value"]; + this.lastToken = ""; + } + pos() { + return this._pos; + } + /** + * @returns {boolean} + */ + done() { + return this._pos >= this.data.length; + } + /** + * @returns {number} + */ + ch() { + return this.data[this._pos]; + } + /** + * @returns {string} + */ + currentMode() { + return this.modeStack[this.modeStack.length - 1]; + } + skipWhitespace() { + let c = this.ch(); + while (c === 32 || c === 9 || c === 13 || c === 10) { + c = this.data[++this._pos]; + } + } + /** + * @param {number[]} str + */ + expect(str) { + if (this.data.length - this._pos < str.length) { + throw new Error(`${decodeErrPrefix} unexpected end of input at position ${this._pos}`); + } + for (let i = 0; i < str.length; i++) { + if (this.data[this._pos++] !== str[i]) { + throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}, expected to find '${String.fromCharCode(...str)}'`); + } + } + } + parseNumber() { + const startPos = this._pos; + let negative = false; + let float = false; + const swallow = (chars) => { + while (!this.done()) { + const ch = this.ch(); + if (chars.includes(ch)) { + this._pos++; + } else { + break; + } + } + }; + if (this.ch() === 45) { + negative = true; + this._pos++; + } + if (this.ch() === 48) { + this._pos++; + if (this.ch() === 46) { + this._pos++; + float = true; + } else { + return new Token(Type.uint, 0, this._pos - startPos); + } + } + swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]); + if (negative && this._pos === startPos + 1) { + throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`); + } + if (!this.done() && this.ch() === 46) { + if (float) { + throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`); + } + float = true; + this._pos++; + swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]); + } + if (!this.done() && (this.ch() === 101 || this.ch() === 69)) { + 
float = true; + this._pos++; + if (!this.done() && (this.ch() === 43 || this.ch() === 45)) { + this._pos++; + } + swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]); + } + const numStr = String.fromCharCode.apply(null, this.data.subarray(startPos, this._pos)); + const num = parseFloat(numStr); + if (float) { + return new Token(Type.float, num, this._pos - startPos); + } + if (this.options.allowBigInt !== true || Number.isSafeInteger(num)) { + return new Token(num >= 0 ? Type.uint : Type.negint, num, this._pos - startPos); + } + return new Token(num >= 0 ? Type.uint : Type.negint, BigInt(numStr), this._pos - startPos); + } + /** + * @returns {Token} + */ + parseString() { + if (this.ch() !== 34) { + throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}; this shouldn't happen`); + } + this._pos++; + for (let i = this._pos, l = 0; i < this.data.length && l < 65536; i++, l++) { + const ch = this.data[i]; + if (ch === 92 || ch < 32 || ch >= 128) { + break; + } + if (ch === 34) { + const str = String.fromCharCode.apply(null, this.data.subarray(this._pos, i)); + this._pos = i + 1; + return new Token(Type.string, str, l); + } + } + const startPos = this._pos; + const chars = []; + const readu4 = () => { + if (this._pos + 4 >= this.data.length) { + throw new Error(`${decodeErrPrefix} unexpected end of unicode escape sequence at position ${this._pos}`); + } + let u4 = 0; + for (let i = 0; i < 4; i++) { + let ch = this.ch(); + if (ch >= 48 && ch <= 57) { + ch -= 48; + } else if (ch >= 97 && ch <= 102) { + ch = ch - 97 + 10; + } else if (ch >= 65 && ch <= 70) { + ch = ch - 65 + 10; + } else { + throw new Error(`${decodeErrPrefix} unexpected unicode escape character at position ${this._pos}`); + } + u4 = u4 * 16 + ch; + this._pos++; + } + return u4; + }; + const readUtf8Char = () => { + const firstByte = this.ch(); + let codePoint = null; + let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 
2 : 1; + if (this._pos + bytesPerSequence > this.data.length) { + throw new Error(`${decodeErrPrefix} unexpected unicode sequence at position ${this._pos}`); + } + let secondByte, thirdByte, fourthByte, tempCodePoint; + switch (bytesPerSequence) { + case 1: + if (firstByte < 128) { + codePoint = firstByte; + } + break; + case 2: + secondByte = this.data[this._pos + 1]; + if ((secondByte & 192) === 128) { + tempCodePoint = (firstByte & 31) << 6 | secondByte & 63; + if (tempCodePoint > 127) { + codePoint = tempCodePoint; + } + } + break; + case 3: + secondByte = this.data[this._pos + 1]; + thirdByte = this.data[this._pos + 2]; + if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) { + tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63; + if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) { + codePoint = tempCodePoint; + } + } + break; + case 4: + secondByte = this.data[this._pos + 1]; + thirdByte = this.data[this._pos + 2]; + fourthByte = this.data[this._pos + 3]; + if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) { + tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63; + if (tempCodePoint > 65535 && tempCodePoint < 1114112) { + codePoint = tempCodePoint; + } + } + } + if (codePoint === null) { + codePoint = 65533; + bytesPerSequence = 1; + } else if (codePoint > 65535) { + codePoint -= 65536; + chars.push(codePoint >>> 10 & 1023 | 55296); + codePoint = 56320 | codePoint & 1023; + } + chars.push(codePoint); + this._pos += bytesPerSequence; + }; + while (!this.done()) { + const ch = this.ch(); + let ch1; + switch (ch) { + case 92: + this._pos++; + if (this.done()) { + throw new Error(`${decodeErrPrefix} unexpected string termination at position ${this._pos}`); + } + ch1 = this.ch(); + this._pos++; + switch (ch1) { + case 34: + case 39: + case 92: + case 47: + chars.push(ch1); + break; + case 98: + 
chars.push(8); + break; + case 116: + chars.push(9); + break; + case 110: + chars.push(10); + break; + case 102: + chars.push(12); + break; + case 114: + chars.push(13); + break; + case 117: + chars.push(readu4()); + break; + default: + throw new Error(`${decodeErrPrefix} unexpected string escape character at position ${this._pos}`); + } + break; + case 34: + this._pos++; + return new Token(Type.string, decodeCodePointsArray(chars), this._pos - startPos); + default: + if (ch < 32) { + throw new Error(`${decodeErrPrefix} invalid control character at position ${this._pos}`); + } else if (ch < 128) { + chars.push(ch); + this._pos++; + } else { + readUtf8Char(); + } + } + } + throw new Error(`${decodeErrPrefix} unexpected end of string at position ${this._pos}`); + } + /** + * @returns {Token} + */ + parseValue() { + switch (this.ch()) { + case 123: + this.modeStack.push("obj-start"); + this._pos++; + return new Token(Type.map, Infinity, 1); + case 91: + this.modeStack.push("array-start"); + this._pos++; + return new Token(Type.array, Infinity, 1); + case 34: { + return this.parseString(); + } + case 110: + this.expect([110, 117, 108, 108]); + return new Token(Type.null, null, 4); + case 102: + this.expect([102, 97, 108, 115, 101]); + return new Token(Type.false, false, 5); + case 116: + this.expect([116, 114, 117, 101]); + return new Token(Type.true, true, 4); + case 45: + case 48: + case 49: + case 50: + case 51: + case 52: + case 53: + case 54: + case 55: + case 56: + case 57: + return this.parseNumber(); + default: + throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}`); + } + } + /** + * @returns {Token} + */ + next() { + this.skipWhitespace(); + switch (this.currentMode()) { + case "value": + this.modeStack.pop(); + return this.parseValue(); + case "array-value": { + this.modeStack.pop(); + if (this.ch() === 93) { + this._pos++; + this.skipWhitespace(); + return new Token(Type.break, void 0, 1); + } + if (this.ch() !== 44) { + throw 
new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting array delimiter but found '${String.fromCharCode(this.ch())}'`); + } + this._pos++; + this.modeStack.push("array-value"); + this.skipWhitespace(); + return this.parseValue(); + } + case "array-start": { + this.modeStack.pop(); + if (this.ch() === 93) { + this._pos++; + this.skipWhitespace(); + return new Token(Type.break, void 0, 1); + } + this.modeStack.push("array-value"); + this.skipWhitespace(); + return this.parseValue(); + } + case "obj-key": + if (this.ch() === 125) { + this.modeStack.pop(); + this._pos++; + this.skipWhitespace(); + return new Token(Type.break, void 0, 1); + } + if (this.ch() !== 44) { + throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting object delimiter but found '${String.fromCharCode(this.ch())}'`); + } + this._pos++; + this.skipWhitespace(); + case "obj-start": { + this.modeStack.pop(); + if (this.ch() === 125) { + this._pos++; + this.skipWhitespace(); + return new Token(Type.break, void 0, 1); + } + const token = this.parseString(); + this.skipWhitespace(); + if (this.ch() !== 58) { + throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting key/value delimiter ':' but found '${String.fromCharCode(this.ch())}'`); + } + this._pos++; + this.modeStack.push("obj-value"); + return token; + } + case "obj-value": { + this.modeStack.pop(); + this.modeStack.push("obj-key"); + this.skipWhitespace(); + return this.parseValue(); + } + default: + throw new Error(`${decodeErrPrefix} unexpected parse state at position ${this._pos}; this shouldn't happen`); + } + } +}; +function decode12(data, options) { + options = Object.assign({ tokenizer: new Tokenizer(data, options) }, options); + return decode(data, options); +} + +// node_modules/multiformats/src/bases/base64.js +var base64_exports = {}; +__export(base64_exports, { + base64: () => base64, + base64pad: () => base64pad, + 
base64url: () => base64url, + base64urlpad: () => base64urlpad +}); +var base64 = rfc46482({ + prefix: "m", + name: "base64", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", + bitsPerChar: 6 +}); +var base64pad = rfc46482({ + prefix: "M", + name: "base64pad", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=", + bitsPerChar: 6 +}); +var base64url = rfc46482({ + prefix: "u", + name: "base64url", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_", + bitsPerChar: 6 +}); +var base64urlpad = rfc46482({ + prefix: "U", + name: "base64urlpad", + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=", + bitsPerChar: 6 +}); + +// node_modules/multiformats/src/codecs/raw.js +var raw_exports = {}; +__export(raw_exports, { + code: () => code3, + decode: () => decode13, + encode: () => encode9, + name: () => name2 +}); +var name2 = "raw"; +var code3 = 85; +var encode9 = (node) => coerce2(node); +var decode13 = (data) => coerce2(data); + +// node_modules/@helia/car/dist/src/utils/dag-walkers.js +var dagPbWalker = { + codec: code2, + async *walk(block) { + const node = decode11(block); + yield* node.Links.map((l) => l.Hash); + } +}; +var rawWalker = { + codec: code3, + async *walk() { + } +}; +var CID_TAG2 = 42; +var cborWalker = { + codec: 113, + async *walk(block) { + const cids = []; + const tags = []; + tags[CID_TAG2] = (bytes) => { + if (bytes[0] !== 0) { + throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00"); + } + const cid = CID2.decode(bytes.subarray(1)); + cids.push(cid); + return cid; + }; + decode(block, { + tags + }); + yield* cids; + } +}; +var DagJsonTokenizer = class extends Tokenizer { + tokenBuffer; + constructor(data, options) { + super(data, options); + this.tokenBuffer = []; + } + done() { + return this.tokenBuffer.length === 0 && super.done(); + } + _next() { + if (this.tokenBuffer.length > 0) { + return this.tokenBuffer.pop(); + } 
+ return super.next(); + } + /** + * Implements rules outlined in https://github.com/ipld/specs/pull/356 + */ + next() { + const token = this._next(); + if (token.type === Type.map) { + const keyToken = this._next(); + if (keyToken.type === Type.string && keyToken.value === "/") { + const valueToken = this._next(); + if (valueToken.type === Type.string) { + const breakToken = this._next(); + if (breakToken.type !== Type.break) { + throw new Error("Invalid encoded CID form"); + } + this.tokenBuffer.push(valueToken); + return new Token(Type.tag, 42, 0); + } + if (valueToken.type === Type.map) { + const innerKeyToken = this._next(); + if (innerKeyToken.type === Type.string && innerKeyToken.value === "bytes") { + const innerValueToken = this._next(); + if (innerValueToken.type === Type.string) { + for (let i = 0; i < 2; i++) { + const breakToken = this._next(); + if (breakToken.type !== Type.break) { + throw new Error("Invalid encoded Bytes form"); + } + } + const bytes = base64.decode(`m${innerValueToken.value}`); + return new Token(Type.bytes, bytes, innerValueToken.value.length); + } + this.tokenBuffer.push(innerValueToken); + } + this.tokenBuffer.push(innerKeyToken); + } + this.tokenBuffer.push(valueToken); + } + this.tokenBuffer.push(keyToken); + } + return token; + } +}; +var jsonWalker = { + codec: 297, + async *walk(block) { + const cids = []; + const tags = []; + tags[CID_TAG2] = (string2) => { + const cid = CID2.parse(string2); + cids.push(cid); + return cid; + }; + decode12(block, { + tags, + tokenizer: new DagJsonTokenizer(block, { + tags, + allowIndefinite: true, + allowUndefined: true, + allowNaN: true, + allowInfinity: true, + allowBigInt: true, + strict: false, + rejectDuplicateMapKeys: false + }) + }); + yield* cids; + } +}; + +// node_modules/@helia/car/dist/src/index.js +var DEFAULT_DAG_WALKERS = [ + rawWalker, + dagPbWalker, + cborWalker, + jsonWalker +]; +var DAG_WALK_QUEUE_CONCURRENCY = 1; +var DefaultCar = class { + components; + dagWalkers; + 
constructor(components, init) { + this.components = components; + this.dagWalkers = {}; + [...DEFAULT_DAG_WALKERS, ...init.dagWalkers ?? []].forEach((dagWalker) => { + this.dagWalkers[dagWalker.codec] = dagWalker; + }); + } + async import(reader, options) { + await src_default(this.components.blockstore.putMany(src_default3(reader.blocks(), ({ cid, bytes }) => ({ cid, block: bytes })), options)); + } + async export(root, writer, options) { + const deferred = pDefer(); + const roots = Array.isArray(root) ? root : [root]; + const queue = new dist_default({ + concurrency: DAG_WALK_QUEUE_CONCURRENCY + }); + queue.on("idle", () => { + deferred.resolve(); + }); + queue.on("error", (err) => { + deferred.resolve(err); + }); + for (const root2 of roots) { + void queue.add(async () => { + await this.#walkDag(root2, queue, async (cid, bytes) => { + await writer.put({ cid, bytes }); + }, options); + }); + } + try { + await deferred.promise; + } finally { + await writer.close(); + } + } + /** + * Walk the DAG behind the passed CID, ensure all blocks are present in the blockstore + * and update the pin count for them + */ + async #walkDag(cid, queue, withBlock, options) { + const dagWalker = this.dagWalkers[cid.code]; + if (dagWalker == null) { + throw new Error(`No dag walker found for cid codec ${cid.code}`); + } + const block = await this.components.blockstore.get(cid, options); + await withBlock(cid, block); + for await (const cid2 of dagWalker.walk(block)) { + void queue.add(async () => { + await this.#walkDag(cid2, queue, withBlock, options); + }); + } + } +}; +function car(helia, init = {}) { + return new DefaultCar(helia, init); +} + +// node_modules/ipfs-unixfs-importer/dist/src/index.js +var import_err_code4 = __toESM(require_err_code(), 1); + +// node_modules/it-first/dist/src/index.js +function isAsyncIterable3(thing) { + return thing[Symbol.asyncIterator] != null; +} +function first(source) { + if (isAsyncIterable3(source)) { + return (async () => { + for await 
(const entry of source) { + return entry; + } + return void 0; + })(); + } + for (const entry of source) { + return entry; + } + return void 0; +} +var src_default4 = first; + +// node_modules/it-batch/dist/src/index.js +function isAsyncIterable4(thing) { + return thing[Symbol.asyncIterator] != null; +} +function batch(source, size = 1) { + size = Number(size); + if (isAsyncIterable4(source)) { + return async function* () { + let things = []; + if (size < 1) { + size = 1; + } + if (size !== Math.round(size)) { + throw new Error("Batch size must be an integer"); + } + for await (const thing of source) { + things.push(thing); + while (things.length >= size) { + yield things.slice(0, size); + things = things.slice(size); + } + } + while (things.length > 0) { + yield things.slice(0, size); + things = things.slice(size); + } + }(); + } + return function* () { + let things = []; + if (size < 1) { + size = 1; + } + if (size !== Math.round(size)) { + throw new Error("Batch size must be an integer"); + } + for (const thing of source) { + things.push(thing); + while (things.length >= size) { + yield things.slice(0, size); + things = things.slice(size); + } + } + while (things.length > 0) { + yield things.slice(0, size); + things = things.slice(size); + } + }(); +} +var src_default5 = batch; + +// node_modules/it-parallel-batch/dist/src/index.js +async function* parallelBatch(source, size = 1) { + for await (const tasks of src_default5(source, size)) { + const things = tasks.map(async (p) => { + return p().then((value) => ({ ok: true, value }), (err) => ({ ok: false, err })); + }); + for (let i = 0; i < things.length; i++) { + const result = await things[i]; + if (result.ok) { + yield result.value; + } else { + throw result.err; + } + } + } +} + +// node_modules/uint8arrays/dist/src/util/as-uint8array.js +function asUint8Array(buf2) { + if (globalThis.Buffer != null) { + return new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength); + } + return buf2; +} + +// 
node_modules/uint8arrays/dist/src/alloc.js +function alloc2(size = 0) { + var _a; + if (((_a = globalThis.Buffer) == null ? void 0 : _a.alloc) != null) { + return asUint8Array(globalThis.Buffer.alloc(size)); + } + return new Uint8Array(size); +} +function allocUnsafe(size = 0) { + var _a; + if (((_a = globalThis.Buffer) == null ? void 0 : _a.allocUnsafe) != null) { + return asUint8Array(globalThis.Buffer.allocUnsafe(size)); + } + return new Uint8Array(size); +} + +// node_modules/uint8arrays/dist/src/concat.js +function concat2(arrays, length4) { + if (length4 == null) { + length4 = arrays.reduce((acc, curr) => acc + curr.length, 0); + } + const output = allocUnsafe(length4); + let offset = 0; + for (const arr of arrays) { + output.set(arr, offset); + offset += arr.length; + } + return asUint8Array(output); +} + +// node_modules/uint8arrays/dist/src/equals.js +function equals5(a, b) { + if (a === b) { + return true; + } + if (a.byteLength !== b.byteLength) { + return false; + } + for (let i = 0; i < a.byteLength; i++) { + if (a[i] !== b[i]) { + return false; + } + } + return true; +} + +// node_modules/uint8arraylist/dist/src/index.js +var symbol = Symbol.for("@achingbrain/uint8arraylist"); +function findBufAndOffset(bufs, index) { + if (index == null || index < 0) { + throw new RangeError("index is out of bounds"); + } + let offset = 0; + for (const buf2 of bufs) { + const bufEnd = offset + buf2.byteLength; + if (index < bufEnd) { + return { + buf: buf2, + index: index - offset + }; + } + offset = bufEnd; + } + throw new RangeError("index is out of bounds"); +} +function isUint8ArrayList(value) { + return Boolean(value == null ? 
void 0 : value[symbol]); +} +var Uint8ArrayList = class _Uint8ArrayList { + bufs; + length; + [symbol] = true; + constructor(...data) { + this.bufs = []; + this.length = 0; + if (data.length > 0) { + this.appendAll(data); + } + } + *[Symbol.iterator]() { + yield* this.bufs; + } + get byteLength() { + return this.length; + } + /** + * Add one or more `bufs` to the end of this Uint8ArrayList + */ + append(...bufs) { + this.appendAll(bufs); + } + /** + * Add all `bufs` to the end of this Uint8ArrayList + */ + appendAll(bufs) { + let length4 = 0; + for (const buf2 of bufs) { + if (buf2 instanceof Uint8Array) { + length4 += buf2.byteLength; + this.bufs.push(buf2); + } else if (isUint8ArrayList(buf2)) { + length4 += buf2.byteLength; + this.bufs.push(...buf2.bufs); + } else { + throw new Error("Could not append value, must be an Uint8Array or a Uint8ArrayList"); + } + } + this.length += length4; + } + /** + * Add one or more `bufs` to the start of this Uint8ArrayList + */ + prepend(...bufs) { + this.prependAll(bufs); + } + /** + * Add all `bufs` to the start of this Uint8ArrayList + */ + prependAll(bufs) { + let length4 = 0; + for (const buf2 of bufs.reverse()) { + if (buf2 instanceof Uint8Array) { + length4 += buf2.byteLength; + this.bufs.unshift(buf2); + } else if (isUint8ArrayList(buf2)) { + length4 += buf2.byteLength; + this.bufs.unshift(...buf2.bufs); + } else { + throw new Error("Could not prepend value, must be an Uint8Array or a Uint8ArrayList"); + } + } + this.length += length4; + } + /** + * Read the value at `index` + */ + get(index) { + const res = findBufAndOffset(this.bufs, index); + return res.buf[res.index]; + } + /** + * Set the value at `index` to `value` + */ + set(index, value) { + const res = findBufAndOffset(this.bufs, index); + res.buf[res.index] = value; + } + /** + * Copy bytes from `buf` to the index specified by `offset` + */ + write(buf2, offset = 0) { + if (buf2 instanceof Uint8Array) { + for (let i = 0; i < buf2.length; i++) { + 
this.set(offset + i, buf2[i]); + } + } else if (isUint8ArrayList(buf2)) { + for (let i = 0; i < buf2.length; i++) { + this.set(offset + i, buf2.get(i)); + } + } else { + throw new Error("Could not write value, must be an Uint8Array or a Uint8ArrayList"); + } + } + /** + * Remove bytes from the front of the pool + */ + consume(bytes) { + bytes = Math.trunc(bytes); + if (Number.isNaN(bytes) || bytes <= 0) { + return; + } + if (bytes === this.byteLength) { + this.bufs = []; + this.length = 0; + return; + } + while (this.bufs.length > 0) { + if (bytes >= this.bufs[0].byteLength) { + bytes -= this.bufs[0].byteLength; + this.length -= this.bufs[0].byteLength; + this.bufs.shift(); + } else { + this.bufs[0] = this.bufs[0].subarray(bytes); + this.length -= bytes; + break; + } + } + } + /** + * Extracts a section of an array and returns a new array. + * + * This is a copy operation as it is with Uint8Arrays and Arrays + * - note this is different to the behaviour of Node Buffers. + */ + slice(beginInclusive, endExclusive) { + const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive); + return concat2(bufs, length4); + } + /** + * Returns a alloc from the given start and end element index. + * + * In the best case where the data extracted comes from a single Uint8Array + * internally this is a no-copy operation otherwise it is a copy operation. + */ + subarray(beginInclusive, endExclusive) { + const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive); + if (bufs.length === 1) { + return bufs[0]; + } + return concat2(bufs, length4); + } + /** + * Returns a allocList from the given start and end element index. + * + * This is a no-copy operation. 
+ */ + sublist(beginInclusive, endExclusive) { + const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive); + const list = new _Uint8ArrayList(); + list.length = length4; + list.bufs = [...bufs]; + return list; + } + _subList(beginInclusive, endExclusive) { + beginInclusive = beginInclusive ?? 0; + endExclusive = endExclusive ?? this.length; + if (beginInclusive < 0) { + beginInclusive = this.length + beginInclusive; + } + if (endExclusive < 0) { + endExclusive = this.length + endExclusive; + } + if (beginInclusive < 0 || endExclusive > this.length) { + throw new RangeError("index is out of bounds"); + } + if (beginInclusive === endExclusive) { + return { bufs: [], length: 0 }; + } + if (beginInclusive === 0 && endExclusive === this.length) { + return { bufs: this.bufs, length: this.length }; + } + const bufs = []; + let offset = 0; + for (let i = 0; i < this.bufs.length; i++) { + const buf2 = this.bufs[i]; + const bufStart = offset; + const bufEnd = bufStart + buf2.byteLength; + offset = bufEnd; + if (beginInclusive >= bufEnd) { + continue; + } + const sliceStartInBuf = beginInclusive >= bufStart && beginInclusive < bufEnd; + const sliceEndsInBuf = endExclusive > bufStart && endExclusive <= bufEnd; + if (sliceStartInBuf && sliceEndsInBuf) { + if (beginInclusive === bufStart && endExclusive === bufEnd) { + bufs.push(buf2); + break; + } + const start = beginInclusive - bufStart; + bufs.push(buf2.subarray(start, start + (endExclusive - beginInclusive))); + break; + } + if (sliceStartInBuf) { + if (beginInclusive === 0) { + bufs.push(buf2); + continue; + } + bufs.push(buf2.subarray(beginInclusive - bufStart)); + continue; + } + if (sliceEndsInBuf) { + if (endExclusive === bufEnd) { + bufs.push(buf2); + break; + } + bufs.push(buf2.subarray(0, endExclusive - bufStart)); + break; + } + bufs.push(buf2); + } + return { bufs, length: endExclusive - beginInclusive }; + } + indexOf(search, offset = 0) { + if (!isUint8ArrayList(search) && !(search 
instanceof Uint8Array)) { + throw new TypeError('The "value" argument must be a Uint8ArrayList or Uint8Array'); + } + const needle = search instanceof Uint8Array ? search : search.subarray(); + offset = Number(offset ?? 0); + if (isNaN(offset)) { + offset = 0; + } + if (offset < 0) { + offset = this.length + offset; + } + if (offset < 0) { + offset = 0; + } + if (search.length === 0) { + return offset > this.length ? this.length : offset; + } + const M = needle.byteLength; + if (M === 0) { + throw new TypeError("search must be at least 1 byte long"); + } + const radix = 256; + const rightmostPositions = new Int32Array(radix); + for (let c = 0; c < radix; c++) { + rightmostPositions[c] = -1; + } + for (let j = 0; j < M; j++) { + rightmostPositions[needle[j]] = j; + } + const right = rightmostPositions; + const lastIndex = this.byteLength - needle.byteLength; + const lastPatIndex = needle.byteLength - 1; + let skip; + for (let i = offset; i <= lastIndex; i += skip) { + skip = 0; + for (let j = lastPatIndex; j >= 0; j--) { + const char = this.get(i + j); + if (needle[j] !== char) { + skip = Math.max(1, j - right[char]); + break; + } + } + if (skip === 0) { + return i; + } + } + return -1; + } + getInt8(byteOffset) { + const buf2 = this.subarray(byteOffset, byteOffset + 1); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getInt8(0); + } + setInt8(byteOffset, value) { + const buf2 = allocUnsafe(1); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setInt8(0, value); + this.write(buf2, byteOffset); + } + getInt16(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 2); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getInt16(0, littleEndian); + } + setInt16(byteOffset, value, littleEndian) { + const buf2 = alloc2(2); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setInt16(0, value, 
littleEndian); + this.write(buf2, byteOffset); + } + getInt32(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 4); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getInt32(0, littleEndian); + } + setInt32(byteOffset, value, littleEndian) { + const buf2 = alloc2(4); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setInt32(0, value, littleEndian); + this.write(buf2, byteOffset); + } + getBigInt64(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 8); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getBigInt64(0, littleEndian); + } + setBigInt64(byteOffset, value, littleEndian) { + const buf2 = alloc2(8); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setBigInt64(0, value, littleEndian); + this.write(buf2, byteOffset); + } + getUint8(byteOffset) { + const buf2 = this.subarray(byteOffset, byteOffset + 1); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getUint8(0); + } + setUint8(byteOffset, value) { + const buf2 = allocUnsafe(1); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setUint8(0, value); + this.write(buf2, byteOffset); + } + getUint16(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 2); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getUint16(0, littleEndian); + } + setUint16(byteOffset, value, littleEndian) { + const buf2 = alloc2(2); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setUint16(0, value, littleEndian); + this.write(buf2, byteOffset); + } + getUint32(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 4); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getUint32(0, 
littleEndian); + } + setUint32(byteOffset, value, littleEndian) { + const buf2 = alloc2(4); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setUint32(0, value, littleEndian); + this.write(buf2, byteOffset); + } + getBigUint64(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 8); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getBigUint64(0, littleEndian); + } + setBigUint64(byteOffset, value, littleEndian) { + const buf2 = alloc2(8); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setBigUint64(0, value, littleEndian); + this.write(buf2, byteOffset); + } + getFloat32(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 4); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getFloat32(0, littleEndian); + } + setFloat32(byteOffset, value, littleEndian) { + const buf2 = alloc2(4); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setFloat32(0, value, littleEndian); + this.write(buf2, byteOffset); + } + getFloat64(byteOffset, littleEndian) { + const buf2 = this.subarray(byteOffset, byteOffset + 8); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + return view.getFloat64(0, littleEndian); + } + setFloat64(byteOffset, value, littleEndian) { + const buf2 = alloc2(8); + const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength); + view.setFloat64(0, value, littleEndian); + this.write(buf2, byteOffset); + } + equals(other) { + if (other == null) { + return false; + } + if (!(other instanceof _Uint8ArrayList)) { + return false; + } + if (other.bufs.length !== this.bufs.length) { + return false; + } + for (let i = 0; i < this.bufs.length; i++) { + if (!equals5(this.bufs[i], other.bufs[i])) { + return false; + } + } + return true; + } + /** + * Create a Uint8ArrayList from a pre-existing list of 
Uint8Arrays. Use this + * method if you know the total size of all the Uint8Arrays ahead of time. + */ + static fromUint8Arrays(bufs, length4) { + const list = new _Uint8ArrayList(); + list.bufs = bufs; + if (length4 == null) { + length4 = bufs.reduce((acc, curr) => acc + curr.byteLength, 0); + } + list.length = length4; + return list; + } +}; + +// node_modules/ipfs-unixfs-importer/dist/src/chunker/fixed-size.js +var DEFAULT_CHUNK_SIZE = 262144; +var fixedSize = (options = {}) => { + const chunkSize = options.chunkSize ?? DEFAULT_CHUNK_SIZE; + return async function* fixedSizeChunker(source) { + let list = new Uint8ArrayList(); + let currentLength = 0; + let emitted = false; + for await (const buffer2 of source) { + list.append(buffer2); + currentLength += buffer2.length; + while (currentLength >= chunkSize) { + yield list.slice(0, chunkSize); + emitted = true; + if (chunkSize === list.length) { + list = new Uint8ArrayList(); + currentLength = 0; + } else { + const newBl = new Uint8ArrayList(); + newBl.append(list.sublist(chunkSize)); + list = newBl; + currentLength -= chunkSize; + } + } + } + if (!emitted || currentLength > 0) { + yield list.subarray(0, currentLength); + } + }; +}; + +// node_modules/ipfs-unixfs/dist/src/index.js +var import_err_code = __toESM(require_err_code(), 1); + +// node_modules/protons-runtime/dist/src/utils/float.js +var f32 = new Float32Array([-0]); +var f8b = new Uint8Array(f32.buffer); +function writeFloatLE(val, buf2, pos) { + f32[0] = val; + buf2[pos] = f8b[0]; + buf2[pos + 1] = f8b[1]; + buf2[pos + 2] = f8b[2]; + buf2[pos + 3] = f8b[3]; +} +function readFloatLE(buf2, pos) { + f8b[0] = buf2[pos]; + f8b[1] = buf2[pos + 1]; + f8b[2] = buf2[pos + 2]; + f8b[3] = buf2[pos + 3]; + return f32[0]; +} +var f64 = new Float64Array([-0]); +var d8b = new Uint8Array(f64.buffer); +function writeDoubleLE(val, buf2, pos) { + f64[0] = val; + buf2[pos] = d8b[0]; + buf2[pos + 1] = d8b[1]; + buf2[pos + 2] = d8b[2]; + buf2[pos + 3] = d8b[3]; + buf2[pos + 
4] = d8b[4]; + buf2[pos + 5] = d8b[5]; + buf2[pos + 6] = d8b[6]; + buf2[pos + 7] = d8b[7]; +} +function readDoubleLE(buf2, pos) { + d8b[0] = buf2[pos]; + d8b[1] = buf2[pos + 1]; + d8b[2] = buf2[pos + 2]; + d8b[3] = buf2[pos + 3]; + d8b[4] = buf2[pos + 4]; + d8b[5] = buf2[pos + 5]; + d8b[6] = buf2[pos + 6]; + d8b[7] = buf2[pos + 7]; + return f64[0]; +} + +// node_modules/protons-runtime/dist/src/utils/longbits.js +var MAX_SAFE_NUMBER_INTEGER = BigInt(Number.MAX_SAFE_INTEGER); +var MIN_SAFE_NUMBER_INTEGER = BigInt(Number.MIN_SAFE_INTEGER); +var LongBits = class _LongBits { + lo; + hi; + constructor(lo, hi) { + this.lo = lo | 0; + this.hi = hi | 0; + } + /** + * Converts this long bits to a possibly unsafe JavaScript number + */ + toNumber(unsigned = false) { + if (!unsigned && this.hi >>> 31 > 0) { + const lo = ~this.lo + 1 >>> 0; + let hi = ~this.hi >>> 0; + if (lo === 0) { + hi = hi + 1 >>> 0; + } + return -(lo + hi * 4294967296); + } + return this.lo + this.hi * 4294967296; + } + /** + * Converts this long bits to a bigint + */ + toBigInt(unsigned = false) { + if (unsigned) { + return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n); + } + if (this.hi >>> 31 !== 0) { + const lo = ~this.lo + 1 >>> 0; + let hi = ~this.hi >>> 0; + if (lo === 0) { + hi = hi + 1 >>> 0; + } + return -(BigInt(lo) + (BigInt(hi) << 32n)); + } + return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n); + } + /** + * Converts this long bits to a string + */ + toString(unsigned = false) { + return this.toBigInt(unsigned).toString(); + } + /** + * Zig-zag encodes this long bits + */ + zzEncode() { + const mask = this.hi >> 31; + this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0; + this.lo = (this.lo << 1 ^ mask) >>> 0; + return this; + } + /** + * Zig-zag decodes this long bits + */ + zzDecode() { + const mask = -(this.lo & 1); + this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0; + this.hi = (this.hi >>> 1 ^ mask) >>> 0; + return this; + } + /** + * Calculates the 
length of this longbits when encoded as a varint. + */ + length() { + const part0 = this.lo; + const part1 = (this.lo >>> 28 | this.hi << 4) >>> 0; + const part2 = this.hi >>> 24; + return part2 === 0 ? part1 === 0 ? part0 < 16384 ? part0 < 128 ? 1 : 2 : part0 < 2097152 ? 3 : 4 : part1 < 16384 ? part1 < 128 ? 5 : 6 : part1 < 2097152 ? 7 : 8 : part2 < 128 ? 9 : 10; + } + /** + * Constructs new long bits from the specified number + */ + static fromBigInt(value) { + if (value === 0n) { + return zero; + } + if (value < MAX_SAFE_NUMBER_INTEGER && value > MIN_SAFE_NUMBER_INTEGER) { + return this.fromNumber(Number(value)); + } + const negative = value < 0n; + if (negative) { + value = -value; + } + let hi = value >> 32n; + let lo = value - (hi << 32n); + if (negative) { + hi = ~hi | 0n; + lo = ~lo | 0n; + if (++lo > TWO_32) { + lo = 0n; + if (++hi > TWO_32) { + hi = 0n; + } + } + } + return new _LongBits(Number(lo), Number(hi)); + } + /** + * Constructs new long bits from the specified number + */ + static fromNumber(value) { + if (value === 0) { + return zero; + } + const sign = value < 0; + if (sign) { + value = -value; + } + let lo = value >>> 0; + let hi = (value - lo) / 4294967296 >>> 0; + if (sign) { + hi = ~hi >>> 0; + lo = ~lo >>> 0; + if (++lo > 4294967295) { + lo = 0; + if (++hi > 4294967295) { + hi = 0; + } + } + } + return new _LongBits(lo, hi); + } + /** + * Constructs new long bits from a number, long or string + */ + static from(value) { + if (typeof value === "number") { + return _LongBits.fromNumber(value); + } + if (typeof value === "bigint") { + return _LongBits.fromBigInt(value); + } + if (typeof value === "string") { + return _LongBits.fromBigInt(BigInt(value)); + } + return value.low != null || value.high != null ? 
new _LongBits(value.low >>> 0, value.high >>> 0) : zero; + } +}; +var zero = new LongBits(0, 0); +zero.toBigInt = function() { + return 0n; +}; +zero.zzEncode = zero.zzDecode = function() { + return this; +}; +zero.length = function() { + return 1; +}; +var TWO_32 = 4294967296n; + +// node_modules/protons-runtime/dist/src/utils/utf8.js +function length3(string2) { + let len = 0; + let c = 0; + for (let i = 0; i < string2.length; ++i) { + c = string2.charCodeAt(i); + if (c < 128) { + len += 1; + } else if (c < 2048) { + len += 2; + } else if ((c & 64512) === 55296 && (string2.charCodeAt(i + 1) & 64512) === 56320) { + ++i; + len += 4; + } else { + len += 3; + } + } + return len; +} +function read3(buffer2, start, end) { + const len = end - start; + if (len < 1) { + return ""; + } + let parts; + const chunk = []; + let i = 0; + let t; + while (start < end) { + t = buffer2[start++]; + if (t < 128) { + chunk[i++] = t; + } else if (t > 191 && t < 224) { + chunk[i++] = (t & 31) << 6 | buffer2[start++] & 63; + } else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (buffer2[start++] & 63) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63) - 65536; + chunk[i++] = 55296 + (t >> 10); + chunk[i++] = 56320 + (t & 1023); + } else { + chunk[i++] = (t & 15) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63; + } + if (i > 8191) { + (parts ?? 
(parts = [])).push(String.fromCharCode.apply(String, chunk)); + i = 0; + } + } + if (parts != null) { + if (i > 0) { + parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); + } + return parts.join(""); + } + return String.fromCharCode.apply(String, chunk.slice(0, i)); +} +function write(string2, buffer2, offset) { + const start = offset; + let c1; + let c2; + for (let i = 0; i < string2.length; ++i) { + c1 = string2.charCodeAt(i); + if (c1 < 128) { + buffer2[offset++] = c1; + } else if (c1 < 2048) { + buffer2[offset++] = c1 >> 6 | 192; + buffer2[offset++] = c1 & 63 | 128; + } else if ((c1 & 64512) === 55296 && ((c2 = string2.charCodeAt(i + 1)) & 64512) === 56320) { + c1 = 65536 + ((c1 & 1023) << 10) + (c2 & 1023); + ++i; + buffer2[offset++] = c1 >> 18 | 240; + buffer2[offset++] = c1 >> 12 & 63 | 128; + buffer2[offset++] = c1 >> 6 & 63 | 128; + buffer2[offset++] = c1 & 63 | 128; + } else { + buffer2[offset++] = c1 >> 12 | 224; + buffer2[offset++] = c1 >> 6 & 63 | 128; + buffer2[offset++] = c1 & 63 | 128; + } + } + return offset - start; +} + +// node_modules/protons-runtime/dist/src/utils/reader.js +function indexOutOfRange(reader, writeLength) { + return RangeError(`index out of range: ${reader.pos} + ${writeLength ?? 
1} > ${reader.len}`); +} +function readFixed32End(buf2, end) { + return (buf2[end - 4] | buf2[end - 3] << 8 | buf2[end - 2] << 16 | buf2[end - 1] << 24) >>> 0; +} +var Uint8ArrayReader = class { + buf; + pos; + len; + _slice = Uint8Array.prototype.subarray; + constructor(buffer2) { + this.buf = buffer2; + this.pos = 0; + this.len = buffer2.length; + } + /** + * Reads a varint as an unsigned 32 bit value + */ + uint32() { + let value = 4294967295; + value = (this.buf[this.pos] & 127) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 127) << 7) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 15) << 28) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + if ((this.pos += 5) > this.len) { + this.pos = this.len; + throw indexOutOfRange(this, 10); + } + return value; + } + /** + * Reads a varint as a signed 32 bit value + */ + int32() { + return this.uint32() | 0; + } + /** + * Reads a zig-zag encoded varint as a signed 32 bit value + */ + sint32() { + const value = this.uint32(); + return value >>> 1 ^ -(value & 1) | 0; + } + /** + * Reads a varint as a boolean + */ + bool() { + return this.uint32() !== 0; + } + /** + * Reads fixed 32 bits as an unsigned 32 bit integer + */ + fixed32() { + if (this.pos + 4 > this.len) { + throw indexOutOfRange(this, 4); + } + const res = readFixed32End(this.buf, this.pos += 4); + return res; + } + /** + * Reads fixed 32 bits as a signed 32 bit integer + */ + sfixed32() { + if (this.pos + 4 > this.len) { + throw indexOutOfRange(this, 4); + } + const res = readFixed32End(this.buf, this.pos += 4) | 0; + return res; + } + /** + * Reads a float (32 bit) as a number + */ + float() { + if (this.pos + 4 > this.len) { + throw 
indexOutOfRange(this, 4); + } + const value = readFloatLE(this.buf, this.pos); + this.pos += 4; + return value; + } + /** + * Reads a double (64 bit float) as a number + */ + double() { + if (this.pos + 8 > this.len) { + throw indexOutOfRange(this, 4); + } + const value = readDoubleLE(this.buf, this.pos); + this.pos += 8; + return value; + } + /** + * Reads a sequence of bytes preceded by its length as a varint + */ + bytes() { + const length4 = this.uint32(); + const start = this.pos; + const end = this.pos + length4; + if (end > this.len) { + throw indexOutOfRange(this, length4); + } + this.pos += length4; + return start === end ? new Uint8Array(0) : this.buf.subarray(start, end); + } + /** + * Reads a string preceded by its byte length as a varint + */ + string() { + const bytes = this.bytes(); + return read3(bytes, 0, bytes.length); + } + /** + * Skips the specified number of bytes if specified, otherwise skips a varint + */ + skip(length4) { + if (typeof length4 === "number") { + if (this.pos + length4 > this.len) { + throw indexOutOfRange(this, length4); + } + this.pos += length4; + } else { + do { + if (this.pos >= this.len) { + throw indexOutOfRange(this); + } + } while ((this.buf[this.pos++] & 128) !== 0); + } + return this; + } + /** + * Skips the next element of the specified wire type + */ + skipType(wireType) { + switch (wireType) { + case 0: + this.skip(); + break; + case 1: + this.skip(8); + break; + case 2: + this.skip(this.uint32()); + break; + case 3: + while ((wireType = this.uint32() & 7) !== 4) { + this.skipType(wireType); + } + break; + case 5: + this.skip(4); + break; + default: + throw Error(`invalid wire type ${wireType} at offset ${this.pos}`); + } + return this; + } + readLongVarint() { + const bits = new LongBits(0, 0); + let i = 0; + if (this.len - this.pos > 4) { + for (; i < 4; ++i) { + bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + bits.lo = (bits.lo | 
(this.buf[this.pos] & 127) << 28) >>> 0; + bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + i = 0; + } else { + for (; i < 3; ++i) { + if (this.pos >= this.len) { + throw indexOutOfRange(this); + } + bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0; + return bits; + } + if (this.len - this.pos > 4) { + for (; i < 5; ++i) { + bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + } else { + for (; i < 5; ++i) { + if (this.pos >= this.len) { + throw indexOutOfRange(this); + } + bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + } + throw Error("invalid varint encoding"); + } + readFixed64() { + if (this.pos + 8 > this.len) { + throw indexOutOfRange(this, 8); + } + const lo = readFixed32End(this.buf, this.pos += 4); + const hi = readFixed32End(this.buf, this.pos += 4); + return new LongBits(lo, hi); + } + /** + * Reads a varint as a signed 64 bit value + */ + int64() { + return this.readLongVarint().toBigInt(); + } + /** + * Reads a varint as a signed 64 bit value returned as a possibly unsafe + * JavaScript number + */ + int64Number() { + return this.readLongVarint().toNumber(); + } + /** + * Reads a varint as a signed 64 bit value returned as a string + */ + int64String() { + return this.readLongVarint().toString(); + } + /** + * Reads a varint as an unsigned 64 bit value + */ + uint64() { + return this.readLongVarint().toBigInt(true); + } + /** + * Reads a varint as an unsigned 64 bit value returned as a possibly unsafe + * JavaScript number + */ + uint64Number() { + return this.readLongVarint().toNumber(true); + } + /** + * Reads a varint as an unsigned 64 bit value returned as a string + */ + 
uint64String() { + return this.readLongVarint().toString(true); + } + /** + * Reads a zig-zag encoded varint as a signed 64 bit value + */ + sint64() { + return this.readLongVarint().zzDecode().toBigInt(); + } + /** + * Reads a zig-zag encoded varint as a signed 64 bit value returned as a + * possibly unsafe JavaScript number + */ + sint64Number() { + return this.readLongVarint().zzDecode().toNumber(); + } + /** + * Reads a zig-zag encoded varint as a signed 64 bit value returned as a + * string + */ + sint64String() { + return this.readLongVarint().zzDecode().toString(); + } + /** + * Reads fixed 64 bits + */ + fixed64() { + return this.readFixed64().toBigInt(); + } + /** + * Reads fixed 64 bits returned as a possibly unsafe JavaScript number + */ + fixed64Number() { + return this.readFixed64().toNumber(); + } + /** + * Reads fixed 64 bits returned as a string + */ + fixed64String() { + return this.readFixed64().toString(); + } + /** + * Reads zig-zag encoded fixed 64 bits + */ + sfixed64() { + return this.readFixed64().toBigInt(); + } + /** + * Reads zig-zag encoded fixed 64 bits returned as a possibly unsafe + * JavaScript number + */ + sfixed64Number() { + return this.readFixed64().toNumber(); + } + /** + * Reads zig-zag encoded fixed 64 bits returned as a string + */ + sfixed64String() { + return this.readFixed64().toString(); + } +}; +function createReader(buf2) { + return new Uint8ArrayReader(buf2 instanceof Uint8Array ? 
buf2 : buf2.subarray()); +} + +// node_modules/protons-runtime/dist/src/decode.js +function decodeMessage(buf2, codec) { + const reader = createReader(buf2); + return codec.decode(reader); +} + +// node_modules/multiformats/src/bases/base10.js +var base10_exports = {}; +__export(base10_exports, { + base10: () => base10 +}); +var base10 = baseX2({ + prefix: "9", + name: "base10", + alphabet: "0123456789" +}); + +// node_modules/multiformats/src/bases/base16.js +var base16_exports = {}; +__export(base16_exports, { + base16: () => base16, + base16upper: () => base16upper +}); +var base16 = rfc46482({ + prefix: "f", + name: "base16", + alphabet: "0123456789abcdef", + bitsPerChar: 4 +}); +var base16upper = rfc46482({ + prefix: "F", + name: "base16upper", + alphabet: "0123456789ABCDEF", + bitsPerChar: 4 +}); + +// node_modules/multiformats/src/bases/base2.js +var base2_exports = {}; +__export(base2_exports, { + base2: () => base22 +}); +var base22 = rfc46482({ + prefix: "0", + name: "base2", + alphabet: "01", + bitsPerChar: 1 +}); + +// node_modules/multiformats/src/bases/base256emoji.js +var base256emoji_exports = {}; +__export(base256emoji_exports, { + base256emoji: () => base256emoji +}); +var alphabet = 
Array.from("\u{1F680}\u{1FA90}\u2604\u{1F6F0}\u{1F30C}\u{1F311}\u{1F312}\u{1F313}\u{1F314}\u{1F315}\u{1F316}\u{1F317}\u{1F318}\u{1F30D}\u{1F30F}\u{1F30E}\u{1F409}\u2600\u{1F4BB}\u{1F5A5}\u{1F4BE}\u{1F4BF}\u{1F602}\u2764\u{1F60D}\u{1F923}\u{1F60A}\u{1F64F}\u{1F495}\u{1F62D}\u{1F618}\u{1F44D}\u{1F605}\u{1F44F}\u{1F601}\u{1F525}\u{1F970}\u{1F494}\u{1F496}\u{1F499}\u{1F622}\u{1F914}\u{1F606}\u{1F644}\u{1F4AA}\u{1F609}\u263A\u{1F44C}\u{1F917}\u{1F49C}\u{1F614}\u{1F60E}\u{1F607}\u{1F339}\u{1F926}\u{1F389}\u{1F49E}\u270C\u2728\u{1F937}\u{1F631}\u{1F60C}\u{1F338}\u{1F64C}\u{1F60B}\u{1F497}\u{1F49A}\u{1F60F}\u{1F49B}\u{1F642}\u{1F493}\u{1F929}\u{1F604}\u{1F600}\u{1F5A4}\u{1F603}\u{1F4AF}\u{1F648}\u{1F447}\u{1F3B6}\u{1F612}\u{1F92D}\u2763\u{1F61C}\u{1F48B}\u{1F440}\u{1F62A}\u{1F611}\u{1F4A5}\u{1F64B}\u{1F61E}\u{1F629}\u{1F621}\u{1F92A}\u{1F44A}\u{1F973}\u{1F625}\u{1F924}\u{1F449}\u{1F483}\u{1F633}\u270B\u{1F61A}\u{1F61D}\u{1F634}\u{1F31F}\u{1F62C}\u{1F643}\u{1F340}\u{1F337}\u{1F63B}\u{1F613}\u2B50\u2705\u{1F97A}\u{1F308}\u{1F608}\u{1F918}\u{1F4A6}\u2714\u{1F623}\u{1F3C3}\u{1F490}\u2639\u{1F38A}\u{1F498}\u{1F620}\u261D\u{1F615}\u{1F33A}\u{1F382}\u{1F33B}\u{1F610}\u{1F595}\u{1F49D}\u{1F64A}\u{1F639}\u{1F5E3}\u{1F4AB}\u{1F480}\u{1F451}\u{1F3B5}\u{1F91E}\u{1F61B}\u{1F534}\u{1F624}\u{1F33C}\u{1F62B}\u26BD\u{1F919}\u2615\u{1F3C6}\u{1F92B}\u{1F448}\u{1F62E}\u{1F646}\u{1F37B}\u{1F343}\u{1F436}\u{1F481}\u{1F632}\u{1F33F}\u{1F9E1}\u{1F381}\u26A1\u{1F31E}\u{1F388}\u274C\u270A\u{1F44B}\u{1F630}\u{1F928}\u{1F636}\u{1F91D}\u{1F6B6}\u{1F4B0}\u{1F353}\u{1F4A2}\u{1F91F}\u{1F641}\u{1F6A8}\u{1F4A8}\u{1F92C}\u2708\u{1F380}\u{1F37A}\u{1F913}\u{1F619}\u{1F49F}\u{1F331}\u{1F616}\u{1F476}\u{1F974}\u25B6\u27A1\u2753\u{1F48E}\u{1F4B8}\u2B07\u{1F628}\u{1F31A}\u{1F98B}\u{1F637}\u{1F57A}\u26A0\u{1F645}\u{1F61F}\u{1F635}\u{1F44E}\u{1F932}\u{1F920}\u{1F927}\u{1F4CC}\u{1F535}\u{1F485}\u{1F9D0}\u{1F43E}\u{1F352}\u{1F617}\u{1F911}\u{1F30A}\u{1F92F}\u{1F437}\u260E\u{1F4A7}\u{1F62F}\u{1F486}\u{1F446}\u{1F3A4}\u
{1F647}\u{1F351}\u2744\u{1F334}\u{1F4A3}\u{1F438}\u{1F48C}\u{1F4CD}\u{1F940}\u{1F922}\u{1F445}\u{1F4A1}\u{1F4A9}\u{1F450}\u{1F4F8}\u{1F47B}\u{1F910}\u{1F92E}\u{1F3BC}\u{1F975}\u{1F6A9}\u{1F34E}\u{1F34A}\u{1F47C}\u{1F48D}\u{1F4E3}\u{1F942}"); +var alphabetBytesToChars = ( + /** @type {string[]} */ + alphabet.reduce( + (p, c, i) => { + p[i] = c; + return p; + }, + /** @type {string[]} */ + [] + ) +); +var alphabetCharsToBytes = ( + /** @type {number[]} */ + alphabet.reduce( + (p, c, i) => { + p[ + /** @type {number} */ + c.codePointAt(0) + ] = i; + return p; + }, + /** @type {number[]} */ + [] + ) +); +function encode10(data) { + return data.reduce((p, c) => { + p += alphabetBytesToChars[c]; + return p; + }, ""); +} +function decode14(str) { + const byts = []; + for (const char of str) { + const byt = alphabetCharsToBytes[ + /** @type {number} */ + char.codePointAt(0) + ]; + if (byt === void 0) { + throw new Error(`Non-base256emoji character: ${char}`); + } + byts.push(byt); + } + return new Uint8Array(byts); +} +var base256emoji = from2({ + prefix: "\u{1F680}", + name: "base256emoji", + encode: encode10, + decode: decode14 +}); + +// node_modules/multiformats/src/bases/base36.js +var base36_exports = {}; +__export(base36_exports, { + base36: () => base36, + base36upper: () => base36upper +}); +var base36 = baseX2({ + prefix: "k", + name: "base36", + alphabet: "0123456789abcdefghijklmnopqrstuvwxyz" +}); +var base36upper = baseX2({ + prefix: "K", + name: "base36upper", + alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" +}); + +// node_modules/multiformats/src/bases/base8.js +var base8_exports = {}; +__export(base8_exports, { + base8: () => base8 +}); +var base8 = rfc46482({ + prefix: "7", + name: "base8", + alphabet: "01234567", + bitsPerChar: 3 +}); + +// node_modules/multiformats/src/bases/identity.js +var identity_exports = {}; +__export(identity_exports, { + identity: () => identity +}); +var identity = from2({ + prefix: "\0", + name: "identity", + encode: (buf2) 
=> toString2(buf2), + decode: (str) => fromString2(str) +}); + +// node_modules/multiformats/src/codecs/json.js +var textEncoder4 = new TextEncoder(); +var textDecoder3 = new TextDecoder(); + +// node_modules/multiformats/src/hashes/identity.js +var identity_exports2 = {}; +__export(identity_exports2, { + identity: () => identity2 +}); +var code4 = 0; +var name3 = "identity"; +var encode11 = coerce2; +var digest = (input) => create2(code4, encode11(input)); +var identity2 = { code: code4, name: name3, encode: encode11, digest }; + +// node_modules/multiformats/src/hashes/sha2.js +var sha2_exports = {}; +__export(sha2_exports, { + sha256: () => sha256, + sha512: () => sha512 +}); +var import_crypto = __toESM(require("crypto"), 1); + +// node_modules/multiformats/src/hashes/hasher.js +var from3 = ({ name: name4, code: code5, encode: encode12 }) => new Hasher(name4, code5, encode12); +var Hasher = class { + /** + * + * @param {Name} name + * @param {Code} code + * @param {(input: Uint8Array) => Await} encode + */ + constructor(name4, code5, encode12) { + this.name = name4; + this.code = code5; + this.encode = encode12; + } + /** + * @param {Uint8Array} input + * @returns {Await>} + */ + digest(input) { + if (input instanceof Uint8Array) { + const result = this.encode(input); + return result instanceof Uint8Array ? 
create2(this.code, result) : result.then((digest2) => create2(this.code, digest2)); + } else { + throw Error("Unknown type, must be binary type"); + } + } +}; + +// node_modules/multiformats/src/hashes/sha2.js +var sha256 = from3({ + name: "sha2-256", + code: 18, + encode: (input) => coerce2(import_crypto.default.createHash("sha256").update(input).digest()) +}); +var sha512 = from3({ + name: "sha2-512", + code: 19, + encode: (input) => coerce2(import_crypto.default.createHash("sha512").update(input).digest()) +}); + +// node_modules/multiformats/src/basics.js +var bases = { ...identity_exports, ...base2_exports, ...base8_exports, ...base10_exports, ...base16_exports, ...base32_exports, ...base36_exports, ...base58_exports, ...base64_exports, ...base256emoji_exports }; +var hashes = { ...sha2_exports, ...identity_exports2 }; + +// node_modules/uint8arrays/dist/src/util/bases.js +function createCodec(name4, prefix, encode12, decode15) { + return { + name: name4, + prefix, + encoder: { + name: name4, + prefix, + encode: encode12 + }, + decoder: { + decode: decode15 + } + }; +} +var string = createCodec("utf8", "u", (buf2) => { + const decoder = new TextDecoder("utf8"); + return "u" + decoder.decode(buf2); +}, (str) => { + const encoder = new TextEncoder(); + return encoder.encode(str.substring(1)); +}); +var ascii = createCodec("ascii", "a", (buf2) => { + let string2 = "a"; + for (let i = 0; i < buf2.length; i++) { + string2 += String.fromCharCode(buf2[i]); + } + return string2; +}, (str) => { + str = str.substring(1); + const buf2 = allocUnsafe(str.length); + for (let i = 0; i < str.length; i++) { + buf2[i] = str.charCodeAt(i); + } + return buf2; +}); +var BASES = { + utf8: string, + "utf-8": string, + hex: bases.base16, + latin1: ascii, + ascii, + binary: ascii, + ...bases +}; +var bases_default = BASES; + +// node_modules/uint8arrays/dist/src/from-string.js +function fromString3(string2, encoding = "utf8") { + const base3 = bases_default[encoding]; + if (base3 == 
null) { + throw new Error(`Unsupported encoding "${encoding}"`); + } + if ((encoding === "utf8" || encoding === "utf-8") && globalThis.Buffer != null && globalThis.Buffer.from != null) { + return asUint8Array(globalThis.Buffer.from(string2, "utf-8")); + } + return base3.decoder.decode(`${base3.prefix}${string2}`); +} + +// node_modules/protons-runtime/dist/src/utils/pool.js +function pool(size) { + const SIZE = size ?? 8192; + const MAX = SIZE >>> 1; + let slab; + let offset = SIZE; + return function poolAlloc(size2) { + if (size2 < 1 || size2 > MAX) { + return allocUnsafe(size2); + } + if (offset + size2 > SIZE) { + slab = allocUnsafe(SIZE); + offset = 0; + } + const buf2 = slab.subarray(offset, offset += size2); + if ((offset & 7) !== 0) { + offset = (offset | 7) + 1; + } + return buf2; + }; +} + +// node_modules/protons-runtime/dist/src/utils/writer.js +var Op = class { + /** + * Function to call + */ + fn; + /** + * Value byte length + */ + len; + /** + * Next operation + */ + next; + /** + * Value to write + */ + val; + constructor(fn, len, val) { + this.fn = fn; + this.len = len; + this.next = void 0; + this.val = val; + } +}; +function noop2() { +} +var State = class { + /** + * Current head + */ + head; + /** + * Current tail + */ + tail; + /** + * Current buffer length + */ + len; + /** + * Next state + */ + next; + constructor(writer) { + this.head = writer.head; + this.tail = writer.tail; + this.len = writer.len; + this.next = writer.states; + } +}; +var bufferPool = pool(); +function alloc3(size) { + if (globalThis.Buffer != null) { + return allocUnsafe(size); + } + return bufferPool(size); +} +var Uint8ArrayWriter = class { + /** + * Current length + */ + len; + /** + * Operations head + */ + head; + /** + * Operations tail + */ + tail; + /** + * Linked forked states + */ + states; + constructor() { + this.len = 0; + this.head = new Op(noop2, 0, 0); + this.tail = this.head; + this.states = null; + } + /** + * Pushes a new operation to the queue + */ + 
_push(fn, len, val) { + this.tail = this.tail.next = new Op(fn, len, val); + this.len += len; + return this; + } + /** + * Writes an unsigned 32 bit value as a varint + */ + uint32(value) { + this.len += (this.tail = this.tail.next = new VarintOp((value = value >>> 0) < 128 ? 1 : value < 16384 ? 2 : value < 2097152 ? 3 : value < 268435456 ? 4 : 5, value)).len; + return this; + } + /** + * Writes a signed 32 bit value as a varint` + */ + int32(value) { + return value < 0 ? this._push(writeVarint64, 10, LongBits.fromNumber(value)) : this.uint32(value); + } + /** + * Writes a 32 bit value as a varint, zig-zag encoded + */ + sint32(value) { + return this.uint32((value << 1 ^ value >> 31) >>> 0); + } + /** + * Writes an unsigned 64 bit value as a varint + */ + uint64(value) { + const bits = LongBits.fromBigInt(value); + return this._push(writeVarint64, bits.length(), bits); + } + /** + * Writes an unsigned 64 bit value as a varint + */ + uint64Number(value) { + const bits = LongBits.fromNumber(value); + return this._push(writeVarint64, bits.length(), bits); + } + /** + * Writes an unsigned 64 bit value as a varint + */ + uint64String(value) { + return this.uint64(BigInt(value)); + } + /** + * Writes a signed 64 bit value as a varint + */ + int64(value) { + return this.uint64(value); + } + /** + * Writes a signed 64 bit value as a varint + */ + int64Number(value) { + return this.uint64Number(value); + } + /** + * Writes a signed 64 bit value as a varint + */ + int64String(value) { + return this.uint64String(value); + } + /** + * Writes a signed 64 bit value as a varint, zig-zag encoded + */ + sint64(value) { + const bits = LongBits.fromBigInt(value).zzEncode(); + return this._push(writeVarint64, bits.length(), bits); + } + /** + * Writes a signed 64 bit value as a varint, zig-zag encoded + */ + sint64Number(value) { + const bits = LongBits.fromNumber(value).zzEncode(); + return this._push(writeVarint64, bits.length(), bits); + } + /** + * Writes a signed 64 bit value as 
a varint, zig-zag encoded + */ + sint64String(value) { + return this.sint64(BigInt(value)); + } + /** + * Writes a boolish value as a varint + */ + bool(value) { + return this._push(writeByte, 1, value ? 1 : 0); + } + /** + * Writes an unsigned 32 bit value as fixed 32 bits + */ + fixed32(value) { + return this._push(writeFixed32, 4, value >>> 0); + } + /** + * Writes a signed 32 bit value as fixed 32 bits + */ + sfixed32(value) { + return this.fixed32(value); + } + /** + * Writes an unsigned 64 bit value as fixed 64 bits + */ + fixed64(value) { + const bits = LongBits.fromBigInt(value); + return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi); + } + /** + * Writes an unsigned 64 bit value as fixed 64 bits + */ + fixed64Number(value) { + const bits = LongBits.fromNumber(value); + return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi); + } + /** + * Writes an unsigned 64 bit value as fixed 64 bits + */ + fixed64String(value) { + return this.fixed64(BigInt(value)); + } + /** + * Writes a signed 64 bit value as fixed 64 bits + */ + sfixed64(value) { + return this.fixed64(value); + } + /** + * Writes a signed 64 bit value as fixed 64 bits + */ + sfixed64Number(value) { + return this.fixed64Number(value); + } + /** + * Writes a signed 64 bit value as fixed 64 bits + */ + sfixed64String(value) { + return this.fixed64String(value); + } + /** + * Writes a float (32 bit) + */ + float(value) { + return this._push(writeFloatLE, 4, value); + } + /** + * Writes a double (64 bit float). 
+ * + * @function + * @param {number} value - Value to write + * @returns {Writer} `this` + */ + double(value) { + return this._push(writeDoubleLE, 8, value); + } + /** + * Writes a sequence of bytes + */ + bytes(value) { + const len = value.length >>> 0; + if (len === 0) { + return this._push(writeByte, 1, 0); + } + return this.uint32(len)._push(writeBytes, len, value); + } + /** + * Writes a string + */ + string(value) { + const len = length3(value); + return len !== 0 ? this.uint32(len)._push(write, len, value) : this._push(writeByte, 1, 0); + } + /** + * Forks this writer's state by pushing it to a stack. + * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state. + */ + fork() { + this.states = new State(this); + this.head = this.tail = new Op(noop2, 0, 0); + this.len = 0; + return this; + } + /** + * Resets this instance to the last state + */ + reset() { + if (this.states != null) { + this.head = this.states.head; + this.tail = this.states.tail; + this.len = this.states.len; + this.states = this.states.next; + } else { + this.head = this.tail = new Op(noop2, 0, 0); + this.len = 0; + } + return this; + } + /** + * Resets to the last state and appends the fork state's current write length as a varint followed by its operations. 
+ */ + ldelim() { + const head = this.head; + const tail = this.tail; + const len = this.len; + this.reset().uint32(len); + if (len !== 0) { + this.tail.next = head.next; + this.tail = tail; + this.len += len; + } + return this; + } + /** + * Finishes the write operation + */ + finish() { + let head = this.head.next; + const buf2 = alloc3(this.len); + let pos = 0; + while (head != null) { + head.fn(head.val, buf2, pos); + pos += head.len; + head = head.next; + } + return buf2; + } +}; +function writeByte(val, buf2, pos) { + buf2[pos] = val & 255; +} +function writeVarint32(val, buf2, pos) { + while (val > 127) { + buf2[pos++] = val & 127 | 128; + val >>>= 7; + } + buf2[pos] = val; +} +var VarintOp = class extends Op { + next; + constructor(len, val) { + super(writeVarint32, len, val); + this.next = void 0; + } +}; +function writeVarint64(val, buf2, pos) { + while (val.hi !== 0) { + buf2[pos++] = val.lo & 127 | 128; + val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0; + val.hi >>>= 7; + } + while (val.lo > 127) { + buf2[pos++] = val.lo & 127 | 128; + val.lo = val.lo >>> 7; + } + buf2[pos++] = val.lo; +} +function writeFixed32(val, buf2, pos) { + buf2[pos] = val & 255; + buf2[pos + 1] = val >>> 8 & 255; + buf2[pos + 2] = val >>> 16 & 255; + buf2[pos + 3] = val >>> 24; +} +function writeBytes(val, buf2, pos) { + buf2.set(val, pos); +} +if (globalThis.Buffer != null) { + Uint8ArrayWriter.prototype.bytes = function(value) { + const len = value.length >>> 0; + this.uint32(len); + if (len > 0) { + this._push(writeBytesBuffer, len, value); + } + return this; + }; + Uint8ArrayWriter.prototype.string = function(value) { + const len = globalThis.Buffer.byteLength(value); + this.uint32(len); + if (len > 0) { + this._push(writeStringBuffer, len, value); + } + return this; + }; +} +function writeBytesBuffer(val, buf2, pos) { + buf2.set(val, pos); +} +function writeStringBuffer(val, buf2, pos) { + if (val.length < 40) { + write(val, buf2, pos); + } else if (buf2.utf8Write != null) { + 
buf2.utf8Write(val, pos); + } else { + buf2.set(fromString3(val), pos); + } +} +function createWriter() { + return new Uint8ArrayWriter(); +} + +// node_modules/protons-runtime/dist/src/encode.js +function encodeMessage(message2, codec) { + const w = createWriter(); + codec.encode(message2, w, { + lengthDelimited: false + }); + return w.finish(); +} + +// node_modules/protons-runtime/dist/src/codec.js +var CODEC_TYPES; +(function(CODEC_TYPES2) { + CODEC_TYPES2[CODEC_TYPES2["VARINT"] = 0] = "VARINT"; + CODEC_TYPES2[CODEC_TYPES2["BIT64"] = 1] = "BIT64"; + CODEC_TYPES2[CODEC_TYPES2["LENGTH_DELIMITED"] = 2] = "LENGTH_DELIMITED"; + CODEC_TYPES2[CODEC_TYPES2["START_GROUP"] = 3] = "START_GROUP"; + CODEC_TYPES2[CODEC_TYPES2["END_GROUP"] = 4] = "END_GROUP"; + CODEC_TYPES2[CODEC_TYPES2["BIT32"] = 5] = "BIT32"; +})(CODEC_TYPES || (CODEC_TYPES = {})); +function createCodec2(name4, type, encode12, decode15) { + return { + name: name4, + type, + encode: encode12, + decode: decode15 + }; +} + +// node_modules/protons-runtime/dist/src/codecs/enum.js +function enumeration(v) { + function findValue(val) { + if (v[val.toString()] == null) { + throw new Error("Invalid enum value"); + } + return v[val]; + } + const encode12 = function enumEncode(val, writer) { + const enumValue = findValue(val); + writer.int32(enumValue); + }; + const decode15 = function enumDecode(reader) { + const val = reader.int32(); + return findValue(val); + }; + return createCodec2("enum", CODEC_TYPES.VARINT, encode12, decode15); +} + +// node_modules/protons-runtime/dist/src/codecs/message.js +function message(encode12, decode15) { + return createCodec2("message", CODEC_TYPES.LENGTH_DELIMITED, encode12, decode15); +} + +// node_modules/ipfs-unixfs/dist/src/unixfs.js +var Data; +(function(Data2) { + let DataType; + (function(DataType2) { + DataType2["Raw"] = "Raw"; + DataType2["Directory"] = "Directory"; + DataType2["File"] = "File"; + DataType2["Metadata"] = "Metadata"; + DataType2["Symlink"] = "Symlink"; + 
DataType2["HAMTShard"] = "HAMTShard"; + })(DataType = Data2.DataType || (Data2.DataType = {})); + let __DataTypeValues; + (function(__DataTypeValues2) { + __DataTypeValues2[__DataTypeValues2["Raw"] = 0] = "Raw"; + __DataTypeValues2[__DataTypeValues2["Directory"] = 1] = "Directory"; + __DataTypeValues2[__DataTypeValues2["File"] = 2] = "File"; + __DataTypeValues2[__DataTypeValues2["Metadata"] = 3] = "Metadata"; + __DataTypeValues2[__DataTypeValues2["Symlink"] = 4] = "Symlink"; + __DataTypeValues2[__DataTypeValues2["HAMTShard"] = 5] = "HAMTShard"; + })(__DataTypeValues || (__DataTypeValues = {})); + (function(DataType2) { + DataType2.codec = () => { + return enumeration(__DataTypeValues); + }; + })(DataType = Data2.DataType || (Data2.DataType = {})); + let _codec; + Data2.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.Type != null) { + w.uint32(8); + Data2.DataType.codec().encode(obj.Type, w); + } + if (obj.Data != null) { + w.uint32(18); + w.bytes(obj.Data); + } + if (obj.filesize != null) { + w.uint32(24); + w.uint64(obj.filesize); + } + if (obj.blocksizes != null) { + for (const value of obj.blocksizes) { + w.uint32(32); + w.uint64(value); + } + } + if (obj.hashType != null) { + w.uint32(40); + w.uint64(obj.hashType); + } + if (obj.fanout != null) { + w.uint32(48); + w.uint64(obj.fanout); + } + if (obj.mode != null) { + w.uint32(56); + w.uint32(obj.mode); + } + if (obj.mtime != null) { + w.uint32(66); + UnixTime.codec().encode(obj.mtime, w); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length4) => { + const obj = { + blocksizes: [] + }; + const end = length4 == null ? 
reader.len : reader.pos + length4; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.Type = Data2.DataType.codec().decode(reader); + break; + case 2: + obj.Data = reader.bytes(); + break; + case 3: + obj.filesize = reader.uint64(); + break; + case 4: + obj.blocksizes.push(reader.uint64()); + break; + case 5: + obj.hashType = reader.uint64(); + break; + case 6: + obj.fanout = reader.uint64(); + break; + case 7: + obj.mode = reader.uint32(); + break; + case 8: + obj.mtime = UnixTime.codec().decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Data2.encode = (obj) => { + return encodeMessage(obj, Data2.codec()); + }; + Data2.decode = (buf2) => { + return decodeMessage(buf2, Data2.codec()); + }; +})(Data || (Data = {})); +var UnixTime; +(function(UnixTime2) { + let _codec; + UnixTime2.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.Seconds != null) { + w.uint32(8); + w.int64(obj.Seconds); + } + if (obj.FractionalNanoseconds != null) { + w.uint32(21); + w.fixed32(obj.FractionalNanoseconds); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length4) => { + const obj = {}; + const end = length4 == null ? 
reader.len : reader.pos + length4; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.Seconds = reader.int64(); + break; + case 2: + obj.FractionalNanoseconds = reader.fixed32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + UnixTime2.encode = (obj) => { + return encodeMessage(obj, UnixTime2.codec()); + }; + UnixTime2.decode = (buf2) => { + return decodeMessage(buf2, UnixTime2.codec()); + }; +})(UnixTime || (UnixTime = {})); +var Metadata; +(function(Metadata2) { + let _codec; + Metadata2.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.MimeType != null) { + w.uint32(10); + w.string(obj.MimeType); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length4) => { + const obj = {}; + const end = length4 == null ? reader.len : reader.pos + length4; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.MimeType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Metadata2.encode = (obj) => { + return encodeMessage(obj, Metadata2.codec()); + }; + Metadata2.decode = (buf2) => { + return decodeMessage(buf2, Metadata2.codec()); + }; +})(Metadata || (Metadata = {})); + +// node_modules/ipfs-unixfs/dist/src/index.js +var types = { + Raw: "raw", + Directory: "directory", + File: "file", + Metadata: "metadata", + Symlink: "symlink", + HAMTShard: "hamt-sharded-directory" +}; +var dirTypes = [ + "directory", + "hamt-sharded-directory" +]; +var DEFAULT_FILE_MODE = parseInt("0644", 8); +var DEFAULT_DIRECTORY_MODE = parseInt("0755", 8); +var UnixFS = class _UnixFS { + /** + * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md + */ + static unmarshal(marshaled) { + const message2 = 
Data.decode(marshaled); + const data = new _UnixFS({ + type: types[message2.Type != null ? message2.Type.toString() : "File"], + data: message2.Data, + blockSizes: message2.blocksizes, + mode: message2.mode, + mtime: message2.mtime != null ? { + secs: message2.mtime.Seconds ?? 0n, + nsecs: message2.mtime.FractionalNanoseconds + } : void 0, + fanout: message2.fanout + }); + data._originalMode = message2.mode ?? 0; + return data; + } + type; + data; + blockSizes; + hashType; + fanout; + mtime; + _mode; + _originalMode; + constructor(options = { + type: "file" + }) { + const { type, data, blockSizes, hashType, fanout, mtime, mode } = options; + if (type != null && !Object.values(types).includes(type)) { + throw (0, import_err_code.default)(new Error("Type: " + type + " is not valid"), "ERR_INVALID_TYPE"); + } + this.type = type ?? "file"; + this.data = data; + this.hashType = hashType; + this.fanout = fanout; + this.blockSizes = blockSizes ?? []; + this._originalMode = 0; + this.mode = mode; + this.mtime = mtime; + } + set mode(mode) { + if (mode == null) { + this._mode = this.isDirectory() ? 
DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE; + } else { + this._mode = mode & 4095; + } + } + get mode() { + return this._mode; + } + isDirectory() { + return dirTypes.includes(this.type); + } + addBlockSize(size) { + this.blockSizes.push(size); + } + removeBlockSize(index) { + this.blockSizes.splice(index, 1); + } + /** + * Returns `0n` for directories or `data.length + sum(blockSizes)` for everything else + */ + fileSize() { + if (this.isDirectory()) { + return 0n; + } + let sum = 0n; + this.blockSizes.forEach((size) => { + sum += size; + }); + if (this.data != null) { + sum += BigInt(this.data.length); + } + return sum; + } + /** + * encode to protobuf Uint8Array + */ + marshal() { + let type; + switch (this.type) { + case "raw": + type = Data.DataType.Raw; + break; + case "directory": + type = Data.DataType.Directory; + break; + case "file": + type = Data.DataType.File; + break; + case "metadata": + type = Data.DataType.Metadata; + break; + case "symlink": + type = Data.DataType.Symlink; + break; + case "hamt-sharded-directory": + type = Data.DataType.HAMTShard; + break; + default: + throw (0, import_err_code.default)(new Error(`Type: ${type} is not valid`), "ERR_INVALID_TYPE"); + } + let data = this.data; + if (this.data == null || this.data.length === 0) { + data = void 0; + } + let mode; + if (this.mode != null) { + mode = this._originalMode & 4294963200 | (this.mode ?? 0); + if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) { + mode = void 0; + } + if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) { + mode = void 0; + } + } + let mtime; + if (this.mtime != null) { + mtime = { + Seconds: this.mtime.secs, + FractionalNanoseconds: this.mtime.nsecs + }; + } + return Data.encode({ + Type: type, + Data: data, + filesize: this.isDirectory() ? 
void 0 : this.fileSize(), + blocksizes: this.blockSizes, + hashType: this.hashType, + fanout: this.fanout, + mode, + mtime + }); + } +}; + +// node_modules/progress-events/dist/src/index.js +var CustomProgressEvent = class extends Event { + constructor(type, detail) { + super(type); + this.detail = detail; + } +}; + +// node_modules/ipfs-unixfs-importer/dist/src/utils/persist.js +var persist = async (buffer2, blockstore, options) => { + if (options.codec == null) { + options.codec = src_exports2; + } + const multihash = await sha256.digest(buffer2); + const cid = CID2.create(options.cidVersion, options.codec.code, multihash); + await blockstore.put(cid, buffer2, options); + return cid; +}; + +// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/buffer-importer.js +function defaultBufferImporter(options) { + return async function* bufferImporter(file, blockstore) { + let bytesWritten = 0n; + for await (let block of file.content) { + yield async () => { + var _a; + let unixfs2; + const opts = { + codec: src_exports2, + cidVersion: options.cidVersion, + onProgress: options.onProgress + }; + if (options.rawLeaves) { + opts.codec = raw_exports; + opts.cidVersion = 1; + } else { + unixfs2 = new UnixFS({ + type: options.leafType, + data: block + }); + block = encode7({ + Data: unixfs2.marshal(), + Links: [] + }); + } + const cid = await persist(block, blockstore, opts); + bytesWritten += BigInt(block.byteLength); + (_a = options.onProgress) == null ? 
void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:write", { + bytesWritten, + cid, + path: file.path + })); + return { + cid, + unixfs: unixfs2, + size: BigInt(block.length), + block + }; + }; + } + }; +} + +// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js +var import_err_code2 = __toESM(require_err_code(), 1); + +// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/dir.js +var dirBuilder = async (dir, blockstore, options) => { + const unixfs2 = new UnixFS({ + type: "directory", + mtime: dir.mtime, + mode: dir.mode + }); + const block = encode7(prepare({ Data: unixfs2.marshal() })); + const cid = await persist(block, blockstore, options); + const path6 = dir.path; + return { + cid, + path: path6, + unixfs: unixfs2, + size: BigInt(block.length), + originalPath: dir.originalPath, + block + }; +}; + +// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/file.js +async function* buildFileBatch(file, blockstore, options) { + let count = -1; + let previous; + for await (const entry of parallelBatch(options.bufferImporter(file, blockstore), options.blockWriteConcurrency)) { + count++; + if (count === 0) { + previous = { + ...entry, + single: true + }; + continue; + } else if (count === 1 && previous != null) { + yield { + ...previous, + block: void 0, + single: void 0 + }; + previous = void 0; + } + yield { + ...entry, + block: void 0 + }; + } + if (previous != null) { + yield previous; + } +} +function isSingleBlockImport(result) { + return result.single === true; +} +var reduce = (file, blockstore, options) => { + const reducer = async function(leaves) { + var _a, _b; + if (leaves.length === 1 && isSingleBlockImport(leaves[0]) && options.reduceSingleLeafToSelf) { + const leaf = leaves[0]; + let node2 = leaf.block; + if (isSingleBlockImport(leaf) && (file.mtime !== void 0 || file.mode !== void 0)) { + leaf.unixfs = new UnixFS({ + type: "file", + mtime: file.mtime, + mode: file.mode, + data: leaf.block + }); + 
node2 = { Data: leaf.unixfs.marshal(), Links: [] }; + leaf.block = encode7(prepare(node2)); + leaf.cid = await persist(leaf.block, blockstore, { + ...options, + cidVersion: options.cidVersion + }); + leaf.size = BigInt(leaf.block.length); + } + (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", { + cid: leaf.cid, + path: leaf.originalPath + })); + return { + cid: leaf.cid, + path: file.path, + unixfs: leaf.unixfs, + size: leaf.size, + originalPath: leaf.originalPath + }; + } + const f = new UnixFS({ + type: "file", + mtime: file.mtime, + mode: file.mode + }); + const links = leaves.filter((leaf) => { + var _a2, _b2; + if (leaf.cid.code === code3 && leaf.size > 0) { + return true; + } + if (leaf.unixfs != null && leaf.unixfs.data == null && leaf.unixfs.fileSize() > 0n) { + return true; + } + return Boolean((_b2 = (_a2 = leaf.unixfs) == null ? void 0 : _a2.data) == null ? void 0 : _b2.length); + }).map((leaf) => { + var _a2; + if (leaf.cid.code === code3) { + f.addBlockSize(leaf.size); + return { + Name: "", + Tsize: Number(leaf.size), + Hash: leaf.cid + }; + } + if (leaf.unixfs == null || leaf.unixfs.data == null) { + f.addBlockSize(((_a2 = leaf.unixfs) == null ? void 0 : _a2.fileSize()) ?? 0n); + } else { + f.addBlockSize(BigInt(leaf.unixfs.data.length)); + } + return { + Name: "", + Tsize: Number(leaf.size), + Hash: leaf.cid + }; + }); + const node = { + Data: f.marshal(), + Links: links + }; + const block = encode7(prepare(node)); + const cid = await persist(block, blockstore, options); + (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", { + cid, + path: file.originalPath + })); + return { + cid, + path: file.path, + unixfs: f, + size: BigInt(block.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 
0), 0)), + originalPath: file.originalPath, + block + }; + }; + return reducer; +}; +var fileBuilder = async (file, block, options) => { + return options.layout(buildFileBatch(file, block, options), reduce(file, block, options)); +}; + +// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js +function isIterable(thing) { + return Symbol.iterator in thing; +} +function isAsyncIterable5(thing) { + return Symbol.asyncIterator in thing; +} +function contentAsAsyncIterable(content) { + try { + if (content instanceof Uint8Array) { + return async function* () { + yield content; + }(); + } else if (isIterable(content)) { + return async function* () { + yield* content; + }(); + } else if (isAsyncIterable5(content)) { + return content; + } + } catch { + throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT"); + } + throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT"); +} +function defaultDagBuilder(options) { + return async function* dagBuilder(source, blockstore) { + for await (const entry of source) { + let originalPath; + if (entry.path != null) { + originalPath = entry.path; + entry.path = entry.path.split("/").filter((path6) => path6 != null && path6 !== ".").join("/"); + } + if (isFileCandidate(entry)) { + const file = { + path: entry.path, + mtime: entry.mtime, + mode: entry.mode, + content: async function* () { + var _a; + let bytesRead = 0n; + for await (const chunk of options.chunker(options.chunkValidator(contentAsAsyncIterable(entry.content)))) { + const currentChunkSize = BigInt(chunk.byteLength); + bytesRead += currentChunkSize; + (_a = options.onProgress) == null ? 
void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:read", { + bytesRead, + chunkSize: currentChunkSize, + path: entry.path + })); + yield chunk; + } + }(), + originalPath + }; + yield async () => fileBuilder(file, blockstore, options); + } else if (entry.path != null) { + const dir = { + path: entry.path, + mtime: entry.mtime, + mode: entry.mode, + originalPath + }; + yield async () => dirBuilder(dir, blockstore, options); + } else { + throw new Error("Import candidate must have content or path or both"); + } + } + }; +} +function isFileCandidate(entry) { + return entry.content != null; +} + +// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.js +var import_err_code3 = __toESM(require_err_code(), 1); +var defaultChunkValidator = () => { + return async function* validateChunks(source) { + for await (const content of source) { + if (content.length === void 0) { + throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT"); + } + if (typeof content === "string" || content instanceof String) { + yield fromString3(content.toString()); + } else if (Array.isArray(content)) { + yield Uint8Array.from(content); + } else if (content instanceof Uint8Array) { + yield content; + } else { + throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT"); + } + } + }; +}; + +// node_modules/ipfs-unixfs-importer/dist/src/layout/balanced.js +var DEFAULT_MAX_CHILDREN_PER_NODE = 174; +function balanced(options) { + const maxChildrenPerNode = (options == null ? void 0 : options.maxChildrenPerNode) ?? 
DEFAULT_MAX_CHILDREN_PER_NODE; + return async function balancedLayout(source, reduce2) { + const roots = []; + for await (const chunked of src_default5(source, maxChildrenPerNode)) { + roots.push(await reduce2(chunked)); + } + if (roots.length > 1) { + return balancedLayout(roots, reduce2); + } + return roots[0]; + }; +} + +// node_modules/ipfs-unixfs-importer/dist/src/dir.js +var Dir = class { + options; + root; + dir; + path; + dirty; + flat; + parent; + parentKey; + unixfs; + mode; + mtime; + cid; + size; + nodeSize; + constructor(props, options) { + this.options = options ?? {}; + this.root = props.root; + this.dir = props.dir; + this.path = props.path; + this.dirty = props.dirty; + this.flat = props.flat; + this.parent = props.parent; + this.parentKey = props.parentKey; + this.unixfs = props.unixfs; + this.mode = props.mode; + this.mtime = props.mtime; + } +}; +var CID_V0 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn"); +var CID_V1 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi"); + +// node_modules/ipfs-unixfs-importer/dist/src/dir-flat.js +var DirFlat = class extends Dir { + _children; + constructor(props, options) { + super(props, options); + this._children = /* @__PURE__ */ new Map(); + } + async put(name4, value) { + this.cid = void 0; + this.size = void 0; + this.nodeSize = void 0; + this._children.set(name4, value); + } + async get(name4) { + return Promise.resolve(this._children.get(name4)); + } + childCount() { + return this._children.size; + } + directChildrenCount() { + return this.childCount(); + } + onlyChild() { + return this._children.values().next().value; + } + async *eachChildSeries() { + for (const [key, child] of this._children.entries()) { + yield { + key, + child + }; + } + } + estimateNodeSize() { + if (this.nodeSize !== void 0) { + return this.nodeSize; + } + this.nodeSize = 0; + for (const [name4, child] of this._children.entries()) { + if (child.size != null && child.cid != null) { + this.nodeSize += 
name4.length + (this.options.cidVersion === 1 ? CID_V1.bytes.byteLength : CID_V0.bytes.byteLength); + } + } + return this.nodeSize; + } + async *flush(block) { + const links = []; + for (const [name4, child] of this._children.entries()) { + let result = child; + if (child instanceof Dir) { + for await (const entry of child.flush(block)) { + result = entry; + yield entry; + } + } + if (result.size != null && result.cid != null) { + links.push({ + Name: name4, + Tsize: Number(result.size), + Hash: result.cid + }); + } + } + const unixfs2 = new UnixFS({ + type: "directory", + mtime: this.mtime, + mode: this.mode + }); + const node = { Data: unixfs2.marshal(), Links: links }; + const buffer2 = encode7(prepare(node)); + const cid = await persist(buffer2, block, this.options); + const size = buffer2.length + node.Links.reduce( + /** + * @param {number} acc + * @param {PBLink} curr + */ + (acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize), + 0 + ); + this.cid = cid; + this.size = size; + yield { + cid, + unixfs: unixfs2, + path: this.path, + size: BigInt(size) + }; + } +}; + +// node_modules/@multiformats/murmur3/src/index.js +var import_murmurhash3js_revisited = __toESM(require_murmurhash3js_revisited(), 1); +function fromNumberTo32BitBuf(number) { + const bytes = new Array(4); + for (let i = 0; i < 4; i++) { + bytes[i] = number & 255; + number = number >> 8; + } + return new Uint8Array(bytes); +} +var murmur332 = from3({ + name: "murmur3-32", + code: 35, + encode: (input) => fromNumberTo32BitBuf(import_murmurhash3js_revisited.default.x86.hash32(input)) +}); +var murmur3128 = from3({ + name: "murmur3-128", + code: 34, + encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input)) +}); +var murmur364 = from3({ + name: "murmur3-x64-64", + code: 34, + encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input)).subarray(0, 8) +}); + +// node_modules/hamt-sharding/dist/src/bucket.js +var 
import_sparse_array = __toESM(require_sparse_array(), 1); +var Bucket = class _Bucket { + constructor(options, parent, posAtParent = 0) { + this._options = options; + this._popCount = 0; + this._parent = parent; + this._posAtParent = posAtParent; + this._children = new import_sparse_array.default(); + this.key = null; + } + async put(key, value) { + const place = await this._findNewBucketAndPos(key); + await place.bucket._putAt(place, key, value); + } + async get(key) { + const child = await this._findChild(key); + if (child != null) { + return child.value; + } + } + async del(key) { + const place = await this._findPlace(key); + const child = place.bucket._at(place.pos); + if (child != null && child.key === key) { + place.bucket._delAt(place.pos); + } + } + leafCount() { + const children = this._children.compactArray(); + return children.reduce((acc, child) => { + if (child instanceof _Bucket) { + return acc + child.leafCount(); + } + return acc + 1; + }, 0); + } + childrenCount() { + return this._children.length; + } + onlyChild() { + return this._children.get(0); + } + *eachLeafSeries() { + const children = this._children.compactArray(); + for (const child of children) { + if (child instanceof _Bucket) { + yield* child.eachLeafSeries(); + } else { + yield child; + } + } + } + serialize(map2, reduce2) { + const acc = []; + return reduce2(this._children.reduce((acc2, child, index) => { + if (child != null) { + if (child instanceof _Bucket) { + acc2.push(child.serialize(map2, reduce2)); + } else { + acc2.push(map2(child, index)); + } + } + return acc2; + }, acc)); + } + async asyncTransform(asyncMap, asyncReduce) { + return await asyncTransformBucket(this, asyncMap, asyncReduce); + } + toJSON() { + return this.serialize(mapNode, reduceNodes); + } + prettyPrint() { + return JSON.stringify(this.toJSON(), null, " "); + } + tableSize() { + return Math.pow(2, this._options.bits); + } + async _findChild(key) { + const result = await this._findPlace(key); + const child = 
result.bucket._at(result.pos); + if (child instanceof _Bucket) { + return void 0; + } + if (child != null && child.key === key) { + return child; + } + } + async _findPlace(key) { + const hashValue = this._options.hash(typeof key === "string" ? fromString3(key) : key); + const index = await hashValue.take(this._options.bits); + const child = this._children.get(index); + if (child instanceof _Bucket) { + return await child._findPlace(hashValue); + } + return { + bucket: this, + pos: index, + hash: hashValue, + existingChild: child + }; + } + async _findNewBucketAndPos(key) { + const place = await this._findPlace(key); + if (place.existingChild != null && place.existingChild.key !== key) { + const bucket = new _Bucket(this._options, place.bucket, place.pos); + place.bucket._putObjectAt(place.pos, bucket); + const newPlace = await bucket._findPlace(place.existingChild.hash); + newPlace.bucket._putAt(newPlace, place.existingChild.key, place.existingChild.value); + return await bucket._findNewBucketAndPos(place.hash); + } + return place; + } + _putAt(place, key, value) { + this._putObjectAt(place.pos, { + key, + value, + hash: place.hash + }); + } + _putObjectAt(pos, object) { + if (this._children.get(pos) == null) { + this._popCount++; + } + this._children.set(pos, object); + } + _delAt(pos) { + if (pos === -1) { + throw new Error("Invalid position"); + } + if (this._children.get(pos) != null) { + this._popCount--; + } + this._children.unset(pos); + this._level(); + } + _level() { + if (this._parent != null && this._popCount <= 1) { + if (this._popCount === 1) { + const onlyChild = this._children.find(exists); + if (onlyChild != null && !(onlyChild instanceof _Bucket)) { + const hash = onlyChild.hash; + hash.untake(this._options.bits); + const place = { + pos: this._posAtParent, + hash, + bucket: this._parent + }; + this._parent._putAt(place, onlyChild.key, onlyChild.value); + } + } else { + this._parent._delAt(this._posAtParent); + } + } + } + _at(index) { + return 
this._children.get(index); + } +}; +function exists(o) { + return Boolean(o); +} +function mapNode(node, _) { + return node.key; +} +function reduceNodes(nodes) { + return nodes; +} +async function asyncTransformBucket(bucket, asyncMap, asyncReduce) { + const output = []; + for (const child of bucket._children.compactArray()) { + if (child instanceof Bucket) { + await asyncTransformBucket(child, asyncMap, asyncReduce); + } else { + const mappedChildren = await asyncMap(child); + output.push({ + bitField: bucket._children.bitField(), + children: mappedChildren + }); + } + } + return await asyncReduce(output); +} + +// node_modules/hamt-sharding/dist/src/consumable-buffer.js +var START_MASKS = [ + 255, + 254, + 252, + 248, + 240, + 224, + 192, + 128 +]; +var STOP_MASKS = [ + 1, + 3, + 7, + 15, + 31, + 63, + 127, + 255 +]; +var ConsumableBuffer = class { + constructor(value) { + this._value = value; + this._currentBytePos = value.length - 1; + this._currentBitPos = 7; + } + availableBits() { + return this._currentBitPos + 1 + this._currentBytePos * 8; + } + totalBits() { + return this._value.length * 8; + } + take(bits) { + let pendingBits = bits; + let result = 0; + while (pendingBits > 0 && this._haveBits()) { + const byte = this._value[this._currentBytePos]; + const availableBits = this._currentBitPos + 1; + const taking = Math.min(availableBits, pendingBits); + const value = byteBitsToInt(byte, availableBits - taking, taking); + result = (result << taking) + value; + pendingBits -= taking; + this._currentBitPos -= taking; + if (this._currentBitPos < 0) { + this._currentBitPos = 7; + this._currentBytePos--; + } + } + return result; + } + untake(bits) { + this._currentBitPos += bits; + while (this._currentBitPos > 7) { + this._currentBitPos -= 8; + this._currentBytePos += 1; + } + } + _haveBits() { + return this._currentBytePos >= 0; + } +}; +function byteBitsToInt(byte, start, length4) { + const mask = maskFor(start, length4); + return (byte & mask) >>> start; +} 
+function maskFor(start, length4) { + return START_MASKS[start] & STOP_MASKS[Math.min(length4 + start - 1, 7)]; +} + +// node_modules/hamt-sharding/dist/src/consumable-hash.js +function wrapHash(hashFn2) { + function hashing(value) { + if (value instanceof InfiniteHash) { + return value; + } else { + return new InfiniteHash(value, hashFn2); + } + } + return hashing; +} +var InfiniteHash = class { + constructor(value, hashFn2) { + if (!(value instanceof Uint8Array)) { + throw new Error("can only hash Uint8Arrays"); + } + this._value = value; + this._hashFn = hashFn2; + this._depth = -1; + this._availableBits = 0; + this._currentBufferIndex = 0; + this._buffers = []; + } + async take(bits) { + let pendingBits = bits; + while (this._availableBits < pendingBits) { + await this._produceMoreBits(); + } + let result = 0; + while (pendingBits > 0) { + const hash = this._buffers[this._currentBufferIndex]; + const available = Math.min(hash.availableBits(), pendingBits); + const took = hash.take(available); + result = (result << available) + took; + pendingBits -= available; + this._availableBits -= available; + if (hash.availableBits() === 0) { + this._currentBufferIndex++; + } + } + return result; + } + untake(bits) { + let pendingBits = bits; + while (pendingBits > 0) { + const hash = this._buffers[this._currentBufferIndex]; + const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits); + hash.untake(availableForUntake); + pendingBits -= availableForUntake; + this._availableBits += availableForUntake; + if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) { + this._depth--; + this._currentBufferIndex--; + } + } + } + async _produceMoreBits() { + this._depth++; + const value = this._depth > 0 ? 
concat2([this._value, Uint8Array.from([this._depth])]) : this._value; + const hashValue = await this._hashFn(value); + const buffer2 = new ConsumableBuffer(hashValue); + this._buffers.push(buffer2); + this._availableBits += buffer2.availableBits(); + } +}; + +// node_modules/hamt-sharding/dist/src/index.js +function createHAMT(options) { + if (options == null || options.hashFn == null) { + throw new Error("please define an options.hashFn"); + } + const bucketOptions = { + bits: options.bits ?? 8, + hash: wrapHash(options.hashFn) + }; + return new Bucket(bucketOptions); +} + +// node_modules/ipfs-unixfs-importer/dist/src/dir-sharded.js +async function hamtHashFn(buf2) { + return (await murmur3128.encode(buf2)).slice(0, 8).reverse(); +} +var HAMT_HASH_CODE = BigInt(34); +var DEFAULT_FANOUT_BITS = 8; +var DirSharded = class extends Dir { + _bucket; + constructor(props, options) { + super(props, options); + this._bucket = createHAMT({ + hashFn: hamtHashFn, + bits: options.shardFanoutBits ?? 
DEFAULT_FANOUT_BITS + }); + } + async put(name4, value) { + this.cid = void 0; + this.size = void 0; + this.nodeSize = void 0; + await this._bucket.put(name4, value); + } + async get(name4) { + return this._bucket.get(name4); + } + childCount() { + return this._bucket.leafCount(); + } + directChildrenCount() { + return this._bucket.childrenCount(); + } + onlyChild() { + return this._bucket.onlyChild(); + } + async *eachChildSeries() { + for await (const { key, value } of this._bucket.eachLeafSeries()) { + yield { + key, + child: value + }; + } + } + estimateNodeSize() { + if (this.nodeSize !== void 0) { + return this.nodeSize; + } + this.nodeSize = calculateSize(this._bucket, this, this.options); + return this.nodeSize; + } + async *flush(blockstore) { + for await (const entry of flush(this._bucket, blockstore, this, this.options)) { + yield { + ...entry, + path: this.path + }; + } + } +}; +var dir_sharded_default = DirSharded; +async function* flush(bucket, blockstore, shardRoot, options) { + const children = bucket._children; + const padLength = (bucket.tableSize() - 1).toString(16).length; + const links = []; + let childrenSize = 0n; + for (let i = 0; i < children.length; i++) { + const child = children.get(i); + if (child == null) { + continue; + } + const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0"); + if (child instanceof Bucket) { + let shard; + for await (const subShard of flush(child, blockstore, null, options)) { + shard = subShard; + } + if (shard == null) { + throw new Error("Could not flush sharded directory, no subshard found"); + } + links.push({ + Name: labelPrefix, + Tsize: Number(shard.size), + Hash: shard.cid + }); + childrenSize += shard.size; + } else if (isDir(child.value)) { + const dir2 = child.value; + let flushedDir; + for await (const entry of dir2.flush(blockstore)) { + flushedDir = entry; + yield flushedDir; + } + if (flushedDir == null) { + throw new Error("Did not flush dir"); + } + const label = labelPrefix + 
child.key; + links.push({ + Name: label, + Tsize: Number(flushedDir.size), + Hash: flushedDir.cid + }); + childrenSize += flushedDir.size; + } else { + const value = child.value; + if (value.cid == null) { + continue; + } + const label = labelPrefix + child.key; + const size2 = value.size; + links.push({ + Name: label, + Tsize: Number(size2), + Hash: value.cid + }); + childrenSize += BigInt(size2 ?? 0); + } + } + const data = Uint8Array.from(children.bitField().reverse()); + const dir = new UnixFS({ + type: "hamt-sharded-directory", + data, + fanout: BigInt(bucket.tableSize()), + hashType: HAMT_HASH_CODE, + mtime: shardRoot == null ? void 0 : shardRoot.mtime, + mode: shardRoot == null ? void 0 : shardRoot.mode + }); + const node = { + Data: dir.marshal(), + Links: links + }; + const buffer2 = encode7(prepare(node)); + const cid = await persist(buffer2, blockstore, options); + const size = BigInt(buffer2.byteLength) + childrenSize; + yield { + cid, + unixfs: dir, + size + }; +} +function isDir(obj) { + return typeof obj.flush === "function"; +} +function calculateSize(bucket, shardRoot, options) { + const children = bucket._children; + const padLength = (bucket.tableSize() - 1).toString(16).length; + const links = []; + for (let i = 0; i < children.length; i++) { + const child = children.get(i); + if (child == null) { + continue; + } + const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0"); + if (child instanceof Bucket) { + const size = calculateSize(child, null, options); + links.push({ + Name: labelPrefix, + Tsize: Number(size), + Hash: options.cidVersion === 0 ? CID_V0 : CID_V1 + }); + } else if (typeof child.value.flush === "function") { + const dir2 = child.value; + const size = dir2.nodeSize(); + links.push({ + Name: labelPrefix + child.key, + Tsize: Number(size), + Hash: options.cidVersion === 0 ? 
CID_V0 : CID_V1 + }); + } else { + const value = child.value; + if (value.cid == null) { + continue; + } + const label = labelPrefix + child.key; + const size = value.size; + links.push({ + Name: label, + Tsize: Number(size), + Hash: value.cid + }); + } + } + const data = Uint8Array.from(children.bitField().reverse()); + const dir = new UnixFS({ + type: "hamt-sharded-directory", + data, + fanout: BigInt(bucket.tableSize()), + hashType: HAMT_HASH_CODE, + mtime: shardRoot == null ? void 0 : shardRoot.mtime, + mode: shardRoot == null ? void 0 : shardRoot.mode + }); + const buffer2 = encode7(prepare({ + Data: dir.marshal(), + Links: links + })); + return buffer2.length; +} + +// node_modules/ipfs-unixfs-importer/dist/src/flat-to-shard.js +async function flatToShard(child, dir, threshold, options) { + let newDir = dir; + if (dir instanceof DirFlat && dir.estimateNodeSize() > threshold) { + newDir = await convertToShard(dir, options); + } + const parent = newDir.parent; + if (parent != null) { + if (newDir !== dir) { + if (child != null) { + child.parent = newDir; + } + if (newDir.parentKey == null) { + throw new Error("No parent key found"); + } + await parent.put(newDir.parentKey, newDir); + } + return flatToShard(newDir, parent, threshold, options); + } + return newDir; +} +async function convertToShard(oldDir, options) { + const newDir = new dir_sharded_default({ + root: oldDir.root, + dir: true, + parent: oldDir.parent, + parentKey: oldDir.parentKey, + path: oldDir.path, + dirty: oldDir.dirty, + flat: false, + mtime: oldDir.mtime, + mode: oldDir.mode + }, options); + for await (const { key, child } of oldDir.eachChildSeries()) { + await newDir.put(key, child); + } + return newDir; +} + +// node_modules/ipfs-unixfs-importer/dist/src/utils/to-path-components.js +var toPathComponents = (path6 = "") => { + return path6.split(/(? 
1) { + yield* flushAndYield(tree, block); + } else { + for await (const unwrapped of tree.eachChildSeries()) { + if (unwrapped == null) { + continue; + } + yield* flushAndYield(unwrapped.child, block); + } + } + }; +} + +// node_modules/ipfs-unixfs-importer/dist/src/index.js +async function* importer(source, blockstore, options = {}) { + let candidates; + if (Symbol.asyncIterator in source || Symbol.iterator in source) { + candidates = source; + } else { + candidates = [source]; + } + const wrapWithDirectory = options.wrapWithDirectory ?? false; + const shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144; + const shardFanoutBits = options.shardFanoutBits ?? 8; + const cidVersion = options.cidVersion ?? 1; + const rawLeaves = options.rawLeaves ?? true; + const leafType = options.leafType ?? "file"; + const fileImportConcurrency = options.fileImportConcurrency ?? 50; + const blockWriteConcurrency = options.blockWriteConcurrency ?? 10; + const reduceSingleLeafToSelf = options.reduceSingleLeafToSelf ?? true; + const chunker = options.chunker ?? fixedSize(); + const chunkValidator = options.chunkValidator ?? defaultChunkValidator(); + const buildDag = options.dagBuilder ?? defaultDagBuilder({ + chunker, + chunkValidator, + wrapWithDirectory, + layout: options.layout ?? balanced(), + bufferImporter: options.bufferImporter ?? defaultBufferImporter({ + cidVersion, + rawLeaves, + leafType, + onProgress: options.onProgress + }), + blockWriteConcurrency, + reduceSingleLeafToSelf, + cidVersion, + onProgress: options.onProgress + }); + const buildTree = options.treeBuilder ?? 
defaultTreeBuilder({ + wrapWithDirectory, + shardSplitThresholdBytes, + shardFanoutBits, + cidVersion, + onProgress: options.onProgress + }); + for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore)) { + yield { + cid: entry.cid, + path: entry.path, + unixfs: entry.unixfs, + size: entry.size + }; + } +} +async function importFile(content, blockstore, options = {}) { + const result = await src_default4(importer([content], blockstore, options)); + if (result == null) { + throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS"); + } + return result; +} +async function importDirectory(content, blockstore, options = {}) { + const result = await src_default4(importer([content], blockstore, options)); + if (result == null) { + throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS"); + } + return result; +} +async function importBytes(buf2, blockstore, options = {}) { + return importFile({ + content: buf2 + }, blockstore, options); +} +async function importByteStream(bufs, blockstore, options = {}) { + return importFile({ + content: bufs + }, blockstore, options); +} + +// node_modules/ipfs-unixfs-importer/dist/src/chunker/rabin.js +var import_err_code5 = __toESM(require_err_code(), 1); +var import_rabin_wasm = __toESM(require_src(), 1); + +// node_modules/@helia/unixfs/dist/src/commands/add.js +var defaultImporterSettings = { + cidVersion: 1, + rawLeaves: true, + layout: balanced({ + maxChildrenPerNode: 1024 + }), + chunker: fixedSize({ + chunkSize: 1048576 + }) +}; +async function* addAll(source, blockstore, options = {}) { + yield* importer(source, blockstore, { + ...defaultImporterSettings, + ...options + }); +} +async function addBytes(bytes, blockstore, options = {}) { + const { cid } = await importBytes(bytes, blockstore, { + ...defaultImporterSettings, + ...options + }); + return cid; +} +async function addByteStream(bytes, blockstore, 
options = {}) { + const { cid } = await importByteStream(bytes, blockstore, { + ...defaultImporterSettings, + ...options + }); + return cid; +} +async function addFile(file, blockstore, options = {}) { + const { cid } = await importFile(file, blockstore, { + ...defaultImporterSettings, + ...options + }); + return cid; +} +async function addDirectory(dir, blockstore, options = {}) { + const { cid } = await importDirectory({ + ...dir, + path: dir.path ?? "-" + }, blockstore, { + ...defaultImporterSettings, + ...options + }); + return cid; +} + +// node_modules/ipfs-unixfs-exporter/dist/src/index.js +var import_err_code15 = __toESM(require_err_code(), 1); + +// node_modules/it-last/dist/src/index.js +function isAsyncIterable6(thing) { + return thing[Symbol.asyncIterator] != null; +} +function last(source) { + if (isAsyncIterable6(source)) { + return (async () => { + let res2; + for await (const entry of source) { + res2 = entry; + } + return res2; + })(); + } + let res; + for (const entry of source) { + res = entry; + } + return res; +} +var src_default7 = last; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js +var import_err_code14 = __toESM(require_err_code(), 1); + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.js +var import_err_code6 = __toESM(require_err_code(), 1); +var resolve = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => { + const block = await blockstore.get(cid, options); + const object = decode6(block); + let subObject = object; + let subPath = path6; + while (toResolve.length > 0) { + const prop = toResolve[0]; + if (prop in subObject) { + toResolve.shift(); + subPath = `${subPath}/${prop}`; + const subObjectCid = CID2.asCID(subObject[prop]); + if (subObjectCid != null) { + return { + entry: { + type: "object", + name: name4, + path: path6, + cid, + node: block, + depth, + size: BigInt(block.length), + content: async function* () { + yield object; + } + }, + next: { + cid: 
subObjectCid, + name: prop, + path: subPath, + toResolve + } + }; + } + subObject = subObject[prop]; + } else { + throw (0, import_err_code6.default)(new Error(`No property named ${prop} found in cbor node ${cid}`), "ERR_NO_PROP"); + } + } + return { + entry: { + type: "object", + name: name4, + path: path6, + cid, + node: block, + depth, + size: BigInt(block.length), + content: async function* () { + yield object; + } + } + }; +}; +var dag_cbor_default = resolve; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js +var import_err_code8 = __toESM(require_err_code(), 1); + +// node_modules/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.js +function extractDataFromBlock(block, blockStart, requestedStart, requestedEnd) { + const blockLength = BigInt(block.length); + const blockEnd = BigInt(blockStart + blockLength); + if (requestedStart >= blockEnd || requestedEnd < blockStart) { + return new Uint8Array(0); + } + if (requestedEnd >= blockStart && requestedEnd < blockEnd) { + block = block.subarray(0, Number(requestedEnd - blockStart)); + } + if (requestedStart >= blockStart && requestedStart < blockEnd) { + block = block.subarray(Number(requestedStart - blockStart)); + } + return block; +} +var extract_data_from_block_default = extractDataFromBlock; + +// node_modules/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.js +var import_err_code7 = __toESM(require_err_code(), 1); +var validateOffsetAndLength = (size, offset = 0, length4 = size) => { + const fileSize = BigInt(size); + const start = BigInt(offset ?? 
0); + let end = BigInt(length4); + if (end !== fileSize) { + end = start + end; + } + if (end > fileSize) { + end = fileSize; + } + if (start < 0n) { + throw (0, import_err_code7.default)(new Error("Offset must be greater than or equal to 0"), "ERR_INVALID_PARAMS"); + } + if (start > fileSize) { + throw (0, import_err_code7.default)(new Error("Offset must be less than the file size"), "ERR_INVALID_PARAMS"); + } + if (end < 0n) { + throw (0, import_err_code7.default)(new Error("Length must be greater than or equal to 0"), "ERR_INVALID_PARAMS"); + } + if (end > fileSize) { + throw (0, import_err_code7.default)(new Error("Length must be less than the file size"), "ERR_INVALID_PARAMS"); + } + return { + start, + end + }; +}; +var validate_offset_and_length_default = validateOffsetAndLength; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js +var rawContent = (node) => { + async function* contentGenerator(options = {}) { + var _a; + const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length); + const buf2 = extract_data_from_block_default(node, 0n, start, end); + (_a = options.onProgress) == null ? 
void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:identity", { + bytesRead: BigInt(buf2.byteLength), + totalBytes: end - start, + fileSize: BigInt(node.byteLength) + })); + yield buf2; + } + return contentGenerator; +}; +var resolve2 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => { + if (toResolve.length > 0) { + throw (0, import_err_code8.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND"); + } + const buf2 = decode10(cid.multihash.bytes); + return { + entry: { + type: "identity", + name: name4, + path: path6, + cid, + content: rawContent(buf2.digest), + depth, + size: BigInt(buf2.digest.length), + node: buf2.digest + } + }; +}; +var identity_default = resolve2; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/raw.js +var import_err_code9 = __toESM(require_err_code(), 1); +var rawContent2 = (node) => { + async function* contentGenerator(options = {}) { + var _a; + const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length); + const buf2 = extract_data_from_block_default(node, 0n, start, end); + (_a = options.onProgress) == null ? 
void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:raw", { + bytesRead: BigInt(buf2.byteLength), + totalBytes: end - start, + fileSize: BigInt(node.byteLength) + })); + yield buf2; + } + return contentGenerator; +}; +var resolve3 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => { + if (toResolve.length > 0) { + throw (0, import_err_code9.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND"); + } + const block = await blockstore.get(cid, options); + return { + entry: { + type: "raw", + name: name4, + path: path6, + cid, + content: rawContent2(block), + depth, + size: BigInt(block.length), + node: block + } + }; +}; +var raw_default = resolve3; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js +var import_err_code13 = __toESM(require_err_code(), 1); + +// node_modules/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.js +var import_err_code10 = __toESM(require_err_code(), 1); +var hashFn = async function(buf2) { + return (await murmur3128.encode(buf2)).slice(0, 8).reverse(); +}; +var addLinksToHamtBucket = async (links, bucket, rootBucket) => { + const padLength = (bucket.tableSize() - 1).toString(16).length; + await Promise.all(links.map(async (link) => { + if (link.Name == null) { + throw new Error("Unexpected Link without a Name"); + } + if (link.Name.length === padLength) { + const pos = parseInt(link.Name, 16); + bucket._putObjectAt(pos, new Bucket({ + hash: rootBucket._options.hash, + bits: rootBucket._options.bits + }, bucket, pos)); + return; + } + await rootBucket.put(link.Name.substring(2), true); + })); +}; +var toPrefix = (position, padLength) => { + return position.toString(16).toUpperCase().padStart(padLength, "0").substring(0, padLength); +}; +var toBucketPath = (position) => { + let bucket = position.bucket; + const path6 = []; + while (bucket._parent != null) { + path6.push(bucket); + bucket = bucket._parent; + } + 
path6.push(bucket); + return path6.reverse(); +}; +var findShardCid = async (node, name4, blockstore, context, options) => { + if (context == null) { + if (node.Data == null) { + throw (0, import_err_code10.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS"); + } + let dir; + try { + dir = UnixFS.unmarshal(node.Data); + } catch (err) { + throw (0, import_err_code10.default)(err, "ERR_NOT_UNIXFS"); + } + if (dir.type !== "hamt-sharded-directory") { + throw (0, import_err_code10.default)(new Error("not a HAMT"), "ERR_NOT_UNIXFS"); + } + if (dir.fanout == null) { + throw (0, import_err_code10.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS"); + } + const rootBucket = createHAMT({ + hashFn, + bits: Math.log2(Number(dir.fanout)) + }); + context = { + rootBucket, + hamtDepth: 1, + lastBucket: rootBucket + }; + } + const padLength = (context.lastBucket.tableSize() - 1).toString(16).length; + await addLinksToHamtBucket(node.Links, context.lastBucket, context.rootBucket); + const position = await context.rootBucket._findNewBucketAndPos(name4); + let prefix = toPrefix(position.pos, padLength); + const bucketPath = toBucketPath(position); + if (bucketPath.length > context.hamtDepth) { + context.lastBucket = bucketPath[context.hamtDepth]; + prefix = toPrefix(context.lastBucket._posAtParent, padLength); + } + const link = node.Links.find((link2) => { + if (link2.Name == null) { + return false; + } + const entryPrefix = link2.Name.substring(0, padLength); + const entryName = link2.Name.substring(padLength); + if (entryPrefix !== prefix) { + return false; + } + if (entryName !== "" && entryName !== name4) { + return false; + } + return true; + }); + if (link == null) { + return; + } + if (link.Name != null && link.Name.substring(padLength) === name4) { + return link.Hash; + } + context.hamtDepth++; + const block = await blockstore.get(link.Hash, options); + node = decode11(block); + return findShardCid(node, name4, blockstore, context, options); +}; +var 
find_cid_in_shard_default = findShardCid; + +// node_modules/it-filter/dist/src/index.js +function isAsyncIterable7(thing) { + return thing[Symbol.asyncIterator] != null; +} +function filter(source, fn) { + if (isAsyncIterable7(source)) { + return async function* () { + for await (const entry of source) { + if (await fn(entry)) { + yield entry; + } + } + }(); + } + const peekable2 = src_default2(source); + const { value, done } = peekable2.next(); + if (done === true) { + return function* () { + }(); + } + const res = fn(value); + if (typeof res.then === "function") { + return async function* () { + if (await res) { + yield value; + } + for await (const entry of peekable2) { + if (await fn(entry)) { + yield entry; + } + } + }(); + } + const func = fn; + return function* () { + if (res === true) { + yield value; + } + for (const entry of peekable2) { + if (func(entry)) { + yield entry; + } + } + }(); +} +var src_default8 = filter; + +// node_modules/it-parallel/dist/src/index.js +var CustomEvent = globalThis.CustomEvent ?? Event; +async function* parallel(source, options = {}) { + let concurrency = options.concurrency ?? Infinity; + if (concurrency < 1) { + concurrency = Infinity; + } + const ordered = options.ordered == null ? 
false : options.ordered; + const emitter = new EventTarget(); + const ops = []; + let slotAvailable = pDefer(); + let resultAvailable = pDefer(); + let sourceFinished = false; + let sourceErr; + let opErred = false; + emitter.addEventListener("task-complete", () => { + resultAvailable.resolve(); + }); + void Promise.resolve().then(async () => { + try { + for await (const task of source) { + if (ops.length === concurrency) { + slotAvailable = pDefer(); + await slotAvailable.promise; + } + if (opErred) { + break; + } + const op = { + done: false + }; + ops.push(op); + task().then((result) => { + op.done = true; + op.ok = true; + op.value = result; + emitter.dispatchEvent(new CustomEvent("task-complete")); + }, (err) => { + op.done = true; + op.err = err; + emitter.dispatchEvent(new CustomEvent("task-complete")); + }); + } + sourceFinished = true; + emitter.dispatchEvent(new CustomEvent("task-complete")); + } catch (err) { + sourceErr = err; + emitter.dispatchEvent(new CustomEvent("task-complete")); + } + }); + function valuesAvailable() { + var _a; + if (ordered) { + return (_a = ops[0]) == null ? 
void 0 : _a.done; + } + return Boolean(ops.find((op) => op.done)); + } + function* yieldOrderedValues() { + while (ops.length > 0 && ops[0].done) { + const op = ops[0]; + ops.shift(); + if (op.ok) { + yield op.value; + } else { + opErred = true; + slotAvailable.resolve(); + throw op.err; + } + slotAvailable.resolve(); + } + } + function* yieldUnOrderedValues() { + while (valuesAvailable()) { + for (let i = 0; i < ops.length; i++) { + if (ops[i].done) { + const op = ops[i]; + ops.splice(i, 1); + i--; + if (op.ok) { + yield op.value; + } else { + opErred = true; + slotAvailable.resolve(); + throw op.err; + } + slotAvailable.resolve(); + } + } + } + } + while (true) { + if (!valuesAvailable()) { + resultAvailable = pDefer(); + await resultAvailable.promise; + } + if (sourceErr != null) { + throw sourceErr; + } + if (ordered) { + yield* yieldOrderedValues(); + } else { + yield* yieldUnOrderedValues(); + } + if (sourceFinished && ops.length === 0) { + break; + } + } +} + +// node_modules/it-pushable/dist/src/fifo.js +var FixedFIFO = class { + buffer; + mask; + top; + btm; + next; + constructor(hwm) { + if (!(hwm > 0) || (hwm - 1 & hwm) !== 0) { + throw new Error("Max size for a FixedFIFO should be a power of two"); + } + this.buffer = new Array(hwm); + this.mask = hwm - 1; + this.top = 0; + this.btm = 0; + this.next = null; + } + push(data) { + if (this.buffer[this.top] !== void 0) { + return false; + } + this.buffer[this.top] = data; + this.top = this.top + 1 & this.mask; + return true; + } + shift() { + const last2 = this.buffer[this.btm]; + if (last2 === void 0) { + return void 0; + } + this.buffer[this.btm] = void 0; + this.btm = this.btm + 1 & this.mask; + return last2; + } + isEmpty() { + return this.buffer[this.btm] === void 0; + } +}; +var FIFO = class { + size; + hwm; + head; + tail; + constructor(options = {}) { + this.hwm = options.splitLimit ?? 
16; + this.head = new FixedFIFO(this.hwm); + this.tail = this.head; + this.size = 0; + } + calculateSize(obj) { + if ((obj == null ? void 0 : obj.byteLength) != null) { + return obj.byteLength; + } + return 1; + } + push(val) { + if ((val == null ? void 0 : val.value) != null) { + this.size += this.calculateSize(val.value); + } + if (!this.head.push(val)) { + const prev = this.head; + this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length); + this.head.push(val); + } + } + shift() { + let val = this.tail.shift(); + if (val === void 0 && this.tail.next != null) { + const next = this.tail.next; + this.tail.next = null; + this.tail = next; + val = this.tail.shift(); + } + if ((val == null ? void 0 : val.value) != null) { + this.size -= this.calculateSize(val.value); + } + return val; + } + isEmpty() { + return this.head.isEmpty(); + } +}; + +// node_modules/it-pushable/dist/src/index.js +var AbortError3 = class extends Error { + type; + code; + constructor(message2, code5) { + super(message2 ?? "The operation was aborted"); + this.type = "aborted"; + this.code = code5 ?? "ABORT_ERR"; + } +}; +function pushable(options = {}) { + const getNext = (buffer2) => { + const next = buffer2.shift(); + if (next == null) { + return { done: true }; + } + if (next.error != null) { + throw next.error; + } + return { + done: next.done === true, + // @ts-expect-error if done is false, value will be present + value: next.value + }; + }; + return _pushable(getNext, options); +} +function _pushable(getNext, options) { + options = options ?? 
{}; + let onEnd = options.onEnd; + let buffer2 = new FIFO(); + let pushable2; + let onNext; + let ended; + let drain2 = pDefer(); + const waitNext = async () => { + try { + if (!buffer2.isEmpty()) { + return getNext(buffer2); + } + if (ended) { + return { done: true }; + } + return await new Promise((resolve6, reject) => { + onNext = (next) => { + onNext = null; + buffer2.push(next); + try { + resolve6(getNext(buffer2)); + } catch (err) { + reject(err); + } + return pushable2; + }; + }); + } finally { + if (buffer2.isEmpty()) { + queueMicrotask(() => { + drain2.resolve(); + drain2 = pDefer(); + }); + } + } + }; + const bufferNext = (next) => { + if (onNext != null) { + return onNext(next); + } + buffer2.push(next); + return pushable2; + }; + const bufferError = (err) => { + buffer2 = new FIFO(); + if (onNext != null) { + return onNext({ error: err }); + } + buffer2.push({ error: err }); + return pushable2; + }; + const push = (value) => { + if (ended) { + return pushable2; + } + if ((options == null ? void 0 : options.objectMode) !== true && (value == null ? void 0 : value.byteLength) == null) { + throw new Error("objectMode was not true but tried to push non-Uint8Array value"); + } + return bufferNext({ done: false, value }); + }; + const end = (err) => { + if (ended) + return pushable2; + ended = true; + return err != null ? bufferError(err) : bufferNext({ done: true }); + }; + const _return = () => { + buffer2 = new FIFO(); + end(); + return { done: true }; + }; + const _throw = (err) => { + end(err); + return { done: true }; + }; + pushable2 = { + [Symbol.asyncIterator]() { + return this; + }, + next: waitNext, + return: _return, + throw: _throw, + push, + end, + get readableLength() { + return buffer2.size; + }, + onEmpty: async (options2) => { + const signal = options2 == null ? void 0 : options2.signal; + signal == null ? 
void 0 : signal.throwIfAborted(); + if (buffer2.isEmpty()) { + return; + } + let cancel; + let listener; + if (signal != null) { + cancel = new Promise((resolve6, reject) => { + listener = () => { + reject(new AbortError3()); + }; + signal.addEventListener("abort", listener); + }); + } + try { + await Promise.race([ + drain2.promise, + cancel + ]); + } finally { + if (listener != null && signal != null) { + signal == null ? void 0 : signal.removeEventListener("abort", listener); + } + } + } + }; + if (onEnd == null) { + return pushable2; + } + const _pushable2 = pushable2; + pushable2 = { + [Symbol.asyncIterator]() { + return this; + }, + next() { + return _pushable2.next(); + }, + throw(err) { + _pushable2.throw(err); + if (onEnd != null) { + onEnd(err); + onEnd = void 0; + } + return { done: true }; + }, + return() { + _pushable2.return(); + if (onEnd != null) { + onEnd(); + onEnd = void 0; + } + return { done: true }; + }, + push, + end(err) { + _pushable2.end(err); + if (onEnd != null) { + onEnd(err); + onEnd = void 0; + } + return pushable2; + }, + get readableLength() { + return _pushable2.readableLength; + }, + onEmpty: (opts) => { + return _pushable2.onEmpty(opts); + } + }; + return pushable2; +} + +// node_modules/it-merge/dist/src/index.js +function isAsyncIterable8(thing) { + return thing[Symbol.asyncIterator] != null; +} +function merge(...sources) { + const syncSources = []; + for (const source of sources) { + if (!isAsyncIterable8(source)) { + syncSources.push(source); + } + } + if (syncSources.length === sources.length) { + return function* () { + for (const source of syncSources) { + yield* source; + } + }(); + } + return async function* () { + const output = pushable({ + objectMode: true + }); + void Promise.resolve().then(async () => { + try { + await Promise.all(sources.map(async (source) => { + for await (const item of source) { + output.push(item); + } + })); + output.end(); + } catch (err) { + output.end(err); + } + }); + yield* output; + }(); 
+} +var src_default9 = merge; + +// node_modules/it-pipe/dist/src/index.js +function pipe(first2, ...rest) { + if (first2 == null) { + throw new Error("Empty pipeline"); + } + if (isDuplex(first2)) { + const duplex = first2; + first2 = () => duplex.source; + } else if (isIterable2(first2) || isAsyncIterable9(first2)) { + const source = first2; + first2 = () => source; + } + const fns = [first2, ...rest]; + if (fns.length > 1) { + if (isDuplex(fns[fns.length - 1])) { + fns[fns.length - 1] = fns[fns.length - 1].sink; + } + } + if (fns.length > 2) { + for (let i = 1; i < fns.length - 1; i++) { + if (isDuplex(fns[i])) { + fns[i] = duplexPipelineFn(fns[i]); + } + } + } + return rawPipe(...fns); +} +var rawPipe = (...fns) => { + let res; + while (fns.length > 0) { + res = fns.shift()(res); + } + return res; +}; +var isAsyncIterable9 = (obj) => { + return (obj == null ? void 0 : obj[Symbol.asyncIterator]) != null; +}; +var isIterable2 = (obj) => { + return (obj == null ? void 0 : obj[Symbol.iterator]) != null; +}; +var isDuplex = (obj) => { + if (obj == null) { + return false; + } + return obj.sink != null && obj.source != null; +}; +var duplexPipelineFn = (duplex) => { + return (source) => { + const p = duplex.sink(source); + if ((p == null ? 
void 0 : p.then) != null) { + const stream = pushable({ + objectMode: true + }); + p.then(() => { + stream.end(); + }, (err) => { + stream.end(err); + }); + let sourceWrap; + const source2 = duplex.source; + if (isAsyncIterable9(source2)) { + sourceWrap = async function* () { + yield* source2; + stream.end(); + }; + } else if (isIterable2(source2)) { + sourceWrap = function* () { + yield* source2; + stream.end(); + }; + } else { + throw new Error("Unknown duplex source type - must be Iterable or AsyncIterable"); + } + return src_default9(stream, sourceWrap()); + } + return duplex.source; + }; +}; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.js +var directoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => { + async function* yieldDirectoryContent(options = {}) { + var _a; + const offset = options.offset ?? 0; + const length4 = options.length ?? node.Links.length; + const links = node.Links.slice(offset, length4); + (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:directory", { + cid + })); + yield* pipe(links, (source) => src_default3(source, (link) => { + return async () => { + const linkName = link.Name ?? 
""; + const linkPath = `${path6}/${linkName}`; + const result = await resolve6(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options); + return result.entry; + }; + }), (source) => parallel(source, { ordered: true }), (source) => src_default8(source, (entry) => entry != null)); + } + return yieldDirectoryContent; +}; +var directory_default = directoryContent; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.js +var import_err_code11 = __toESM(require_err_code(), 1); +async function walkDAG(blockstore, node, queue, streamPosition, start, end, options) { + if (node instanceof Uint8Array) { + const buf2 = extract_data_from_block_default(node, streamPosition, start, end); + queue.push(buf2); + return; + } + if (node.Data == null) { + throw (0, import_err_code11.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS"); + } + let file; + try { + file = UnixFS.unmarshal(node.Data); + } catch (err) { + throw (0, import_err_code11.default)(err, "ERR_NOT_UNIXFS"); + } + if (file.data != null) { + const data = file.data; + const buf2 = extract_data_from_block_default(data, streamPosition, start, end); + queue.push(buf2); + streamPosition += BigInt(buf2.byteLength); + } + const childOps = []; + if (node.Links.length !== file.blockSizes.length) { + throw (0, import_err_code11.default)(new Error("Inconsistent block sizes and dag links"), "ERR_NOT_UNIXFS"); + } + for (let i = 0; i < node.Links.length; i++) { + const childLink = node.Links[i]; + const childStart = streamPosition; + const childEnd = childStart + file.blockSizes[i]; + if (start >= childStart && start < childEnd || // child has offset byte + end >= childStart && end <= childEnd || // child has end byte + start < childStart && end > childEnd) { + childOps.push({ + link: childLink, + blockStart: streamPosition + }); + } + streamPosition = childEnd; + if (streamPosition > end) { + break; + } + } + await pipe(childOps, (source) => src_default3(source, (op) => { + return 
async () => { + const block = await blockstore.get(op.link.Hash, options); + return { + ...op, + block + }; + }; + }), (source) => parallel(source, { + ordered: true + }), async (source) => { + for await (const { link, block, blockStart } of source) { + let child; + switch (link.Hash.code) { + case code2: + child = decode11(block); + break; + case code3: + child = block; + break; + default: + queue.end((0, import_err_code11.default)(new Error(`Unsupported codec: ${link.Hash.code}`), "ERR_NOT_UNIXFS")); + return; + } + const childQueue = new dist_default({ + concurrency: 1 + }); + childQueue.on("error", (error) => { + queue.end(error); + }); + void childQueue.add(async () => { + var _a; + (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", { + cid: link.Hash + })); + await walkDAG(blockstore, child, queue, blockStart, start, end, options); + }); + await childQueue.onIdle(); + } + }); + if (streamPosition >= end) { + queue.end(); + } +} +var fileContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => { + async function* yieldFileContent(options = {}) { + var _a, _b; + const fileSize = unixfs2.fileSize(); + if (fileSize === void 0) { + throw new Error("File was a directory"); + } + const { start, end } = validate_offset_and_length_default(fileSize, options.offset, options.length); + if (end === 0n) { + return; + } + let read4 = 0n; + const wanted = end - start; + const queue = pushable(); + (_a = options.onProgress) == null ? 
void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", { + cid + })); + void walkDAG(blockstore, node, queue, 0n, start, end, options).catch((err) => { + queue.end(err); + }); + for await (const buf2 of queue) { + if (buf2 == null) { + continue; + } + read4 += BigInt(buf2.byteLength); + if (read4 > wanted) { + queue.end(); + throw (0, import_err_code11.default)(new Error("Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect"), "ERR_OVER_READ"); + } + if (read4 === wanted) { + queue.end(); + } + (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:exporter:progress:unixfs:file", { + bytesRead: read4, + totalBytes: wanted, + fileSize + })); + yield buf2; + } + if (read4 < wanted) { + throw (0, import_err_code11.default)(new Error("Traversed entire DAG but did not read enough bytes"), "ERR_UNDER_READ"); + } + } + return yieldFileContent; +}; +var file_default = fileContent; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js +var import_err_code12 = __toESM(require_err_code(), 1); +var hamtShardedDirectoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => { + function yieldHamtDirectoryContent(options = {}) { + var _a; + (_a = options.onProgress) == null ? 
void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", { + cid + })); + return listDirectory(node, path6, resolve6, depth, blockstore, options); + } + return yieldHamtDirectoryContent; +}; +async function* listDirectory(node, path6, resolve6, depth, blockstore, options) { + const links = node.Links; + if (node.Data == null) { + throw (0, import_err_code12.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS"); + } + let dir; + try { + dir = UnixFS.unmarshal(node.Data); + } catch (err) { + throw (0, import_err_code12.default)(err, "ERR_NOT_UNIXFS"); + } + if (dir.fanout == null) { + throw (0, import_err_code12.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS"); + } + const padLength = (dir.fanout - 1n).toString(16).length; + const results = pipe(links, (source) => src_default3(source, (link) => { + return async () => { + var _a; + const name4 = link.Name != null ? link.Name.substring(padLength) : null; + if (name4 != null && name4 !== "") { + const result = await resolve6(link.Hash, name4, `${path6}/${name4}`, [], depth + 1, blockstore, options); + return { entries: result.entry == null ? [] : [result.entry] }; + } else { + const block = await blockstore.get(link.Hash, options); + node = decode11(block); + (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", { + cid: link.Hash + })); + return { entries: listDirectory(node, path6, resolve6, depth, blockstore, options) }; + } + }; + }), (source) => parallel(source, { ordered: true })); + for await (const { entries } of results) { + yield* entries; + } +} +var hamt_sharded_directory_default = hamtShardedDirectoryContent; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js +var findLinkCid = (node, name4) => { + const link = node.Links.find((link2) => link2.Name === name4); + return link == null ? 
void 0 : link.Hash; +}; +var contentExporters = { + raw: file_default, + file: file_default, + directory: directory_default, + "hamt-sharded-directory": hamt_sharded_directory_default, + metadata: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => { + return () => []; + }, + symlink: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => { + return () => []; + } +}; +var unixFsResolver = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => { + const block = await blockstore.get(cid, options); + const node = decode11(block); + let unixfs2; + let next; + if (name4 == null) { + name4 = cid.toString(); + } + if (node.Data == null) { + throw (0, import_err_code13.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS"); + } + try { + unixfs2 = UnixFS.unmarshal(node.Data); + } catch (err) { + throw (0, import_err_code13.default)(err, "ERR_NOT_UNIXFS"); + } + if (path6 == null) { + path6 = name4; + } + if (toResolve.length > 0) { + let linkCid; + if ((unixfs2 == null ? void 0 : unixfs2.type) === "hamt-sharded-directory") { + linkCid = await find_cid_in_shard_default(node, toResolve[0], blockstore); + } else { + linkCid = findLinkCid(node, toResolve[0]); + } + if (linkCid == null) { + throw (0, import_err_code13.default)(new Error("file does not exist"), "ERR_NOT_FOUND"); + } + const nextName = toResolve.shift(); + const nextPath = `${path6}/${nextName}`; + next = { + cid: linkCid, + toResolve, + name: nextName ?? 
"", + path: nextPath + }; + } + const content = contentExporters[unixfs2.type](cid, node, unixfs2, path6, resolve6, depth, blockstore); + if (content == null) { + throw (0, import_err_code13.default)(new Error("could not find content exporter"), "ERR_NOT_FOUND"); + } + if (unixfs2.isDirectory()) { + return { + entry: { + type: "directory", + name: name4, + path: path6, + cid, + content, + unixfs: unixfs2, + depth, + node, + size: unixfs2.fileSize() + }, + next + }; + } + return { + entry: { + type: "file", + name: name4, + path: path6, + cid, + content, + unixfs: unixfs2, + depth, + node, + size: unixfs2.fileSize() + }, + next + }; +}; +var unixfs_v1_default = unixFsResolver; + +// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js +var resolvers = { + [code2]: unixfs_v1_default, + [code3]: raw_default, + [code]: dag_cbor_default, + [identity2.code]: identity_default +}; +var resolve4 = async (cid, name4, path6, toResolve, depth, blockstore, options) => { + const resolver = resolvers[cid.code]; + if (resolver == null) { + throw (0, import_err_code14.default)(new Error(`No resolver for code ${cid.code}`), "ERR_NO_RESOLVER"); + } + return resolver(cid, name4, path6, toResolve, resolve4, depth, blockstore, options); +}; +var resolvers_default = resolve4; + +// node_modules/ipfs-unixfs-exporter/dist/src/index.js +var toPathComponents2 = (path6 = "") => { + return (path6.trim().match(/([^\\^/]|\\\/)+/g) ?? 
[]).filter(Boolean); +}; +var cidAndRest = (path6) => { + if (path6 instanceof Uint8Array) { + return { + cid: CID2.decode(path6), + toResolve: [] + }; + } + const cid = CID2.asCID(path6); + if (cid != null) { + return { + cid, + toResolve: [] + }; + } + if (typeof path6 === "string") { + if (path6.indexOf("/ipfs/") === 0) { + path6 = path6.substring(6); + } + const output = toPathComponents2(path6); + return { + cid: CID2.parse(output[0]), + toResolve: output.slice(1) + }; + } + throw (0, import_err_code15.default)(new Error(`Unknown path type ${path6}`), "ERR_BAD_PATH"); +}; +async function* walkPath(path6, blockstore, options = {}) { + let { cid, toResolve } = cidAndRest(path6); + let name4 = cid.toString(); + let entryPath = name4; + const startingDepth = toResolve.length; + while (true) { + const result = await resolvers_default(cid, name4, entryPath, toResolve, startingDepth, blockstore, options); + if (result.entry == null && result.next == null) { + throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND"); + } + if (result.entry != null) { + yield result.entry; + } + if (result.next == null) { + return; + } + toResolve = result.next.toResolve; + cid = result.next.cid; + name4 = result.next.name; + entryPath = result.next.path; + } +} +async function exporter(path6, blockstore, options = {}) { + const result = await src_default7(walkPath(path6, blockstore, options)); + if (result == null) { + throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND"); + } + return result; +} +async function* recursive(path6, blockstore, options = {}) { + const node = await exporter(path6, blockstore, options); + if (node == null) { + return; + } + yield node; + if (node.type === "directory") { + for await (const child of recurse(node, options)) { + yield child; + } + } + async function* recurse(node2, options2) { + for await (const file of node2.content(options2)) { + yield file; + if (file instanceof 
Uint8Array) { + continue; + } + if (file.type === "directory") { + yield* recurse(file, options2); + } + } + } +} + +// node_modules/merge-options/index.mjs +var import_index3 = __toESM(require_merge_options(), 1); +var merge_options_default = import_index3.default; + +// node_modules/@helia/unixfs/dist/src/errors.js +var UnixFSError = class extends Error { + name; + code; + constructor(message2, name4, code5) { + super(message2); + this.name = name4; + this.code = code5; + } +}; +var NotUnixFSError = class extends UnixFSError { + constructor(message2 = "not a Unixfs node") { + super(message2, "NotUnixFSError", "ERR_NOT_UNIXFS"); + } +}; +var InvalidPBNodeError = class extends UnixFSError { + constructor(message2 = "invalid PBNode") { + super(message2, "InvalidPBNodeError", "ERR_INVALID_PBNODE"); + } +}; +var UnknownError = class extends UnixFSError { + constructor(message2 = "unknown error") { + super(message2, "InvalidPBNodeError", "ERR_UNKNOWN_ERROR"); + } +}; +var AlreadyExistsError = class extends UnixFSError { + constructor(message2 = "path already exists") { + super(message2, "AlreadyExistsError", "ERR_ALREADY_EXISTS"); + } +}; +var DoesNotExistError = class extends UnixFSError { + constructor(message2 = "path does not exist") { + super(message2, "DoesNotExistError", "ERR_DOES_NOT_EXIST"); + } +}; +var NoContentError = class extends UnixFSError { + constructor(message2 = "no content") { + super(message2, "NoContentError", "ERR_NO_CONTENT"); + } +}; +var NotAFileError = class extends UnixFSError { + constructor(message2 = "not a file") { + super(message2, "NotAFileError", "ERR_NOT_A_FILE"); + } +}; +var NotADirectoryError = class extends UnixFSError { + constructor(message2 = "not a directory") { + super(message2, "NotADirectoryError", "ERR_NOT_A_DIRECTORY"); + } +}; +var InvalidParametersError = class extends UnixFSError { + constructor(message2 = "invalid parameters") { + super(message2, "InvalidParametersError", "ERR_INVALID_PARAMETERS"); + } +}; + +// 
node_modules/@libp2p/logger/dist/src/index.js +var import_debug = __toESM(require_src2(), 1); +import_debug.default.formatters.b = (v) => { + return v == null ? "undefined" : base58btc2.baseEncode(v); +}; +import_debug.default.formatters.t = (v) => { + return v == null ? "undefined" : base322.baseEncode(v); +}; +import_debug.default.formatters.m = (v) => { + return v == null ? "undefined" : base64.baseEncode(v); +}; +import_debug.default.formatters.p = (v) => { + return v == null ? "undefined" : v.toString(); +}; +import_debug.default.formatters.c = (v) => { + return v == null ? "undefined" : v.toString(); +}; +import_debug.default.formatters.k = (v) => { + return v == null ? "undefined" : v.toString(); +}; +import_debug.default.formatters.a = (v) => { + return v == null ? "undefined" : v.toString(); +}; +function createDisabledLogger(namespace) { + const logger3 = () => { + }; + logger3.enabled = false; + logger3.color = ""; + logger3.diff = 0; + logger3.log = () => { + }; + logger3.namespace = namespace; + logger3.destroy = () => true; + logger3.extend = () => logger3; + return logger3; +} +function logger(name4) { + let trace = createDisabledLogger(`${name4}:trace`); + if (import_debug.default.enabled(`${name4}:trace`) && import_debug.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null) { + trace = (0, import_debug.default)(`${name4}:trace`); + } + return Object.assign((0, import_debug.default)(name4), { + error: (0, import_debug.default)(`${name4}:error`), + trace + }); +} + +// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js +var import_sparse_array3 = __toESM(require_sparse_array(), 1); + +// node_modules/@helia/unixfs/dist/src/commands/utils/consumable-hash.js +function wrapHash2(hashFn2) { + function hashing(value) { + if (value instanceof InfiniteHash2) { + return value; + } else { + return new InfiniteHash2(value, hashFn2); + } + } + return hashing; +} +var InfiniteHash2 = class { + _value; + _hashFn; + _depth; 
+ _availableBits; + _currentBufferIndex; + _buffers; + constructor(value, hashFn2) { + if (!(value instanceof Uint8Array)) { + throw new Error("can only hash Uint8Arrays"); + } + this._value = value; + this._hashFn = hashFn2; + this._depth = -1; + this._availableBits = 0; + this._currentBufferIndex = 0; + this._buffers = []; + } + async take(bits) { + let pendingBits = bits; + while (this._availableBits < pendingBits) { + await this._produceMoreBits(); + } + let result = 0; + while (pendingBits > 0) { + const hash = this._buffers[this._currentBufferIndex]; + const available = Math.min(hash.availableBits(), pendingBits); + const took = hash.take(available); + result = (result << available) + took; + pendingBits -= available; + this._availableBits -= available; + if (hash.availableBits() === 0) { + this._currentBufferIndex++; + } + } + return result; + } + untake(bits) { + let pendingBits = bits; + while (pendingBits > 0) { + const hash = this._buffers[this._currentBufferIndex]; + const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits); + hash.untake(availableForUntake); + pendingBits -= availableForUntake; + this._availableBits += availableForUntake; + if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) { + this._depth--; + this._currentBufferIndex--; + } + } + } + async _produceMoreBits() { + this._depth++; + const value = this._depth > 0 ? 
concat2([this._value, Uint8Array.from([this._depth])]) : this._value; + const hashValue = await this._hashFn(value); + const buffer2 = new ConsumableBuffer2(hashValue); + this._buffers.push(buffer2); + this._availableBits += buffer2.availableBits(); + } +}; +var START_MASKS2 = [ + 255, + 254, + 252, + 248, + 240, + 224, + 192, + 128 +]; +var STOP_MASKS2 = [ + 1, + 3, + 7, + 15, + 31, + 63, + 127, + 255 +]; +var ConsumableBuffer2 = class { + _value; + _currentBytePos; + _currentBitPos; + constructor(value) { + this._value = value; + this._currentBytePos = value.length - 1; + this._currentBitPos = 7; + } + availableBits() { + return this._currentBitPos + 1 + this._currentBytePos * 8; + } + totalBits() { + return this._value.length * 8; + } + take(bits) { + let pendingBits = bits; + let result = 0; + while (pendingBits > 0 && this._haveBits()) { + const byte = this._value[this._currentBytePos]; + const availableBits = this._currentBitPos + 1; + const taking = Math.min(availableBits, pendingBits); + const value = byteBitsToInt2(byte, availableBits - taking, taking); + result = (result << taking) + value; + pendingBits -= taking; + this._currentBitPos -= taking; + if (this._currentBitPos < 0) { + this._currentBitPos = 7; + this._currentBytePos--; + } + } + return result; + } + untake(bits) { + this._currentBitPos += bits; + while (this._currentBitPos > 7) { + this._currentBitPos -= 8; + this._currentBytePos += 1; + } + } + _haveBits() { + return this._currentBytePos >= 0; + } +}; +function byteBitsToInt2(byte, start, length4) { + const mask = maskFor2(start, length4); + return (byte & mask) >>> start; +} +function maskFor2(start, length4) { + return START_MASKS2[start] & STOP_MASKS2[Math.min(length4 + start - 1, 7)]; +} + +// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-constants.js +var hamtHashCode = BigInt(murmur3128.code); +var hamtBucketBits = 8; +async function hamtHashFn2(buf2) { + return (await murmur3128.encode(buf2)).subarray(0, 8).reverse(); +} + 
+// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js +var import_sparse_array2 = __toESM(require_sparse_array(), 1); + +// node_modules/@helia/unixfs/dist/src/commands/utils/persist.js +var persist2 = async (buffer2, blockstore, options) => { + if (options.codec == null) { + options.codec = src_exports2; + } + const multihash = await sha256.digest(buffer2); + const cid = CID2.create(options.cidVersion, options.codec.code, multihash); + await blockstore.put(cid, buffer2, { + ...options, + signal: options.signal + }); + return cid; +}; + +// node_modules/@helia/unixfs/dist/src/commands/utils/dir-sharded.js +var Dir2 = class { + options; + root; + dir; + path; + dirty; + flat; + parent; + parentKey; + unixfs; + mode; + mtime; + cid; + size; + nodeSize; + constructor(props, options) { + this.options = options ?? {}; + this.root = props.root; + this.dir = props.dir; + this.path = props.path; + this.dirty = props.dirty; + this.flat = props.flat; + this.parent = props.parent; + this.parentKey = props.parentKey; + this.unixfs = props.unixfs; + this.mode = props.mode; + this.mtime = props.mtime; + } +}; +var DirSharded2 = class extends Dir2 { + _bucket; + constructor(props, options) { + super(props, options); + this._bucket = createHAMT({ + hashFn: hamtHashFn2, + bits: 8 + }); + } + async put(name4, value) { + this.cid = void 0; + this.size = void 0; + this.nodeSize = void 0; + await this._bucket.put(name4, value); + } + async get(name4) { + return this._bucket.get(name4); + } + childCount() { + return this._bucket.leafCount(); + } + directChildrenCount() { + return this._bucket.childrenCount(); + } + onlyChild() { + return this._bucket.onlyChild(); + } + async *eachChildSeries() { + for await (const { key, value } of this._bucket.eachLeafSeries()) { + yield { + key, + child: value + }; + } + } + estimateNodeSize() { + if (this.nodeSize !== void 0) { + return this.nodeSize; + } + this.nodeSize = calculateSize2(this._bucket, this, this.options); + return 
this.nodeSize; + } + async *flush(blockstore) { + for await (const entry of flush2(this._bucket, blockstore, this, this.options)) { + yield { + ...entry, + path: this.path + }; + } + } +}; +async function* flush2(bucket, blockstore, shardRoot, options) { + const children = bucket._children; + const links = []; + let childrenSize = 0n; + for (let i = 0; i < children.length; i++) { + const child = children.get(i); + if (child == null) { + continue; + } + const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0"); + if (child instanceof Bucket) { + let shard; + for await (const subShard of flush2(child, blockstore, null, options)) { + shard = subShard; + } + if (shard == null) { + throw new Error("Could not flush sharded directory, no subshard found"); + } + links.push({ + Name: labelPrefix, + Tsize: Number(shard.size), + Hash: shard.cid + }); + childrenSize += shard.size; + } else if (isDir2(child.value)) { + const dir2 = child.value; + let flushedDir; + for await (const entry of dir2.flush(blockstore)) { + flushedDir = entry; + yield flushedDir; + } + if (flushedDir == null) { + throw new Error("Did not flush dir"); + } + const label = labelPrefix + child.key; + links.push({ + Name: label, + Tsize: Number(flushedDir.size), + Hash: flushedDir.cid + }); + childrenSize += flushedDir.size; + } else { + const value = child.value; + if (value.cid == null) { + continue; + } + const label = labelPrefix + child.key; + const size2 = value.size; + links.push({ + Name: label, + Tsize: Number(size2), + Hash: value.cid + }); + childrenSize += BigInt(size2 ?? 0); + } + } + const data = Uint8Array.from(children.bitField().reverse()); + const dir = new UnixFS({ + type: "hamt-sharded-directory", + data, + fanout: BigInt(bucket.tableSize()), + hashType: hamtHashCode, + mtime: shardRoot == null ? void 0 : shardRoot.mtime, + mode: shardRoot == null ? 
void 0 : shardRoot.mode + }); + const node = { + Data: dir.marshal(), + Links: links + }; + const buffer2 = encode7(prepare(node)); + const cid = await persist2(buffer2, blockstore, options); + const size = BigInt(buffer2.byteLength) + childrenSize; + yield { + cid, + unixfs: dir, + size + }; +} +function isDir2(obj) { + return typeof obj.flush === "function"; +} +function calculateSize2(bucket, shardRoot, options) { + const children = bucket._children; + const links = []; + for (let i = 0; i < children.length; i++) { + const child = children.get(i); + if (child == null) { + continue; + } + const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0"); + if (child instanceof Bucket) { + const size = calculateSize2(child, null, options); + links.push({ + Name: labelPrefix, + Tsize: Number(size), + Hash: options.cidVersion === 0 ? CID_V02 : CID_V12 + }); + } else if (typeof child.value.flush === "function") { + const dir2 = child.value; + const size = dir2.nodeSize(); + links.push({ + Name: labelPrefix + child.key, + Tsize: Number(size), + Hash: options.cidVersion === 0 ? CID_V02 : CID_V12 + }); + } else { + const value = child.value; + if (value.cid == null) { + continue; + } + const label = labelPrefix + child.key; + const size = value.size; + links.push({ + Name: label, + Tsize: Number(size), + Hash: value.cid + }); + } + } + const data = Uint8Array.from(children.bitField().reverse()); + const dir = new UnixFS({ + type: "hamt-sharded-directory", + data, + fanout: BigInt(bucket.tableSize()), + hashType: hamtHashCode, + mtime: shardRoot == null ? void 0 : shardRoot.mtime, + mode: shardRoot == null ? 
void 0 : shardRoot.mode + }); + const buffer2 = encode7(prepare({ + Data: dir.marshal(), + Links: links + })); + return buffer2.length; +} +var CID_V02 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn"); +var CID_V12 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi"); + +// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js +var log = logger("helia:unixfs:commands:utils:hamt-utils"); +var toPrefix2 = (position) => { + return position.toString(16).toUpperCase().padStart(2, "0").substring(0, 2); +}; +var createShard = async (blockstore, contents, options) => { + const shard = new DirSharded2({ + root: true, + dir: true, + parent: void 0, + parentKey: void 0, + path: "", + dirty: true, + flat: false, + mtime: options.mtime, + mode: options.mode + }, options); + for (let i = 0; i < contents.length; i++) { + await shard._bucket.put(contents[i].name, { + size: contents[i].size, + cid: contents[i].cid + }); + } + const res = await src_default7(shard.flush(blockstore)); + if (res == null) { + throw new Error("Flushing shard yielded no result"); + } + return res; +}; +var updateShardedDirectory = async (path6, blockstore, options) => { + const shardRoot = UnixFS.unmarshal(path6[0].node.Data ?? 
new Uint8Array(0)); + const fanout = BigInt(Math.pow(2, hamtBucketBits)); + path6.reverse(); + let cid; + let node; + for (let i = 0; i < path6.length; i++) { + const isRoot = i === path6.length - 1; + const segment = path6[i]; + const data = Uint8Array.from(segment.children.bitField().reverse()); + const dir = new UnixFS({ + type: "hamt-sharded-directory", + data, + fanout, + hashType: hamtHashCode + }); + if (isRoot) { + dir.mtime = shardRoot.mtime; + dir.mode = shardRoot.mode; + } + node = { + Data: dir.marshal(), + Links: segment.node.Links + }; + const block = encode7(prepare(node)); + cid = await persist2(block, blockstore, options); + if (!isRoot) { + const nextSegment = path6[i + 1]; + if (nextSegment == null) { + throw new Error("Was not operating on shard root but also had no parent?"); + } + log("updating link in parent sub-shard with prefix %s", nextSegment.prefix); + nextSegment.node.Links = nextSegment.node.Links.filter((l) => l.Name !== nextSegment.prefix); + nextSegment.node.Links.push({ + Name: nextSegment.prefix, + Hash: cid, + Tsize: segment.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), block.byteLength) + }); + } + } + if (cid == null || node == null) { + throw new Error("Noting persisted"); + } + return { cid, node }; +}; +var recreateShardedDirectory = async (cid, fileName, blockstore, options) => { + const wrapped = wrapHash2(hamtHashFn2); + const hash = wrapped(fromString3(fileName)); + const path6 = []; + while (true) { + const block = await blockstore.get(cid, options); + const node = decode11(block); + const children = new import_sparse_array2.default(); + const index = await hash.take(hamtBucketBits); + const prefix = toPrefix2(index); + path6.push({ + prefix, + children, + node + }); + let childLink; + for (const link of node.Links) { + const linkName2 = link.Name ?? 
""; + if (linkName2.length < 2) { + throw new Error("Invalid HAMT - link name was too short"); + } + const position = parseInt(linkName2.substring(0, 2), 16); + children.set(position, true); + if (linkName2.startsWith(prefix)) { + childLink = link; + } + } + if (childLink == null) { + log("no link found with prefix %s for %s", prefix, fileName); + break; + } + const linkName = childLink.Name ?? ""; + if (linkName.length < 2) { + throw new Error("Invalid HAMT - link name was too short"); + } + if (linkName.length === 2) { + cid = childLink.Hash; + log("descend into sub-shard with prefix %s", linkName); + continue; + } + break; + } + return { path: path6, hash }; +}; + +// node_modules/@helia/unixfs/dist/src/commands/utils/is-over-shard-threshold.js +async function isOverShardThreshold(node, blockstore, threshold, options) { + if (node.Data == null) { + throw new Error("DagPB node had no data"); + } + const unixfs2 = UnixFS.unmarshal(node.Data); + let size; + if (unixfs2.type === "directory") { + size = estimateNodeSize(node); + } else if (unixfs2.type === "hamt-sharded-directory") { + size = await estimateShardSize(node, 0, threshold, blockstore, options); + } else { + throw new Error("Can only estimate the size of directories or shards"); + } + return size > threshold; +} +function estimateNodeSize(node) { + let size = 0; + for (const link of node.Links) { + size += (link.Name ?? "").length; + size += link.Hash.version === 1 ? CID_V12.bytes.byteLength : CID_V02.bytes.byteLength; + } + return size; +} +async function estimateShardSize(node, current, max, blockstore, options) { + if (current > max) { + return max; + } + if (node.Data == null) { + return current; + } + const unixfs2 = UnixFS.unmarshal(node.Data); + if (!unixfs2.isDirectory()) { + return current; + } + for (const link of node.Links) { + let name4 = link.Name ?? 
""; + name4 = name4.substring(2); + current += name4.length; + current += link.Hash.bytes.byteLength; + if (link.Hash.code === code2) { + const block = await blockstore.get(link.Hash, options); + const node2 = decode11(block); + current += await estimateShardSize(node2, current, max, blockstore, options); + } + } + return current; +} + +// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js +var log2 = logger("helia:unixfs:components:utils:add-link"); +async function addLink(parent, child, blockstore, options) { + if (parent.node.Data == null) { + throw new InvalidParametersError("Invalid parent passed to addLink"); + } + const meta = UnixFS.unmarshal(parent.node.Data); + if (meta.type === "hamt-sharded-directory") { + log2("adding link to sharded directory"); + return addToShardedDirectory(parent, child, blockstore, options); + } + log2(`adding ${child.Name} (${child.Hash}) to regular directory`); + const result = await addToDirectory(parent, child, blockstore, options); + if (await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) { + log2("converting directory to sharded directory"); + const converted = await convertToShardedDirectory(result, blockstore); + result.cid = converted.cid; + result.node = decode11(await blockstore.get(converted.cid, options)); + } + return result; +} +var convertToShardedDirectory = async (parent, blockstore) => { + if (parent.node.Data == null) { + throw new InvalidParametersError("Invalid parent passed to convertToShardedDirectory"); + } + const unixfs2 = UnixFS.unmarshal(parent.node.Data); + const result = await createShard(blockstore, parent.node.Links.map((link) => ({ + name: link.Name ?? "", + size: BigInt(link.Tsize ?? 
0), + cid: link.Hash + })), { + mode: unixfs2.mode, + mtime: unixfs2.mtime, + cidVersion: parent.cid.version + }); + log2(`converted directory to sharded directory ${result.cid}`); + return result; +}; +var addToDirectory = async (parent, child, blockstore, options) => { + const parentLinks = parent.node.Links.filter((link) => { + const matches = link.Name === child.Name; + if (matches && !options.allowOverwriting) { + throw new AlreadyExistsError(); + } + return !matches; + }); + parentLinks.push(child); + if (parent.node.Data == null) { + throw new InvalidPBNodeError("Parent node with no data passed to addToDirectory"); + } + const node = UnixFS.unmarshal(parent.node.Data); + let data; + if (node.mtime != null) { + const ms = Date.now(); + const secs = Math.floor(ms / 1e3); + node.mtime = { + secs: BigInt(secs), + nsecs: (ms - secs * 1e3) * 1e3 + }; + data = node.marshal(); + } else { + data = parent.node.Data; + } + parent.node = prepare({ + Data: data, + Links: parentLinks + }); + const buf2 = encode7(parent.node); + const hash = await sha256.digest(buf2); + const cid = CID2.create(parent.cid.version, code2, hash); + await blockstore.put(cid, buf2); + return { + node: parent.node, + cid + }; +}; +var addToShardedDirectory = async (parent, child, blockstore, options) => { + var _a; + const { path: path6, hash } = await recreateShardedDirectory(parent.cid, child.Name, blockstore, options); + const finalSegment = path6[path6.length - 1]; + if (finalSegment == null) { + throw new Error("Invalid HAMT, could not generate path"); + } + const prefix = finalSegment.prefix; + const index = parseInt(prefix, 16); + log2("next prefix for %s is %s", child.Name, prefix); + const linkName = `${prefix}${child.Name}`; + const existingLink = finalSegment.node.Links.find((l) => (l.Name ?? 
"").startsWith(prefix)); + if (existingLink != null) { + log2("link %s was present in shard", linkName); + if (existingLink.Name === linkName) { + if (!options.allowOverwriting) { + throw new AlreadyExistsError(); + } + log2("overwriting %s in subshard", child.Name); + finalSegment.node.Links = finalSegment.node.Links.filter((l) => l.Name !== linkName); + finalSegment.node.Links.push({ + Name: linkName, + Hash: child.Hash, + Tsize: child.Tsize + }); + } else if (((_a = existingLink.Name) == null ? void 0 : _a.length) === 2) { + throw new Error("Existing link was subshard?!"); + } else { + log2("prefix %s already exists, creating new subshard", prefix); + const index2 = finalSegment.node.Links.findIndex((l) => { + var _a2; + return (_a2 = l.Name) == null ? void 0 : _a2.startsWith(prefix); + }); + const sibling = finalSegment.node.Links.splice(index2, 1)[0]; + const siblingName = (sibling.Name ?? "").substring(2); + const wrapped = wrapHash2(hamtHashFn2); + const siblingHash = wrapped(fromString3(siblingName)); + for (let i = 0; i < path6.length; i++) { + await siblingHash.take(hamtBucketBits); + } + while (true) { + const siblingIndex = await siblingHash.take(hamtBucketBits); + const siblingPrefix = toPrefix2(siblingIndex); + sibling.Name = `${siblingPrefix}${siblingName}`; + const newIndex = await hash.take(hamtBucketBits); + const newPrefix = toPrefix2(newIndex); + if (siblingPrefix === newPrefix) { + const children2 = new import_sparse_array3.default(); + children2.set(newIndex, true); + path6.push({ + prefix: newPrefix, + children: children2, + node: { + Links: [] + } + }); + continue; + } + const children = new import_sparse_array3.default(); + children.set(newIndex, true); + children.set(siblingIndex, true); + path6.push({ + prefix, + children, + node: { + Links: [ + sibling, + { + Name: `${newPrefix}${child.Name}`, + Hash: child.Hash, + Tsize: child.Tsize + } + ] + } + }); + break; + } + } + } else { + log2("link %s was not present in sub-shard", linkName); + 
child.Name = linkName; + finalSegment.node.Links.push(child); + finalSegment.children.set(index, true); + log2("adding %s to existing sub-shard", linkName); + } + return updateShardedDirectory(path6, blockstore, options); +}; + +// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-directory.js +async function cidToDirectory(cid, blockstore, options = {}) { + const entry = await exporter(cid, blockstore, options); + if (entry.type !== "directory") { + throw new NotADirectoryError(`${cid.toString()} was not a UnixFS directory`); + } + return { + cid, + node: entry.node + }; +} + +// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-pblink.js +async function cidToPBLink(cid, name4, blockstore, options) { + const sourceEntry = await exporter(cid, blockstore, options); + if (sourceEntry.type !== "directory" && sourceEntry.type !== "file" && sourceEntry.type !== "raw") { + throw new NotUnixFSError(`${cid.toString()} was not a UnixFS node`); + } + return { + Name: name4, + Tsize: sourceEntry.node instanceof Uint8Array ? sourceEntry.node.byteLength : dagNodeTsize(sourceEntry.node), + Hash: cid + }; +} +function dagNodeTsize(node) { + const linkSizes = node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 
0), 0); + return encode7(node).byteLength + linkSizes; +} + +// node_modules/@helia/unixfs/dist/src/commands/utils/resolve.js +var log3 = logger("helia:unixfs:components:utils:resolve"); +async function resolve5(cid, path6, blockstore, options) { + if (path6 == null || path6 === "") { + return { cid }; + } + log3('resolve "%s" under %c', path6, cid); + const parts = path6.split("/").filter(Boolean); + const segments = [{ + name: "", + cid, + size: 0n + }]; + for (let i = 0; i < parts.length; i++) { + const part = parts[i]; + const result = await exporter(cid, blockstore, options); + log3('resolving "%s"', part, result); + if (result.type === "file") { + if (i < parts.length - 1) { + throw new InvalidParametersError("Path was invalid"); + } + cid = result.cid; + } else if (result.type === "directory") { + let dirCid; + for await (const entry of result.content()) { + if (entry.name === part) { + dirCid = entry.cid; + break; + } + } + if (dirCid == null) { + throw new DoesNotExistError("Could not find path in directory"); + } + cid = dirCid; + segments.push({ + name: part, + cid, + size: result.size + }); + } else { + throw new InvalidParametersError("Could not resolve path"); + } + } + log3("resolved %s to %c", path6, cid); + return { + cid, + path: path6, + segments + }; +} +async function updatePathCids(cid, result, blockstore, options) { + if (result.segments == null || result.segments.length === 0) { + return cid; + } + let child = result.segments.pop(); + if (child == null) { + throw new Error("Insufficient segments"); + } + child.cid = cid; + result.segments.reverse(); + for (const parent of result.segments) { + const [directory, pblink] = await Promise.all([ + cidToDirectory(parent.cid, blockstore, options), + cidToPBLink(child.cid, child.name, blockstore, options) + ]); + const result2 = await addLink(directory, pblink, blockstore, { + ...options, + allowOverwriting: true, + cidVersion: cid.version + }); + cid = result2.cid; + parent.cid = cid; + child = 
parent; + } + return cid; +} + +// node_modules/@helia/unixfs/dist/src/commands/cat.js +var mergeOptions2 = merge_options_default.bind({ ignoreUndefined: true }); +var defaultOptions = {}; +async function* cat(cid, blockstore, options = {}) { + const opts = mergeOptions2(defaultOptions, options); + const resolved = await resolve5(cid, opts.path, blockstore, opts); + const result = await exporter(resolved.cid, blockstore, opts); + if (result.type !== "file" && result.type !== "raw") { + throw new NotAFileError(); + } + if (result.content == null) { + throw new NoContentError(); + } + yield* result.content(opts); +} + +// node_modules/@helia/unixfs/dist/src/commands/utils/constants.js +var SHARD_SPLIT_THRESHOLD_BYTES = 262144; + +// node_modules/@helia/unixfs/dist/src/commands/chmod.js +var mergeOptions3 = merge_options_default.bind({ ignoreUndefined: true }); +var log4 = logger("helia:unixfs:chmod"); +var defaultOptions2 = { + recursive: false, + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES +}; +async function chmod(cid, mode, blockstore, options = {}) { + const opts = mergeOptions3(defaultOptions2, options); + const resolved = await resolve5(cid, opts.path, blockstore, options); + log4("chmod %c %d", resolved.cid, mode); + if (opts.recursive) { + const root = await pipe( + async function* () { + for await (const entry of recursive(resolved.cid, blockstore, options)) { + let metadata2; + let links2 = []; + if (entry.type === "raw") { + metadata2 = new UnixFS({ type: "file", data: entry.node }); + } else if (entry.type === "file" || entry.type === "directory") { + metadata2 = entry.unixfs; + links2 = entry.node.Links; + } else { + throw new NotUnixFSError(); + } + metadata2.mode = mode; + const node = { + Data: metadata2.marshal(), + Links: links2 + }; + yield { + path: entry.path, + content: node + }; + } + }, + // @ts-expect-error cannot combine progress types + (source) => importer(source, blockstore, { + ...opts, + dagBuilder: async function* (source2, 
block2) { + for await (const entry of source2) { + yield async function() { + const node = entry.content; + const buf2 = encode7(node); + const updatedCid2 = await persist2(buf2, block2, { + ...opts, + cidVersion: cid.version + }); + if (node.Data == null) { + throw new InvalidPBNodeError(`${updatedCid2} had no data`); + } + const unixfs2 = UnixFS.unmarshal(node.Data); + return { + cid: updatedCid2, + size: BigInt(buf2.length), + path: entry.path, + unixfs: unixfs2 + }; + }; + } + } + }), + async (nodes) => src_default7(nodes) + ); + if (root == null) { + throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`); + } + return updatePathCids(root.cid, resolved, blockstore, opts); + } + const block = await blockstore.get(resolved.cid, options); + let metadata; + let links = []; + if (resolved.cid.code === code3) { + metadata = new UnixFS({ type: "file", data: block }); + } else { + const node = decode11(block); + if (node.Data == null) { + throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`); + } + links = node.Links; + metadata = UnixFS.unmarshal(node.Data); + } + metadata.mode = mode; + const updatedBlock = encode7({ + Data: metadata.marshal(), + Links: links + }); + const hash = await sha256.digest(updatedBlock); + const updatedCid = CID2.create(resolved.cid.version, code2, hash); + await blockstore.put(updatedCid, updatedBlock); + return updatePathCids(updatedCid, resolved, blockstore, opts); +} + +// node_modules/@helia/unixfs/dist/src/commands/cp.js +var mergeOptions4 = merge_options_default.bind({ ignoreUndefined: true }); +var log5 = logger("helia:unixfs:cp"); +var defaultOptions3 = { + force: false, + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES +}; +async function cp(source, target, name4, blockstore, options = {}) { + const opts = mergeOptions4(defaultOptions3, options); + if (name4.includes("/")) { + throw new InvalidParametersError("Name must not have slashes"); + } + const [directory, pblink] = await Promise.all([ + 
cidToDirectory(target, blockstore, opts), + cidToPBLink(source, name4, blockstore, opts) + ]); + log5('Adding %c as "%s" to %c', source, name4, target); + const result = await addLink(directory, pblink, blockstore, { + allowOverwriting: opts.force, + cidVersion: target.version, + ...opts + }); + return result.cid; +} + +// node_modules/@helia/unixfs/dist/src/commands/ls.js +var mergeOptions5 = merge_options_default.bind({ ignoreUndefined: true }); +var defaultOptions4 = {}; +async function* ls(cid, blockstore, options = {}) { + const opts = mergeOptions5(defaultOptions4, options); + const resolved = await resolve5(cid, opts.path, blockstore, opts); + const result = await exporter(resolved.cid, blockstore); + if (result.type === "file" || result.type === "raw") { + yield result; + return; + } + if (result.content == null) { + throw new NoContentError(); + } + if (result.type !== "directory") { + throw new NotADirectoryError(); + } + yield* result.content({ + offset: options.offset, + length: options.length + }); +} + +// node_modules/@helia/unixfs/dist/src/commands/mkdir.js +var mergeOptions6 = merge_options_default.bind({ ignoreUndefined: true }); +var log6 = logger("helia:unixfs:mkdir"); +var defaultOptions5 = { + cidVersion: 1, + force: false, + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES +}; +async function mkdir(parentCid, dirname, blockstore, options = {}) { + const opts = mergeOptions6(defaultOptions5, options); + if (dirname.includes("/")) { + throw new InvalidParametersError("Path must not have slashes"); + } + const entry = await exporter(parentCid, blockstore, options); + if (entry.type !== "directory") { + throw new NotADirectoryError(`${parentCid.toString()} was not a UnixFS directory`); + } + log6("creating %s", dirname); + const metadata = new UnixFS({ + type: "directory", + mode: opts.mode, + mtime: opts.mtime + }); + const node = { + Data: metadata.marshal(), + Links: [] + }; + const buf2 = encode7(node); + const hash = await 
sha256.digest(buf2); + const emptyDirCid = CID2.create(opts.cidVersion, code2, hash); + await blockstore.put(emptyDirCid, buf2); + const [directory, pblink] = await Promise.all([ + cidToDirectory(parentCid, blockstore, opts), + cidToPBLink(emptyDirCid, dirname, blockstore, opts) + ]); + log6("adding empty dir called %s to %c", dirname, parentCid); + const result = await addLink(directory, pblink, blockstore, { + ...opts, + allowOverwriting: opts.force + }); + return result.cid; +} + +// node_modules/@helia/unixfs/dist/src/commands/utils/remove-link.js +var log7 = logger("helia:unixfs:utils:remove-link"); +async function removeLink(parent, name4, blockstore, options) { + if (parent.node.Data == null) { + throw new InvalidPBNodeError("Parent node had no data"); + } + const meta = UnixFS.unmarshal(parent.node.Data); + if (meta.type === "hamt-sharded-directory") { + log7(`removing ${name4} from sharded directory`); + const result = await removeFromShardedDirectory(parent, name4, blockstore, options); + if (!await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) { + log7("converting shard to flat directory %c", parent.cid); + return convertToFlatDirectory(result, blockstore, options); + } + return result; + } + log7(`removing link ${name4} regular directory`); + return removeFromDirectory(parent, name4, blockstore, options); +} +var removeFromDirectory = async (parent, name4, blockstore, options) => { + parent.node.Links = parent.node.Links.filter((link) => { + return link.Name !== name4; + }); + const parentBlock = encode7(parent.node); + const parentCid = await persist2(parentBlock, blockstore, { + ...options, + cidVersion: parent.cid.version + }); + log7(`Updated regular directory ${parentCid}`); + return { + node: parent.node, + cid: parentCid + }; +}; +var removeFromShardedDirectory = async (parent, name4, blockstore, options) => { + const { path: path6 } = await recreateShardedDirectory(parent.cid, name4, blockstore, 
options); + const finalSegment = path6[path6.length - 1]; + if (finalSegment == null) { + throw new Error("Invalid HAMT, could not generate path"); + } + const linkName = finalSegment.node.Links.filter((l) => (l.Name ?? "").substring(2) === name4).map((l) => l.Name).pop(); + if (linkName == null) { + throw new Error("File not found"); + } + const prefix = linkName.substring(0, 2); + const index = parseInt(prefix, 16); + finalSegment.node.Links = finalSegment.node.Links.filter((link) => link.Name !== linkName); + finalSegment.children.unset(index); + if (finalSegment.node.Links.length === 1) { + while (true) { + if (path6.length === 1) { + break; + } + const segment = path6[path6.length - 1]; + if (segment == null || segment.node.Links.length > 1) { + break; + } + path6.pop(); + const nextSegment = path6[path6.length - 1]; + if (nextSegment == null) { + break; + } + const link = segment.node.Links[0]; + nextSegment.node.Links = nextSegment.node.Links.filter((l) => !(l.Name ?? "").startsWith(nextSegment.prefix)); + nextSegment.node.Links.push({ + Hash: link.Hash, + Name: `${nextSegment.prefix}${(link.Name ?? 
"").substring(2)}`, + Tsize: link.Tsize + }); + } + } + return updateShardedDirectory(path6, blockstore, options); +}; +var convertToFlatDirectory = async (parent, blockstore, options) => { + if (parent.node.Data == null) { + throw new InvalidParametersError("Invalid parent passed to convertToFlatDirectory"); + } + const rootNode = { + Links: [] + }; + const dir = await exporter(parent.cid, blockstore); + if (dir.type !== "directory") { + throw new Error("Unexpected node type"); + } + for await (const entry of dir.content()) { + let tsize = 0; + if (entry.node instanceof Uint8Array) { + tsize = entry.node.byteLength; + } else { + tsize = encode7(entry.node).length; + } + rootNode.Links.push({ + Hash: entry.cid, + Name: entry.name, + Tsize: tsize + }); + } + const oldUnixfs = UnixFS.unmarshal(parent.node.Data); + rootNode.Data = new UnixFS({ type: "directory", mode: oldUnixfs.mode, mtime: oldUnixfs.mtime }).marshal(); + const block = encode7(prepare(rootNode)); + const cid = await persist2(block, blockstore, { + codec: src_exports2, + cidVersion: parent.cid.version, + signal: options.signal + }); + return { + cid, + node: rootNode + }; +}; + +// node_modules/@helia/unixfs/dist/src/commands/rm.js +var mergeOptions7 = merge_options_default.bind({ ignoreUndefined: true }); +var log8 = logger("helia:unixfs:rm"); +var defaultOptions6 = { + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES +}; +async function rm(target, name4, blockstore, options = {}) { + const opts = mergeOptions7(defaultOptions6, options); + if (name4.includes("/")) { + throw new InvalidParametersError("Name must not have slashes"); + } + const directory = await cidToDirectory(target, blockstore, opts); + log8("Removing %s from %c", name4, target); + const result = await removeLink(directory, name4, blockstore, { + ...opts, + cidVersion: target.version + }); + return result.cid; +} + +// node_modules/@helia/unixfs/dist/src/commands/stat.js +var mergeOptions8 = merge_options_default.bind({ 
ignoreUndefined: true }); +var log9 = logger("helia:unixfs:stat"); +var defaultOptions7 = {}; +async function stat(cid, blockstore, options = {}) { + var _a; + const opts = mergeOptions8(defaultOptions7, options); + const resolved = await resolve5(cid, options.path, blockstore, opts); + log9("stat %c", resolved.cid); + const result = await exporter(resolved.cid, blockstore, opts); + if (result.type !== "file" && result.type !== "directory" && result.type !== "raw") { + throw new NotUnixFSError(); + } + let fileSize = 0n; + let dagSize = 0n; + let localFileSize = 0n; + let localDagSize = 0n; + let blocks = 0; + let mode; + let mtime; + const type = result.type; + let unixfs2; + if (result.type === "raw") { + fileSize = BigInt(result.node.byteLength); + dagSize = BigInt(result.node.byteLength); + localFileSize = BigInt(result.node.byteLength); + localDagSize = BigInt(result.node.byteLength); + blocks = 1; + } + if (result.type === "directory") { + fileSize = 0n; + dagSize = BigInt(result.unixfs.marshal().byteLength); + localFileSize = 0n; + localDagSize = dagSize; + blocks = 1; + mode = result.unixfs.mode; + mtime = result.unixfs.mtime; + unixfs2 = result.unixfs; + } + if (result.type === "file") { + const results = await inspectDag(resolved.cid, blockstore, opts); + fileSize = result.unixfs.fileSize(); + dagSize = BigInt((((_a = result.node.Data) == null ? void 0 : _a.byteLength) ?? 0) + result.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 
0), 0)); + localFileSize = BigInt(results.localFileSize); + localDagSize = BigInt(results.localDagSize); + blocks = results.blocks; + mode = result.unixfs.mode; + mtime = result.unixfs.mtime; + unixfs2 = result.unixfs; + } + return { + cid: resolved.cid, + mode, + mtime, + fileSize, + dagSize, + localFileSize, + localDagSize, + blocks, + type, + unixfs: unixfs2 + }; +} +async function inspectDag(cid, blockstore, options) { + const results = { + localFileSize: 0, + localDagSize: 0, + blocks: 0 + }; + if (await blockstore.has(cid, options)) { + const block = await blockstore.get(cid, options); + results.blocks++; + results.localDagSize += block.byteLength; + if (cid.code === code3) { + results.localFileSize += block.byteLength; + } else if (cid.code === code2) { + const pbNode = decode11(block); + if (pbNode.Links.length > 0) { + for (const link of pbNode.Links) { + const linkResult = await inspectDag(link.Hash, blockstore, options); + results.localFileSize += linkResult.localFileSize; + results.localDagSize += linkResult.localDagSize; + results.blocks += linkResult.blocks; + } + } else { + if (pbNode.Data == null) { + throw new InvalidPBNodeError(`PBNode ${cid.toString()} had no data`); + } + const unixfs2 = UnixFS.unmarshal(pbNode.Data); + if (unixfs2.data == null) { + throw new InvalidPBNodeError(`UnixFS node ${cid.toString()} had no data`); + } + results.localFileSize += unixfs2.data.byteLength ?? 
0; + } + } else { + throw new UnknownError(`${cid.toString()} was neither DAG_PB nor RAW`); + } + } + return results; +} + +// node_modules/@helia/unixfs/dist/src/commands/touch.js +var mergeOptions9 = merge_options_default.bind({ ignoreUndefined: true }); +var log10 = logger("helia:unixfs:touch"); +var defaultOptions8 = { + recursive: false, + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES +}; +async function touch(cid, blockstore, options = {}) { + const opts = mergeOptions9(defaultOptions8, options); + const resolved = await resolve5(cid, opts.path, blockstore, opts); + const mtime = opts.mtime ?? { + secs: BigInt(Math.round(Date.now() / 1e3)), + nsecs: 0 + }; + log10("touch %c %o", resolved.cid, mtime); + if (opts.recursive) { + const root = await pipe( + async function* () { + for await (const entry of recursive(resolved.cid, blockstore)) { + let metadata2; + let links2; + if (entry.type === "raw") { + metadata2 = new UnixFS({ data: entry.node }); + links2 = []; + } else if (entry.type === "file" || entry.type === "directory") { + metadata2 = entry.unixfs; + links2 = entry.node.Links; + } else { + throw new NotUnixFSError(); + } + metadata2.mtime = mtime; + const node = { + Data: metadata2.marshal(), + Links: links2 + }; + yield { + path: entry.path, + content: node + }; + } + }, + // @ts-expect-error blockstore types are incompatible + (source) => importer(source, blockstore, { + ...opts, + dagBuilder: async function* (source2, block2) { + for await (const entry of source2) { + yield async function() { + const node = entry.content; + const buf2 = encode7(node); + const updatedCid2 = await persist2(buf2, block2, { + ...opts, + cidVersion: cid.version + }); + if (node.Data == null) { + throw new InvalidPBNodeError(`${updatedCid2} had no data`); + } + const unixfs2 = UnixFS.unmarshal(node.Data); + return { + cid: updatedCid2, + size: BigInt(buf2.length), + path: entry.path, + unixfs: unixfs2 + }; + }; + } + } + }), + async (nodes) => src_default7(nodes) + 
); + if (root == null) { + throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`); + } + return updatePathCids(root.cid, resolved, blockstore, opts); + } + const block = await blockstore.get(resolved.cid, options); + let metadata; + let links = []; + if (resolved.cid.code === code3) { + metadata = new UnixFS({ data: block }); + } else { + const node = decode11(block); + links = node.Links; + if (node.Data == null) { + throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`); + } + metadata = UnixFS.unmarshal(node.Data); + } + metadata.mtime = mtime; + const updatedBlock = encode7({ + Data: metadata.marshal(), + Links: links + }); + const hash = await sha256.digest(updatedBlock); + const updatedCid = CID2.create(resolved.cid.version, code2, hash); + await blockstore.put(updatedCid, updatedBlock); + return updatePathCids(updatedCid, resolved, blockstore, opts); +} + +// node_modules/it-glob/dist/src/index.js +var import_promises = __toESM(require("fs/promises"), 1); +var import_path = __toESM(require("path"), 1); + +// node_modules/minimatch/dist/mjs/index.js +var import_brace_expansion = __toESM(require_brace_expansion(), 1); + +// node_modules/minimatch/dist/mjs/assert-valid-pattern.js +var MAX_PATTERN_LENGTH = 1024 * 64; +var assertValidPattern = (pattern) => { + if (typeof pattern !== "string") { + throw new TypeError("invalid pattern"); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError("pattern is too long"); + } +}; + +// node_modules/minimatch/dist/mjs/brace-expressions.js +var posixClasses = { + "[:alnum:]": ["\\p{L}\\p{Nl}\\p{Nd}", true], + "[:alpha:]": ["\\p{L}\\p{Nl}", true], + "[:ascii:]": ["\\x00-\\x7f", false], + "[:blank:]": ["\\p{Zs}\\t", true], + "[:cntrl:]": ["\\p{Cc}", true], + "[:digit:]": ["\\p{Nd}", true], + "[:graph:]": ["\\p{Z}\\p{C}", true, true], + "[:lower:]": ["\\p{Ll}", true], + "[:print:]": ["\\p{C}", true], + "[:punct:]": ["\\p{P}", true], + "[:space:]": ["\\p{Z}\\t\\r\\n\\v\\f", true], + 
"[:upper:]": ["\\p{Lu}", true], + "[:word:]": ["\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}", true], + "[:xdigit:]": ["A-Fa-f0-9", false] +}; +var braceEscape = (s) => s.replace(/[[\]\\-]/g, "\\$&"); +var regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); +var rangesToString = (ranges) => ranges.join(""); +var parseClass = (glob2, position) => { + const pos = position; + if (glob2.charAt(pos) !== "[") { + throw new Error("not in a brace expression"); + } + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ""; + WHILE: + while (i < glob2.length) { + const c = glob2.charAt(i); + if ((c === "!" || c === "^") && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === "]" && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === "\\") { + if (!escaping) { + escaping = true; + i++; + continue; + } + } + if (c === "[" && !escaping) { + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob2.startsWith(cls, i)) { + if (rangeStart) { + return ["$.", false, glob2.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + escaping = false; + if (rangeStart) { + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c)); + } else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ""; + i++; + continue; + } + if (glob2.startsWith("-]", i + 1)) { + ranges.push(braceEscape(c + "-")); + i += 2; + continue; + } + if (glob2.startsWith("-", i + 1)) { + rangeStart = c; + i += 2; + continue; + } + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + return ["", false, 0, false]; + } + if (!ranges.length && !negs.length) { + return ["$.", false, glob2.length - pos, true]; + } + if (negs.length === 0 && ranges.length === 1 && 
/^\\?.$/.test(ranges[0]) && !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = "[" + (negate ? "^" : "") + rangesToString(ranges) + "]"; + const snegs = "[" + (negate ? "" : "^") + rangesToString(negs) + "]"; + const comb = ranges.length && negs.length ? "(" + sranges + "|" + snegs + ")" : ranges.length ? sranges : snegs; + return [comb, uflag, endPos - pos, true]; +}; + +// node_modules/minimatch/dist/mjs/unescape.js +var unescape = (s, { windowsPathsNoEscape = false } = {}) => { + return windowsPathsNoEscape ? s.replace(/\[([^\/\\])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, "$1$2").replace(/\\([^\/])/g, "$1"); +}; + +// node_modules/minimatch/dist/mjs/ast.js +var types2 = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]); +var isExtglobType = (c) => types2.has(c); +var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))"; +var startNoDot = "(?!\\.)"; +var addPatternStart = /* @__PURE__ */ new Set(["[", "."]); +var justDots = /* @__PURE__ */ new Set(["..", "."]); +var reSpecials = new Set("().*{}+?[]^$\\!"); +var regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); +var qmark = "[^/]"; +var star = qmark + "*?"; +var starNoEmpty = qmark + "+?"; +var AST = class _AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === "!" 
&& !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + if (this.#hasMagic !== void 0) + return this.#hasMagic; + for (const p of this.#parts) { + if (typeof p === "string") + continue; + if (p.type || p.hasMagic) + return this.#hasMagic = true; + } + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== void 0) + return this.#toString; + if (!this.type) { + return this.#toString = this.#parts.map((p) => String(p)).join(""); + } else { + return this.#toString = this.type + "(" + this.#parts.map((p) => String(p)).join("|") + ")"; + } + } + #fillNegs() { + if (this !== this.#root) + throw new Error("should only call on root"); + if (this.#filledNegs) + return this; + this.toString(); + this.#filledNegs = true; + let n; + while (n = this.#negs.pop()) { + if (n.type !== "!") + continue; + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + if (typeof part === "string") { + throw new Error("string part in extglob AST??"); + } + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === "") + continue; + if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) { + throw new Error("invalid part: " + p); + } + this.#parts.push(p); + } + } + toJSON() { + var _a; + const ret = this.type === null ? this.#parts.slice().map((p) => typeof p === "string" ? p : p.toJSON()) : [this.type, ...this.#parts.map((p) => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && (this === this.#root || this.#root.#filledNegs && ((_a = this.#parent) == null ? 
void 0 : _a.type) === "!")) { + ret.push({}); + } + return ret; + } + isStart() { + var _a; + if (this.#root === this) + return true; + if (!((_a = this.#parent) == null ? void 0 : _a.isStart())) + return false; + if (this.#parentIndex === 0) + return true; + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof _AST && pp.type === "!")) { + return false; + } + } + return true; + } + isEnd() { + var _a, _b, _c; + if (this.#root === this) + return true; + if (((_a = this.#parent) == null ? void 0 : _a.type) === "!") + return true; + if (!((_b = this.#parent) == null ? void 0 : _b.isEnd())) + return false; + if (!this.type) + return (_c = this.#parent) == null ? void 0 : _c.isEnd(); + const pl = this.#parent ? this.#parent.#parts.length : 0; + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === "string") + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new _AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + let i2 = pos; + let acc2 = ""; + while (i2 < str.length) { + const c = str.charAt(i2++); + if (escaping || c === "\\") { + escaping = !escaping; + acc2 += c; + continue; + } + if (inBrace) { + if (i2 === braceStart + 1) { + if (c === "^" || c === "!") { + braceNeg = true; + } + } else if (c === "]" && !(i2 === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc2 += c; + continue; + } else if (c === "[") { + inBrace = true; + braceStart = i2; + braceNeg = false; + acc2 += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i2) === "(") { + ast.push(acc2); + acc2 = ""; + const ext2 = new _AST(c, ast); + i2 = _AST.#parseAST(str, ext2, i2, opt); + ast.push(ext2); + continue; + } + acc2 += c; + } + 
ast.push(acc2); + return i2; + } + let i = pos + 1; + let part = new _AST(null, ast); + const parts = []; + let acc = ""; + while (i < str.length) { + const c = str.charAt(i++); + if (escaping || c === "\\") { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === "^" || c === "!") { + braceNeg = true; + } + } else if (c === "]" && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } else if (c === "[") { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === "(") { + part.push(acc); + acc = ""; + const ext2 = new _AST(c, part); + part.push(ext2); + i = _AST.#parseAST(str, ext2, i, opt); + continue; + } + if (c === "|") { + part.push(acc); + acc = ""; + parts.push(part); + part = new _AST(null, ast); + continue; + } + if (c === ")") { + if (acc === "" && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + part.push(acc); + acc = ""; + ast.push(...parts, part); + return i; + } + acc += c; + } + ast.type = null; + ast.#hasMagic = void 0; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new _AST(null, void 0, options); + _AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + if (this !== this.#root) + return this.#root.toMMPattern(); + const glob2 = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + const anyMagic = hasMagic || this.#hasMagic || this.#options.nocase && !this.#options.nocaseMagicOnly && glob2.toUpperCase() !== glob2.toLowerCase(); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? "i" : "") + (uflag ? 
"u" : ""); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob2 + }); + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. 
+ // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. + // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + var _a; + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src3 = this.#parts.map((p) => { + const [re, _, hasMagic, uflag] = typeof p === "string" ? 
_AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }).join(""); + let start2 = ""; + if (this.isStart()) { + if (typeof this.#parts[0] === "string") { + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + const needNoTrav = ( + // dots are allowed, and the pattern starts with [ or . + dot && aps.has(src3.charAt(0)) || // the pattern starts with \., and then [ or . + src3.startsWith("\\.") && aps.has(src3.charAt(2)) || // the pattern starts with \.\., and then [ or . + src3.startsWith("\\.\\.") && aps.has(src3.charAt(4)) + ); + const needNoDot = !dot && !allowDot && aps.has(src3.charAt(0)); + start2 = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ""; + } + } + } + let end = ""; + if (this.isEnd() && this.#root.#filledNegs && ((_a = this.#parent) == null ? void 0 : _a.type) === "!") { + end = "(?:$|\\/)"; + } + const final2 = start2 + src3 + end; + return [ + final2, + unescape(src3), + this.#hasMagic = !!this.#hasMagic, + this.#uflag + ]; + } + const repeated = this.type === "*" || this.type === "+"; + const start = this.type === "!" ? "(?:(?!(?:" : "(?:"; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== "!") { + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = void 0; + return [s, unescape(this.toString()), false, false]; + } + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? "" : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ""; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + let final = ""; + if (this.type === "!" && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : "") + starNoEmpty; + } else { + const close = this.type === "!" ? 
( + // !() must match something,but !(x) can match '' + "))" + (this.isStart() && !dot && !allowDot ? startNoDot : "") + star + ")" + ) : this.type === "@" ? ")" : this.type === "?" ? ")?" : this.type === "+" && bodyDotAllowed ? ")" : this.type === "*" && bodyDotAllowed ? `)?` : `)${this.type}`; + final = start + body + close; + } + return [ + final, + unescape(body), + this.#hasMagic = !!this.#hasMagic, + this.#uflag + ]; + } + #partsToRegExp(dot) { + return this.#parts.map((p) => { + if (typeof p === "string") { + throw new Error("string type in extglob ast??"); + } + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }).filter((p) => !(this.isStart() && this.isEnd()) || !!p).join("|"); + } + static #parseGlob(glob2, hasMagic, noEmpty = false) { + let escaping = false; + let re = ""; + let uflag = false; + for (let i = 0; i < glob2.length; i++) { + const c = glob2.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? "\\" : "") + c; + continue; + } + if (c === "\\") { + if (i === glob2.length - 1) { + re += "\\\\"; + } else { + escaping = true; + } + continue; + } + if (c === "[") { + const [src3, needUflag, consumed, magic] = parseClass(glob2, i); + if (consumed) { + re += src3; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === "*") { + if (noEmpty && glob2 === "*") + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === "?") { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, unescape(glob2), !!hasMagic, uflag]; + } +}; + +// node_modules/minimatch/dist/mjs/escape.js +var escape = (s, { windowsPathsNoEscape = false } = {}) => { + return windowsPathsNoEscape ? 
s.replace(/[?*()[\]]/g, "[$&]") : s.replace(/[?*()[\]\\]/g, "\\$&"); +}; + +// node_modules/minimatch/dist/mjs/index.js +var minimatch = (p, pattern, options = {}) => { + assertValidPattern(pattern); + if (!options.nocomment && pattern.charAt(0) === "#") { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +var starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +var starDotExtTest = (ext2) => (f) => !f.startsWith(".") && f.endsWith(ext2); +var starDotExtTestDot = (ext2) => (f) => f.endsWith(ext2); +var starDotExtTestNocase = (ext2) => { + ext2 = ext2.toLowerCase(); + return (f) => !f.startsWith(".") && f.toLowerCase().endsWith(ext2); +}; +var starDotExtTestNocaseDot = (ext2) => { + ext2 = ext2.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext2); +}; +var starDotStarRE = /^\*+\.\*+$/; +var starDotStarTest = (f) => !f.startsWith(".") && f.includes("."); +var starDotStarTestDot = (f) => f !== "." && f !== ".." && f.includes("."); +var dotStarRE = /^\.\*+$/; +var dotStarTest = (f) => f !== "." && f !== ".." && f.startsWith("."); +var starRE = /^\*+$/; +var starTest = (f) => f.length !== 0 && !f.startsWith("."); +var starTestDot = (f) => f.length !== 0 && f !== "." && f !== ".."; +var qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +var qmarksTestNocase = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExt([$0]); + if (!ext2) + return noext; + ext2 = ext2.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext2); +}; +var qmarksTestNocaseDot = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext2) + return noext; + ext2 = ext2.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext2); +}; +var qmarksTestDot = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2); +}; +var qmarksTest = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExt([$0]); + return !ext2 ? 
noext : (f) => noext(f) && f.endsWith(ext2); +}; +var qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith("."); +}; +var qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== "." && f !== ".."; +}; +var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix"; +var path = { + win32: { sep: "\\" }, + posix: { sep: "/" } +}; +var sep = defaultPlatform === "win32" ? path.win32.sep : path.posix.sep; +minimatch.sep = sep; +var GLOBSTAR = Symbol("globstar **"); +minimatch.GLOBSTAR = GLOBSTAR; +var qmark2 = "[^/]"; +var star2 = qmark2 + "*?"; +var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; +var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?"; +var filter2 = (pattern, options = {}) => (p) => minimatch(p, pattern, options); +minimatch.filter = filter2; +var ext = (a, b = {}) => Object.assign({}, a, b); +var defaults = (def) => { + if (!def || typeof def !== "object" || !Object.keys(def).length) { + return minimatch; + } + const orig = minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, 
options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR + }); +}; +minimatch.defaults = defaults; +var braceExpand = (pattern, options = {}) => { + assertValidPattern(pattern); + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + return [pattern]; + } + return (0, import_brace_expansion.default)(pattern); +}; +minimatch.braceExpand = braceExpand; +var makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +minimatch.makeRe = makeRe; +var match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter((f) => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +minimatch.match = match; +var globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +var regExpEscape2 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); +var Minimatch = class { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + assertValidPattern(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === "win32"; + this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, "/"); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = 
false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? options.windowsNoMagicRoot : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== "string") + return true; + } + } + return false; + } + debug(..._) { + } + make() { + const pattern = this.pattern; + const options = this.options; + if (!options.nocomment && pattern.charAt(0) === "#") { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + this.parseNegate(); + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + const rawGlobParts = this.globSet.map((s) => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + const isUNC = s[0] === "" && s[1] === "" && (s[2] === "?" || !globMagic.test(s[2])) && !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map((ss) => this.parse(ss))]; + } else if (isDrive) { + return [s[0], ...s.slice(1).map((ss) => this.parse(ss))]; + } + } + return s.map((ss) => this.parse(ss)); + }); + this.debug(this.pattern, set); + this.set = set.filter((s) => s.indexOf(false) === -1); + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === "" && p[1] === "" && this.globParts[i][2] === "?" 
&& typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) { + p[2] = "?"; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === "**") { + globParts[i][j] = "*"; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } else if (optimizationLevel >= 1) { + globParts = this.levelOneOptimize(globParts); + } else { + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map((parts) => { + let gs = -1; + while (-1 !== (gs = parts.indexOf("**", gs + 1))) { + let i = gs; + while (parts[i + 1] === "**") { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map((parts) => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === "**" && prev === "**") { + return set; + } + if (part === "..") { + if (prev && prev !== ".." && prev !== "." && prev !== "**") { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? 
[""] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + if (!this.preserveMultipleSlashes) { + for (let i = 1; i < parts.length - 1; i++) { + const p = parts[i]; + if (i === 1 && p === "" && parts[0] === "") + continue; + if (p === "." || p === "") { + didSomething = true; + parts.splice(i, 1); + i--; + } + } + if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) { + didSomething = true; + parts.pop(); + } + } + let dd = 0; + while (-1 !== (dd = parts.indexOf("..", dd + 1))) { + const p = parts[dd - 1]; + if (p && p !== "." && p !== ".." && p !== "**") { + didSomething = true; + parts.splice(dd - 1, 2); + dd -= 2; + } + } + } while (didSomething); + return parts.length === 0 ? [""] : parts; + } + // First phase: single-pattern processing + //
 is 1 or more portions
+  //  is 1 or more portions
+  // 

is any portion other than ., .., '', or ** + // is . or '' + // + // **/.. is *brutal* for filesystem walking performance, because + // it effectively resets the recursive walk each time it occurs, + // and ** cannot be reduced out by a .. pattern part like a regexp + // or most strings (other than .., ., and '') can be. + // + //

/**/../

/

/ -> {

/../

/

/,

/**/

/

/} + //

// -> 
/
+  // 
/

/../ ->

/
+  // **/**/ -> **/
+  //
+  // **/*/ -> */**/ <== not valid because ** doesn't follow
+  // this WOULD be allowed if ** did follow symlinks, or * didn't
+  firstPhasePreProcess(globParts) {
+    let didSomething = false;
+    do {
+      didSomething = false;
+      for (let parts of globParts) {
+        let gs = -1;
+        while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
+          let gss = gs;
+          while (parts[gss + 1] === "**") {
+            gss++;
+          }
+          if (gss > gs) {
+            parts.splice(gs + 1, gss - gs);
+          }
+          let next = parts[gs + 1];
+          const p = parts[gs + 2];
+          const p2 = parts[gs + 3];
+          if (next !== "..")
+            continue;
+          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
+            continue;
+          }
+          didSomething = true;
+          parts.splice(gs, 1);
+          const other = parts.slice(0);
+          other[gs] = "**";
+          globParts.push(other);
+          gs--;
+        }
+        if (!this.preserveMultipleSlashes) {
+          for (let i = 1; i < parts.length - 1; i++) {
+            const p = parts[i];
+            if (i === 1 && p === "" && parts[0] === "")
+              continue;
+            if (p === "." || p === "") {
+              didSomething = true;
+              parts.splice(i, 1);
+              i--;
+            }
+          }
+          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
+            didSomething = true;
+            parts.pop();
+          }
+        }
+        let dd = 0;
+        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
+          const p = parts[dd - 1];
+          if (p && p !== "." && p !== ".." && p !== "**") {
+            didSomething = true;
+            const needDot = dd === 1 && parts[dd + 1] === "**";
+            const splin = needDot ? ["."] : [];
+            parts.splice(dd - 1, 2, ...splin);
+            if (parts.length === 0)
+              parts.push("");
+            dd -= 2;
+          }
+        }
+      }
+    } while (didSomething);
+    return globParts;
+  }
+  // second phase: multi-pattern dedupes
+  // {
/*/,
/

/} ->

/*/
+  // {
/,
/} -> 
/
+  // {
/**/,
/} -> 
/**/
+  //
+  // {
/**/,
/**/

/} ->

/**/
+  // ^-- not valid because ** doens't follow symlinks
+  secondPhasePreProcess(globParts) {
+    for (let i = 0; i < globParts.length - 1; i++) {
+      for (let j = i + 1; j < globParts.length; j++) {
+        const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+        if (!matched)
+          continue;
+        globParts[i] = matched;
+        globParts[j] = [];
+      }
+    }
+    return globParts.filter((gs) => gs.length);
+  }
+  partsMatch(a, b, emptyGSMatch = false) {
+    let ai = 0;
+    let bi = 0;
+    let result = [];
+    let which = "";
+    while (ai < a.length && bi < b.length) {
+      if (a[ai] === b[bi]) {
+        result.push(which === "b" ? b[bi] : a[ai]);
+        ai++;
+        bi++;
+      } else if (emptyGSMatch && a[ai] === "**" && b[bi] === a[ai + 1]) {
+        result.push(a[ai]);
+        ai++;
+      } else if (emptyGSMatch && b[bi] === "**" && a[ai] === b[bi + 1]) {
+        result.push(b[bi]);
+        bi++;
+      } else if (a[ai] === "*" && b[bi] && (this.options.dot || !b[bi].startsWith(".")) && b[bi] !== "**") {
+        if (which === "b")
+          return false;
+        which = "a";
+        result.push(a[ai]);
+        ai++;
+        bi++;
+      } else if (b[bi] === "*" && a[ai] && (this.options.dot || !a[ai].startsWith(".")) && a[ai] !== "**") {
+        if (which === "a")
+          return false;
+        which = "b";
+        result.push(b[bi]);
+        ai++;
+        bi++;
+      } else {
+        return false;
+      }
+    }
+    return a.length === b.length && result;
+  }
+  parseNegate() {
+    if (this.nonegate)
+      return;
+    const pattern = this.pattern;
+    let negate = false;
+    let negateOffset = 0;
+    for (let i = 0; i < pattern.length && pattern.charAt(i) === "!"; i++) {
+      negate = !negate;
+      negateOffset++;
+    }
+    if (negateOffset)
+      this.pattern = pattern.slice(negateOffset);
+    this.negate = negate;
+  }
+  // set partial to true to test if, for example,
+  // "/a/b" matches the start of "/*/b/*/d"
+  // Partial means, if you run out of file before you run
+  // out of pattern, then that's fine, as long as all
+  // the parts match.
+  matchOne(file, pattern, partial = false) {
+    const options = this.options;
+    if (this.isWindows) {
+      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
+      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
+      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
+      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
+      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
+      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
+      if (typeof fdi === "number" && typeof pdi === "number") {
+        const [fd, pd] = [file[fdi], pattern[pdi]];
+        if (fd.toLowerCase() === pd.toLowerCase()) {
+          pattern[pdi] = fd;
+          if (pdi > fdi) {
+            pattern = pattern.slice(pdi);
+          } else if (fdi > pdi) {
+            file = file.slice(fdi);
+          }
+        }
+      }
+    }
+    const { optimizationLevel = 1 } = this.options;
+    if (optimizationLevel >= 2) {
+      file = this.levelTwoFileOptimize(file);
+    }
+    this.debug("matchOne", this, { file, pattern });
+    this.debug("matchOne", file.length, pattern.length);
+    for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+      this.debug("matchOne loop");
+      var p = pattern[pi];
+      var f = file[fi];
+      this.debug(pattern, p, f);
+      if (p === false) {
+        return false;
+      }
+      if (p === GLOBSTAR) {
+        this.debug("GLOBSTAR", [pattern, p, f]);
+        var fr = fi;
+        var pr = pi + 1;
+        if (pr === pl) {
+          this.debug("** at the end");
+          for (; fi < fl; fi++) {
+            if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".")
+              return false;
+          }
+          return true;
+        }
+        while (fr < fl) {
+          var swallowee = file[fr];
+          this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
+          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+            this.debug("globstar found match!", fr, fl, swallowee);
+            return true;
+          } else {
+            if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
+              this.debug("dot detected!", file, fr, pattern, pr);
+              break;
+            }
+            this.debug("globstar swallow a segment, and continue");
+            fr++;
+          }
+        }
+        if (partial) {
+          this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
+          if (fr === fl) {
+            return true;
+          }
+        }
+        return false;
+      }
+      let hit;
+      if (typeof p === "string") {
+        hit = f === p;
+        this.debug("string match", p, f, hit);
+      } else {
+        hit = p.test(f);
+        this.debug("pattern match", p, f, hit);
+      }
+      if (!hit)
+        return false;
+    }
+    if (fi === fl && pi === pl) {
+      return true;
+    } else if (fi === fl) {
+      return partial;
+    } else if (pi === pl) {
+      return fi === fl - 1 && file[fi] === "";
+    } else {
+      throw new Error("wtf?");
+    }
+  }
+  braceExpand() {
+    return braceExpand(this.pattern, this.options);
+  }
+  parse(pattern) {
+    assertValidPattern(pattern);
+    const options = this.options;
+    if (pattern === "**")
+      return GLOBSTAR;
+    if (pattern === "")
+      return "";
+    let m;
+    let fastTest = null;
+    if (m = pattern.match(starRE)) {
+      fastTest = options.dot ? starTestDot : starTest;
+    } else if (m = pattern.match(starDotExtRE)) {
+      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m[1]);
+    } else if (m = pattern.match(qmarksRE)) {
+      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m);
+    } else if (m = pattern.match(starDotStarRE)) {
+      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+    } else if (m = pattern.match(dotStarRE)) {
+      fastTest = dotStarTest;
+    }
+    const re = AST.fromGlob(pattern, this.options).toMMPattern();
+    return fastTest ? Object.assign(re, { test: fastTest }) : re;
+  }
  makeRe() {
    // Build (and memoize) a single RegExp covering the entire pattern set.
    // `this.regexp === false` marks a previous failed or empty build, so
    // only rebuild when it is still undefined/null.
    if (this.regexp || this.regexp === false)
      return this.regexp;
    const set = this.set;
    if (!set.length) {
      this.regexp = false;
      return this.regexp;
    }
    const options = this.options;
    // Sub-expression standing in for "**", chosen by options.
    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
    const flags = new Set(options.nocase ? ["i"] : []);
    let re = set.map((pattern) => {
      // Map every parsed segment to its regex source, collecting flags
      // contributed by raw RegExp segments along the way.
      const pp = pattern.map((p) => {
        if (p instanceof RegExp) {
          for (const f of p.flags.split(""))
            flags.add(f);
        }
        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
      });
      // Fold GLOBSTAR sentinels into their neighbouring segments so that
      // "**" can match zero path components; consumed sentinels are
      // overwritten with GLOBSTAR again and filtered out below.
      pp.forEach((p, i) => {
        const next = pp[i + 1];
        const prev = pp[i - 1];
        if (p !== GLOBSTAR || prev === GLOBSTAR) {
          return;
        }
        if (prev === void 0) {
          if (next !== void 0 && next !== GLOBSTAR) {
            pp[i + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
          } else {
            pp[i] = twoStar;
          }
        } else if (next === void 0) {
          pp[i - 1] = prev + "(?:\\/|" + twoStar + ")?";
        } else if (next !== GLOBSTAR) {
          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
          pp[i + 1] = GLOBSTAR;
        }
      });
      return pp.filter((p) => p !== GLOBSTAR).join("/");
    }).join("|");
    // Anchor the expression; multiple alternatives need a group.
    const [open, close] = set.length > 1 ? ["(?:", ")"] : ["", ""];
    re = "^" + open + re + close + "$";
    if (this.negate)
      re = "^(?!" + re + ").+$";
    try {
      this.regexp = new RegExp(re, [...flags].join(""));
    } catch (ex) {
      // An uncompilable pattern yields a matcher that matches nothing.
      this.regexp = false;
    }
    return this.regexp;
  }
+  slashSplit(p) {
+    if (this.preserveMultipleSlashes) {
+      return p.split("/");
+    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+      return ["", ...p.split(/\/+/)];
+    } else {
+      return p.split(/\/+/);
+    }
+  }
  match(f, partial = this.partial) {
    // Test path `f` against the whole pattern set, honouring negation.
    this.debug("match", f, this.pattern);
    // Comment patterns never match; empty patterns only match "".
    if (this.comment) {
      return false;
    }
    if (this.empty) {
      return f === "";
    }
    if (f === "/" && partial) {
      return true;
    }
    const options = this.options;
    // Normalize Windows separators before splitting on "/".
    if (this.isWindows) {
      f = f.split("\\").join("/");
    }
    const ff = this.slashSplit(f);
    this.debug(this.pattern, "split", ff);
    const set = this.set;
    this.debug(this.pattern, "set", set);
    // Basename used for matchBase: the last non-empty path segment.
    let filename = ff[ff.length - 1];
    if (!filename) {
      for (let i = ff.length - 2; !filename && i >= 0; i--) {
        filename = ff[i];
      }
    }
    // A hit on any alternative decides the outcome, subject to the
    // negate / flipNegate semantics below.
    for (let i = 0; i < set.length; i++) {
      const pattern = set[i];
      let file = ff;
      if (options.matchBase && pattern.length === 1) {
        file = [filename];
      }
      const hit = this.matchOne(file, pattern, partial);
      if (hit) {
        if (options.flipNegate) {
          return true;
        }
        return !this.negate;
      }
    }
    // No alternative matched.
    if (options.flipNegate) {
      return false;
    }
    return this.negate;
  }
  static defaults(def) {
    // Return a Minimatch class whose options always include `def`.
    return minimatch.defaults(def).Minimatch;
  }
+};
// Attach the helper class/functions as properties of the main export so
// consumers can reach them via the `minimatch` function itself.
minimatch.AST = AST;
minimatch.Minimatch = Minimatch;
minimatch.escape = escape;
minimatch.unescape = unescape;
+
+// node_modules/it-glob/dist/src/index.js
async function* glob(dir, pattern, options = {}) {
  // Yield paths under `dir` matching `pattern` (minimatch semantics).
  // Resolve the target both absolutely and relative to the working dir.
  const absoluteDir = import_path.default.resolve(dir);
  const relativeDir = import_path.default.relative(options.cwd ?? process.cwd(), dir);
  const stats = await import_promises.default.stat(absoluteDir);
  if (stats.isDirectory()) {
    // Directories are walked recursively, starting from an empty sub-path.
    yield* _glob(absoluteDir, "", pattern, options);
    return;
  }
  // A plain file is yielded only when its relative path matches.
  if (minimatch(relativeDir, pattern, options)) {
    yield options.absolute === true ? absoluteDir : relativeDir;
  }
}
async function* _glob(base3, dir, pattern, options) {
  // Recursive directory walk: yield matching entries, then descend into
  // subdirectories (which are themselves still recursed even when they do
  // not match the pattern).
  const parent = import_path.default.join(base3, dir);
  for await (const entry of await import_promises.default.opendir(parent)) {
    const relativeEntryPath = import_path.default.join(dir, entry.name);
    const absoluteEntryPath = import_path.default.join(base3, dir, entry.name);
    const isDirectory = entry.isDirectory();
    // `nodir` suppresses directory results without stopping the descent.
    const matches = minimatch(relativeEntryPath, pattern, options) && !(isDirectory && options.nodir === true);
    if (matches) {
      yield options.absolute === true ? absoluteEntryPath : relativeEntryPath;
    }
    if (isDirectory) {
      yield* _glob(base3, relativeEntryPath, pattern, options);
    }
  }
}
+
+// node_modules/@helia/unixfs/dist/src/index.js
var DefaultUnixFS = class {
  components;
  /**
   * UnixFS facade: every method forwards to the corresponding module-level
   * helper, bound to the blockstore supplied via `components.blockstore`.
   */
  constructor(components) {
    this.components = components;
  }
  // Import many files/directories; yields one entry per item added.
  async *addAll(source, options = {}) {
    yield* addAll(source, this.components.blockstore, options);
  }
  async addBytes(bytes, options = {}) {
    return addBytes(bytes, this.components.blockstore, options);
  }
  async addByteStream(bytes, options = {}) {
    return addByteStream(bytes, this.components.blockstore, options);
  }
  async addFile(file, options = {}) {
    return addFile(file, this.components.blockstore, options);
  }
  async addDirectory(dir = {}, options = {}) {
    return addDirectory(dir, this.components.blockstore, options);
  }
  // Stream a file's content out of the DAG identified by `cid`.
  async *cat(cid, options = {}) {
    yield* cat(cid, this.components.blockstore, options);
  }
  async chmod(cid, mode, options = {}) {
    return chmod(cid, mode, this.components.blockstore, options);
  }
  async cp(source, target, name4, options = {}) {
    return cp(source, target, name4, this.components.blockstore, options);
  }
  // List the entries of a directory DAG node.
  async *ls(cid, options = {}) {
    yield* ls(cid, this.components.blockstore, options);
  }
  async mkdir(cid, dirname, options = {}) {
    return mkdir(cid, dirname, this.components.blockstore, options);
  }
  async rm(cid, path6, options = {}) {
    return rm(cid, path6, this.components.blockstore, options);
  }
  async stat(cid, options = {}) {
    return stat(cid, this.components.blockstore, options);
  }
  async touch(cid, options = {}) {
    return touch(cid, this.components.blockstore, options);
  }
};
function unixfs(helia) {
  // Wrap a Helia-like object (anything exposing `blockstore`) in the
  // UnixFS API surface.
  return new DefaultUnixFS(helia);
}
+
+// node_modules/blockstore-fs/dist/src/index.js
+var import_promises2 = __toESM(require("fs/promises"), 1);
+var import_node_path2 = __toESM(require("path"), 1);
+var import_node_util = require("util");
+
+// node_modules/blockstore-core/dist/src/errors.js
var errors_exports = {};
// Bundler-generated namespace: lazily re-export the error factory helpers
// below as a single `errors_exports` object (equivalent of `export { … }`).
__export(errors_exports, {
  abortedError: () => abortedError,
  closeFailedError: () => closeFailedError,
  deleteFailedError: () => deleteFailedError,
  getFailedError: () => getFailedError,
  hasFailedError: () => hasFailedError,
  notFoundError: () => notFoundError,
  openFailedError: () => openFailedError,
  putFailedError: () => putFailedError
});
+var import_err_code16 = __toESM(require_err_code(), 1);
function openFailedError(err) {
  // Tag the supplied (or a default) error with the ERR_OPEN_FAILED code.
  const cause = err ?? new Error("Open failed");
  return (0, import_err_code16.default)(cause, "ERR_OPEN_FAILED");
}
function closeFailedError(err) {
  // Tag the supplied (or a default) error with the ERR_CLOSE_FAILED code.
  const cause = err ?? new Error("Close failed");
  return (0, import_err_code16.default)(cause, "ERR_CLOSE_FAILED");
}
function putFailedError(err) {
  // Tag the supplied (or a default) error with the ERR_PUT_FAILED code.
  const cause = err ?? new Error("Put failed");
  return (0, import_err_code16.default)(cause, "ERR_PUT_FAILED");
}
function getFailedError(err) {
  // Tag the supplied (or a default) error with the ERR_GET_FAILED code.
  const cause = err ?? new Error("Get failed");
  return (0, import_err_code16.default)(cause, "ERR_GET_FAILED");
}
function deleteFailedError(err) {
  // Tag the supplied (or a default) error with the ERR_DELETE_FAILED code.
  const cause = err ?? new Error("Delete failed");
  return (0, import_err_code16.default)(cause, "ERR_DELETE_FAILED");
}
function hasFailedError(err) {
  // Tag the supplied (or a default) error with the ERR_HAS_FAILED code.
  const cause = err ?? new Error("Has failed");
  return (0, import_err_code16.default)(cause, "ERR_HAS_FAILED");
}
function notFoundError(err) {
  // Tag the supplied (or a default) error with the ERR_NOT_FOUND code.
  const cause = err ?? new Error("Not Found");
  return (0, import_err_code16.default)(cause, "ERR_NOT_FOUND");
}
function abortedError(err) {
  // Tag the supplied (or a default) error with the ERR_ABORTED code.
  const cause = err ?? new Error("Aborted");
  return (0, import_err_code16.default)(cause, "ERR_ABORTED");
}
+
+// node_modules/blockstore-core/node_modules/@libp2p/logger/dist/src/index.js
+var import_debug2 = __toESM(require_src2(), 1);
// Register custom printf-style formatters on the debug logger:
//   %b encodes bytes with base58btc2, %t with base322, %m with base64;
//   %p/%c/%k/%a fall back to toString(). null/undefined always render as
//   the literal string "undefined".
import_debug2.default.formatters.b = (v) => {
  return v == null ? "undefined" : base58btc2.baseEncode(v);
};
import_debug2.default.formatters.t = (v) => {
  return v == null ? "undefined" : base322.baseEncode(v);
};
import_debug2.default.formatters.m = (v) => {
  return v == null ? "undefined" : base64.baseEncode(v);
};
import_debug2.default.formatters.p = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug2.default.formatters.c = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug2.default.formatters.k = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug2.default.formatters.a = (v) => {
  return v == null ? "undefined" : v.toString();
};
function createDisabledLogger2(namespace) {
  // Build a permanently-disabled logger: invoking it (or its `log`) does
  // nothing, and `extend()` returns the same instance so every derived
  // namespace stays disabled too.
  const noop = () => {
  };
  const disabled = Object.assign(() => {
  }, {
    enabled: false,
    color: "",
    diff: 0,
    log: noop,
    namespace,
    destroy: () => true
  });
  disabled.extend = () => disabled;
  return disabled;
}
function logger2(name4) {
  // Create a namespaced logger with `error` and `trace` sub-loggers.
  // Trace output is opt-in: it is only enabled when debug has a ":trace"
  // namespace explicitly switched on; otherwise a disabled stub is used.
  const traceNamespace = `${name4}:trace`;
  const traceEnabled = import_debug2.default.enabled(traceNamespace) && import_debug2.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null;
  const trace = traceEnabled ? (0, import_debug2.default)(traceNamespace) : createDisabledLogger2(traceNamespace);
  return Object.assign((0, import_debug2.default)(name4), {
    error: (0, import_debug2.default)(`${name4}:error`),
    trace
  });
}
+
// node_modules/blockstore-core/dist/src/tiered.js
// Namespaced logger for the tiered-blockstore module.
var log11 = logger2("blockstore:core:tiered");
+
// node_modules/blockstore-core/dist/src/index.js
// Public aggregate of the error factory helpers defined above.
var Errors = {
  ...errors_exports
};
+
+// node_modules/blockstore-fs/dist/src/index.js
+var import_fast_write_atomic = __toESM(require_fast_write_atomic(), 1);
+
+// node_modules/blockstore-fs/dist/src/sharding.js
+var import_node_path = __toESM(require("path"), 1);
var NextToLast = class {
  extension;
  prefixLength;
  base;
  /**
   * "Next-to-last" sharding strategy: each block file is stored in a
   * directory named after the trailing `prefixLength` characters of its
   * base-encoded multihash, with `extension` appended to the file name.
   */
  constructor(init = {}) {
    this.extension = init.extension ?? ".data";
    this.prefixLength = init.prefixLength ?? 2;
    this.base = init.base ?? base32upper2;
  }
  // Map a CID to its on-disk { dir, file } location.
  encode(cid) {
    const encoded = this.base.encoder.encode(cid.multihash.bytes);
    const shardDir = encoded.substring(encoded.length - this.prefixLength);
    return {
      dir: shardDir,
      file: `${encoded}${this.extension}`
    };
  }
  // Recover a CID from a sharded file path (inverse of `encode`).
  decode(str) {
    const baseName = import_node_path.default.basename(str);
    const withoutExtension = baseName.endsWith(this.extension) ? baseName.substring(0, baseName.length - this.extension.length) : baseName;
    return CID2.decode(this.base.decoder.decode(withoutExtension));
  }
};
+
// node_modules/blockstore-fs/dist/src/index.js
// Promisified fast-write-atomic writer (callback-style module promisified
// once at load time).
var writeAtomic = (0, import_node_util.promisify)(import_fast_write_atomic.default);
async function writeFile(file, contents) {
  // Atomically write `contents` to `file`. If the underlying rename step
  // fails with EPERM (presumably a platform rename quirk — the data may
  // still have landed), accept the write as long as the destination exists
  // and is writable; otherwise propagate the failure.
  try {
    await writeAtomic(file, contents);
  } catch (err) {
    const isRenamePermissionError = err.code === "EPERM" && err.syscall === "rename";
    if (!isRenamePermissionError) {
      throw err;
    }
    await import_promises2.default.access(file, import_promises2.default.constants.F_OK | import_promises2.default.constants.W_OK);
  }
}
var FsBlockstore = class {
  path;
  createIfMissing;
  errorIfExists;
  putManyConcurrency;
  getManyConcurrency;
  deleteManyConcurrency;
  shardingStrategy;
  /**
   * Filesystem-backed blockstore: blocks are persisted as individual files
   * beneath `location`, laid out by `shardingStrategy` (NextToLast by
   * default). The *ManyConcurrency settings bound the parallelism of the
   * batch operations below.
   */
  constructor(location, init = {}) {
    this.path = import_node_path2.default.resolve(location);
    this.createIfMissing = init.createIfMissing ?? true;
    this.errorIfExists = init.errorIfExists ?? false;
    this.deleteManyConcurrency = init.deleteManyConcurrency ?? 50;
    this.getManyConcurrency = init.getManyConcurrency ?? 50;
    this.putManyConcurrency = init.putManyConcurrency ?? 50;
    this.shardingStrategy = init.shardingStrategy ?? new NextToLast();
  }
  /**
   * Ensure the backing directory exists and is writable, creating it when
   * `createIfMissing` is set. Throws ERR_OPEN_FAILED if the directory
   * exists while `errorIfExists` is set, or if it is missing and must not
   * be created. (Note: the errorIfExists throw is re-caught by the catch
   * below and rethrown unchanged, since its code is not ENOENT.)
   */
  async open() {
    try {
      await import_promises2.default.access(this.path, import_promises2.default.constants.F_OK | import_promises2.default.constants.W_OK);
      if (this.errorIfExists) {
        throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} already exists`));
      }
    } catch (err) {
      if (err.code === "ENOENT") {
        if (this.createIfMissing) {
          await import_promises2.default.mkdir(this.path, { recursive: true });
          return;
        } else {
          throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} does not exist`));
        }
      }
      throw err;
    }
  }
  // No handles are held between operations, so close is a no-op.
  async close() {
    await Promise.resolve();
  }
  /**
   * Persist one block under its sharded path, creating the shard directory
   * on demand. Failures are wrapped as ERR_PUT_FAILED.
   */
  async put(key, val) {
    const { dir, file } = this.shardingStrategy.encode(key);
    try {
      if (dir != null && dir !== "") {
        await import_promises2.default.mkdir(import_node_path2.default.join(this.path, dir), {
          recursive: true
        });
      }
      await writeFile(import_node_path2.default.join(this.path, dir, file), val);
      return key;
    } catch (err) {
      throw Errors.putFailedError(err);
    }
  }
  // Batched put with bounded concurrency; yields each stored CID.
  async *putMany(source) {
    yield* parallelBatch(src_default3(source, ({ cid, block }) => {
      return async () => {
        await this.put(cid, block);
        return cid;
      };
    }), this.putManyConcurrency);
  }
  /**
   * Read one block's bytes. Any read failure (including a missing file)
   * surfaces as ERR_NOT_FOUND.
   */
  async get(key) {
    const { dir, file } = this.shardingStrategy.encode(key);
    try {
      return await import_promises2.default.readFile(import_node_path2.default.join(this.path, dir, file));
    } catch (err) {
      throw Errors.notFoundError(err);
    }
  }
  // Batched get with bounded concurrency; yields { cid, block } pairs.
  async *getMany(source) {
    yield* parallelBatch(src_default3(source, (key) => {
      return async () => {
        return {
          cid: key,
          block: await this.get(key)
        };
      };
    }), this.getManyConcurrency);
  }
  /**
   * Remove one block. A missing file (ENOENT) is treated as success so
   * deletes are idempotent; other failures become ERR_DELETE_FAILED.
   */
  async delete(key) {
    const { dir, file } = this.shardingStrategy.encode(key);
    try {
      await import_promises2.default.unlink(import_node_path2.default.join(this.path, dir, file));
    } catch (err) {
      if (err.code === "ENOENT") {
        return;
      }
      throw Errors.deleteFailedError(err);
    }
  }
  // Batched delete with bounded concurrency; yields each deleted CID.
  async *deleteMany(source) {
    yield* parallelBatch(src_default3(source, (key) => {
      return async () => {
        await this.delete(key);
        return key;
      };
    }), this.deleteManyConcurrency);
  }
  /**
   * Check for the existence of the given key
   */
  async has(key) {
    const { dir, file } = this.shardingStrategy.encode(key);
    try {
      await import_promises2.default.access(import_node_path2.default.join(this.path, dir, file));
    } catch (err) {
      return false;
    }
    return true;
  }
  /**
   * Walk every block file on disk (matched by the sharding extension) and
   * yield { cid, block } pairs. Files deleted while iterating (ENOENT) are
   * silently skipped.
   */
  async *getAll() {
    const pattern = `**/*${this.shardingStrategy.extension}`.split(import_node_path2.default.sep).join("/");
    const files = glob(this.path, pattern, {
      absolute: true
    });
    for await (const file of files) {
      try {
        const buf2 = await import_promises2.default.readFile(file);
        const pair = {
          cid: this.shardingStrategy.decode(file),
          block: buf2
        };
        yield pair;
      } catch (err) {
        if (err.code !== "ENOENT") {
          throw err;
        }
      }
    }
  }
};
+
+// src/objectManager.js
+var import_node_fs = require("fs");
+var import_promises3 = require("fs/promises");
+var import_node_os = __toESM(require("os"));
+var import_node_path3 = __toESM(require("path"));
+var import_node_stream = require("stream");
+var import_uuid = require("uuid");
var ObjectManager = class {
  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
  #DEFAULT_REGION = "us-east-1";
  #DEFAULT_MAX_CONCURRENT_UPLOADS = 4;
  #client;
  #credentials;
  #defaultBucket;
  #gatewayConfiguration;
  #maxConcurrentUploads;
  /**
   * @typedef {Object} objectManagerOptions Optional settings for the constructor.
   * @property {string} [bucket] Default bucket to use.
   * @property {objectDownloadOptions} [gateway] Default gateway to use.
   * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads.
   */
  /**
   * @typedef {Object} objectDownloadOptions Optional settings for downloading objects
   * @property {string} endpoint Default gateway to use.
   * @property {string} [token] Token for the default gateway.
   * @property {number} [timeout=60000] Timeout for the default gateway
   */
  /**
   * @summary Creates a new instance of the constructor.
   * @param {string} clientKey - The access key ID for authentication.
   * @param {string} clientSecret - The secret access key for authentication.
   * @param {objectManagerOptions} options - Optional settings for the constructor.
   * @tutorial quickstart-object
   * @example
   * import { ObjectManager } from "@filebase/sdk";
   * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
   *   bucket: "my-default-bucket",
   *   maxConcurrentUploads: 4,
   *   gateway: {
   *     endpoint: "https://my-default-gateway.mydomain.com",
   *     token: SUPER_SECRET_GATEWAY_TOKEN
   *   }
   * });
   */
  constructor(clientKey, clientSecret, options) {
    var _a, _b, _c;
    // In test mode the S3 endpoint may be overridden via environment.
    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = {
      credentials: {
        accessKeyId: clientKey,
        secretAccessKey: clientSecret
      },
      endpoint: clientEndpoint,
      region: this.#DEFAULT_REGION,
      forcePathStyle: true
    };
    this.#defaultBucket = options == null ? void 0 : options.bucket;
    this.#maxConcurrentUploads = (options == null ? void 0 : options.maxConcurrentUploads) || this.#DEFAULT_MAX_CONCURRENT_UPLOADS;
    this.#credentials = {
      key: clientKey,
      secret: clientSecret
    };
    this.#client = new import_client_s32.S3Client(clientConfiguration);
    this.#gatewayConfiguration = {
      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
      timeout: (_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout
    };
  }
  /**
   * @typedef {Object} objectOptions
   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
   */
  /**
   * @typedef {Object} objectHeadResult
   * @property {string} cid The CID of the uploaded object
   * @property {function} download Convenience function to download the object via S3 or the selected gateway
   * @property {array} [entries] If a directory then returns an array of the containing objects
   * @property {string} entries.cid The CID of the uploaded object
   * @property {string} entries.path The path of the object
   */
  /**
   * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload.
   * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file
   * and an optional entries object when uploading a CAR file.
   *
   * @summary Uploads a file or a CAR file to the specified bucket.
   * @param {string} key - The key or path of the file in the bucket.
   * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded.
   *    If an array of files is provided, each file should have a 'path' property specifying the path of the file
   *    and a 'content' property specifying the content of the file.  The SDK will then construct a CAR file locally
   *    and use that as the content of the object to be uploaded.
   * @param {Object} [metadata] Optional metadata for pin object
   * @param {objectOptions} [options] - The options for uploading the object.
   * @returns {Promise}
   * @example
   * // Upload Object
   * await objectManager.upload("my-object", Buffer.from("Hello World!"));
   * // Upload Object with Metadata
   * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), {
   *   "application": "my-filebase-app"
   * });
   * // Upload Directory
   * await objectManager.upload("my-first-directory", [
   *  {
   *   path: "/testObjects/1.txt",
   *   content: Buffer.from("upload test object", "utf-8"),
   *  },
   *  {
   *   path: "/testObjects/deep/1.txt",
   *   content: Buffer.from("upload deep test object", "utf-8"),
   *  },
   *  {
   *   path: "/topLevel.txt",
   *   content: Buffer.from("upload top level test object", "utf-8"),
   *  },
   * ]);
   */
  async upload(key, source, metadata, options) {
    const uploadUUID = (0, import_uuid.v4)();
    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket, uploadOptions = {
      client: this.#client,
      params: {
        Bucket: bucket,
        Key: key,
        Body: source,
        Metadata: metadata || {}
      },
      queueSize: this.#maxConcurrentUploads,
      partSize: 26843546
      //25.6Mb || 250Gb Max File Size
    };
    let parsedEntries = {};
    if (Array.isArray(source)) {
      // Directory upload: pack all entries into a CAR file built inside a
      // temporary on-disk blockstore, then upload the CAR.
      uploadOptions.params.Metadata = {
        ...uploadOptions.params.Metadata,
        import: "car"
      };
      let temporaryCarFilePath, temporaryBlockstoreDir, temporaryBlockstore;
      try {
        temporaryBlockstoreDir = import_node_path3.default.resolve(
          import_node_os.default.tmpdir(),
          "filebase-sdk",
          "uploads",
          uploadUUID
        );
        temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
        await (0, import_promises3.mkdir)(temporaryBlockstoreDir, { recursive: true });
        temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir);
        const heliaFs = unixfs({
          blockstore: temporaryBlockstore
        });
        // Namespace every entry under the upload UUID so the CAR has a
        // single root directory.
        for (let sourceEntry of source) {
          sourceEntry.path = sourceEntry.path[0] === "/" ? `/${uploadUUID}${sourceEntry.path}` : `/${uploadUUID}/${sourceEntry.path}`;
        }
        for await (const entry of heliaFs.addAll(source)) {
          parsedEntries[entry.path] = entry;
        }
        const rootEntry = parsedEntries[uploadUUID];
        // Export the DAG rooted at the upload directory into the CAR file.
        const carExporter = car({ blockstore: temporaryBlockstore }), { writer, out } = CarWriter2.create([rootEntry.cid]);
        const output = (0, import_node_fs.createWriteStream)(temporaryCarFilePath);
        import_node_stream.Readable.from(out).pipe(output);
        await carExporter.export(rootEntry.cid, writer);
        uploadOptions.params.Body = (0, import_node_fs.createReadStream)(temporaryCarFilePath);
        const parallelUploads3 = new import_lib_storage.Upload(uploadOptions);
        await parallelUploads3.done();
      } finally {
        // Always release the temporary blockstore and its directory, even
        // when the CAR build or upload fails part-way through.
        if (typeof temporaryBlockstore !== "undefined") {
          await temporaryBlockstore.close();
        }
        if (typeof temporaryBlockstoreDir !== "undefined") {
          await (0, import_promises3.rm)(temporaryBlockstoreDir, { recursive: true, force: true });
        }
      }
    } else {
      const parallelUploads3 = new import_lib_storage.Upload(uploadOptions);
      await parallelUploads3.done();
    }
    // HEAD the uploaded object to read back the CID assigned by the
    // service. HeadObject takes no request body, so only Bucket/Key are
    // sent (previously the consumed source stream was passed as `Body`).
    const command = new import_client_s32.HeadObjectCommand({
      Bucket: bucket,
      Key: key
    }), headResult = await this.#client.send(command), responseCid = headResult.Metadata.cid;
    if (Object.keys(parsedEntries).length === 0) {
      return {
        cid: responseCid,
        download: () => {
          return this.#routeDownload(responseCid, key, options);
        }
      };
    }
    return {
      cid: responseCid,
      download: () => {
        return this.#routeDownload(responseCid, key, options);
      },
      entries: parsedEntries
    };
  }
  // Prefer the configured gateway when one exists; otherwise stream the
  // object through the S3 API.
  async #routeDownload(cid, key, options) {
    return typeof this.#gatewayConfiguration.endpoint !== "undefined" ? downloadFromGateway(cid, this.#gatewayConfiguration) : this.download(key, options);
  }
  /**
   * @summary Gets an objects info and metadata using the S3 API.
   * @param {string} key - The key of the object to be inspected.
   * @param {objectOptions} [options] - The options for inspecting the object.
   * @returns {Promise}
   */
  async get(key, options) {
    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket;
    try {
      const command = new import_client_s32.HeadObjectCommand({
        Bucket: bucket,
        Key: key
      }), response = await this.#client.send(command);
      response.download = () => {
        return this.#routeDownload(response.Metadata.cid, key, options);
      };
      return response;
    } catch (err) {
      // A missing key resolves to `false` rather than throwing.
      if (err.name === "NotFound") {
        return false;
      }
      throw err;
    }
  }
  /**
   * @summary Downloads an object from the specified bucket using the provided key.
   * @param {string} key - The key of the object to be downloaded.
   * @param {objectOptions} [options] - The options for downloading the object..
   * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream.
   * @example
   * // Download object with name of `download-object-example`
   * await objectManager.download(`download-object-example`);
   */
  async download(key, options) {
    if (typeof this.#gatewayConfiguration.endpoint === "string") {
      const objectToFetch = await this.get(key, options);
      return objectToFetch.download();
    } else {
      const command = new import_client_s32.GetObjectCommand({
        Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
        Key: key
      }), response = await this.#client.send(command);
      return response.Body;
    }
  }
  /**
   * @typedef {Object} listObjectsResult
   * @property {boolean} IsTruncated Indicates if more results exist on the server
   * @property {string} NextContinuationToken ContinuationToken used to paginate list requests
   * @property {Array} Contents List of Keys stored in the S3 Bucket
   * @property {string} Contents.Key Key of the Object
   * @property {string} Contents.LastModified Date Last Modified of the Object
   * @property {string} Contents.CID CID of the Object
   * @property {string} Contents.ETag ETag of the Object
   * @property {number} Contents.Size Size in Bytes of the Object
   * @property {string} Contents.StorageClass Class of Storage of the Object
   * @property {function} Contents.download Convenience function to download the item using the S3 gateway
   */
  /**
   * @typedef {Object} listObjectOptions
   * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used.
   * @property {string} [ContinuationToken=null] Continues listing from this objects name.
   * @property {string} [Delimiter=null] Character used to group keys
   * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000.
   */
  /**
   * Retrieves a list of objects from a specified bucket.
   *
   * @param {listObjectOptions} options - The options for listing objects.
   * @returns {Promise} - A promise that resolves to an array of objects.
   * @example
   * // List objects in bucket with a limit of 1000
   * await objectManager.list({
   *   MaxKeys: 1000
   * });
   */
  async list(options = {
    Bucket: this.#defaultBucket,
    ContinuationToken: null,
    Delimiter: null,
    MaxKeys: 1e3
  }) {
    if ((options == null ? void 0 : options.MaxKeys) && options.MaxKeys > 1e5) {
      throw new Error(`MaxKeys Maximum value is 100000`);
    }
    const bucket = (options == null ? void 0 : options.Bucket) || this.#defaultBucket, limit = (options == null ? void 0 : options.MaxKeys) || 1e3, commandOptions = {
      Bucket: bucket,
      MaxKeys: limit
    }, command = new import_client_s32.ListObjectsV2Command({
      ...options,
      ...commandOptions
    });
    const { Contents, IsTruncated, NextContinuationToken } = await this.#client.send(command);
    return { Contents, IsTruncated, NextContinuationToken };
  }
  /**
   * @summary Deletes an object from the specified bucket using the provided key.
   * @param {string} key - The key of the object to be deleted.
   * @param {objectOptions} [options] - The options for deleting the file.
   * @returns {Promise} - A Promise that resolves with the result of the delete operation.
   * @example
   * // Delete object with name of `delete-object-example`
   * await objectManager.delete(`delete-object-example`);
   */
  async delete(key, options) {
    const command = new import_client_s32.DeleteObjectCommand({
      Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
      Key: key
    });
    await this.#client.send(command);
    return true;
  }
  /**
   * @typedef {Object} copyObjectOptions
   * @property {string} [sourceBucket] The source bucket from where the object is to be copied.
   * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey.
   */
  /**
   * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey.
   *
   * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket.
   * @param {string} sourceKey - The key of the object to be copied from the sourceBucket.
   * @param {string} destinationBucket - The bucket where the object will be copied to.
   * @param {copyObjectOptions} [options] - Additional options for the copy operation.
   *
   * @returns {Promise} - A Promise that resolves with the result of the copy operation.
   * @example
   * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
   * // TIP: Set bucket on constructor and it will be used as the default source for copying objects.
   * await objectManager.copy(`copy-object-test`, `copy-object-dest`, {
   *   sourceBucket: `copy-object-src`
   * });
   */
  async copy(sourceKey, destinationBucket, options = {
    sourceBucket: this.#defaultBucket,
    destinationKey: void 0
  }) {
    const copySource = `${(options == null ? void 0 : options.sourceBucket) || this.#defaultBucket}/${sourceKey}`, command = new import_client_s32.CopyObjectCommand({
      CopySource: copySource,
      Bucket: destinationBucket,
      Key: (options == null ? void 0 : options.destinationKey) || sourceKey
    });
    await this.#client.send(command);
    return true;
  }
};
+var objectManager_default = ObjectManager;
+
+// src/pinManager.js
+var import_axios4 = __toESM(require("axios"));
+var PinManager = class {
+  #DEFAULT_ENDPOINT = "https://api.filebase.io";
+  #DEFAULT_TIMEOUT = 6e4;
+  #client;
+  #credentials;
+  #gatewayConfiguration;
+  #defaultBucket;
+  /**
+   * @typedef {Object} pinManagerOptions Optional settings for the constructor.
+   * @property {string} [bucket] Default bucket to use.
+   * @property {pinDownloadOptions} [gateway] Default gateway to use.
+   */
+  /**
+   * @typedef {Object} pinDownloadOptions Optional settings for downloading pins
+   * @property {string} endpoint Default gateway to use.
+   * @property {string} [token] Token for the default gateway.
+   * @property {number} [timeout=60000] Timeout for the default gateway
+   */
+  /**
+   * @summary Creates a new instance of the constructor.
+   * @param {string} clientKey - The access key ID for authentication.
+   * @param {string} clientSecret - The secret access key for authentication.
+   * @param {pinManagerOptions} [options] - Optional settings for the constructor.
+   * @tutorial quickstart-pin
+   * @example
+   * import { PinManager } from "@filebase/sdk";
+   * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
+   *   bucket: "my-default-bucket",
+   *   gateway: {
+   *     endpoint: "https://my-default-gateway.mydomain.com
+   *     token: SUPER_SECRET_GATEWAY_TOKEN
+   *   }
+   * });
+   */
+  constructor(clientKey, clientSecret, options) {
+    var _a, _b, _c;
+    this.#defaultBucket = options == null ? void 0 : options.bucket;
+    const PSAClientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`;
+    this.#credentials = {
+      key: clientKey,
+      secret: clientSecret
+    };
+    this.#client = import_axios4.default.create({
+      baseURL,
+      timeout: this.#DEFAULT_TIMEOUT
+    });
+    this.#gatewayConfiguration = {
+      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
+      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
+      timeout: ((_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout) || this.#DEFAULT_TIMEOUT
+    };
+  }
+  /**
+   * @typedef {Object} pinStatus
+   * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal
+   * @property {string} status Status a pin object can have at a pinning service. ("queued","pinning","pinned","failed")
+   * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination
+   * @property {Object} pin Pin object
+   * @property {string} pin.cid Content Identifier (CID) pinned recursively
+   * @property {string} pin.name Name for pinned data; can be used for lookups later
+   * @property {Array} pin.origins Optional list of multiaddrs known to provide the data
+   * @property {Object} pin.meta Optional metadata for pin object
+   * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data
+   * @property {object} [info] Optional info for PinStatus response
+   * @property {function} download Convenience function to download pin
+   */
+  /**
+   * @typedef {Object} pinOptions
+   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
+   */
+  /**
+   * @typedef {Object} listPinOptions
+   * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts
+   * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match)
+   * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies
+   * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only)
+   * @property {string} [before] Return results created (queued) before provided timestamp
+   * @property {string} [after] Return results created (queued) after provided timestamp
+   * @property {number} [limit] Max records to return
+   * @property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport
+   */
+  /**
+   * @typedef {Object} listPinResults
+   * @property {number} count Total number of pin objects that exist for passed query filters
+   * @property {Array} Array of PinStatus results
+   */
+  /**
+   * @summary List the pins in a given bucket
+   * @param {listPinOptions} [listOptions]
+   * @param {pinOptions} [options]
+   * @returns {Promise}
+   * @example
+   * // List pins in bucket with a limit of 1000
+   * await pinManager.list({
+   *   limit: 1000
+   * });
+   */
+  async list(listOptions, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponse = await this.#client.request({
+        method: "GET",
+        params: listOptions,
+        headers: { Authorization: `Bearer ${encodedToken}` }
+      });
+      for (let pinStatus of getResponse.data.results) {
+        pinStatus.download = () => {
+          return this.download(pinStatus.pin.cid);
+        };
+      }
+      return getResponse.data;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @summary Create a pin in the selected bucket
+   * @param {string} key Key or path of the file in the bucket
+   * @param {string} cid Content Identifier (CID) to be pinned recursively
+   * @param {Object} [metadata] Optional metadata for pin object
+   * @param {pinOptions} [options] Options for pinning the object
+   * @returns {Promise}
+   * @example
+   * // Create Pin with Metadata
+   * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
+   *   "application": "my-custom-app-on-filebase"
+   * });
+   */
+  async create(key, cid, metadata, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatus = await this.#client.request({
+        method: "POST",
+        data: {
+          cid,
+          name: key,
+          meta: metadata
+        },
+        headers: { Authorization: `Bearer ${encodedToken}` }
+      });
+      pinStatus.data.download = () => {
+        return this.download(pinStatus.data.pin.cid);
+      };
+      return pinStatus.data;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @typedef {Object} replacePinOptions
+   * @augments pinOptions
+   * @property {Object} [metadata] Optional metadata to set on pin during replacement
+   * @property {string} [name] Optional name for pin to set during replacement
+   */
+  /**
+   * @summary Replace a pinned object in the selected bucket
+   * @param {string} requestid Unique ID for the pinned object
+   * @param {string} cid Content Identifier (CID) to be pinned recursively
+   * @param {replacePinOptions} [options] Options for pinning the object
+   * @returns {Promise}
+   * @example
+   * // Replace Pin with Metadata
+   * await pinManager.create("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
+   *   "revision": Date.now()
+   * }
+   */
+  async replace(requestid, cid, options) {
+    try {
+      let replaceData = {
+        cid,
+        meta: (options == null ? void 0 : options.metadata) || {}
+      };
+      if (options == null ? void 0 : options.name) {
+        replaceData.name = options.name;
+      }
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatusResult = await this.#client.request({
+        method: "POST",
+        url: `/${requestid}`,
+        data: replaceData,
+        validateStatus: (status) => {
+          return status === 200;
+        },
+        headers: { Authorization: `Bearer ${encodedToken}` }
+      });
+      const pinStatus = pinStatusResult.data;
+      pinStatus.download = () => {
+        return this.download(pinStatus.pin.cid);
+      };
+      return pinStatus;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @summary Download a pin from the selected IPFS gateway
+   * @param {string} cid
+   * @param {pinDownloadOptions} [options]
+   * @returns {Promise}
+   * @example
+   * // Download Pin by CID
+   * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF");
+   */
+  async download(cid, options) {
+    const downloadOptions = Object.assign(this.#gatewayConfiguration, options);
+    return downloadFromGateway(cid, downloadOptions);
+  }
+  /**
+   * @summary Get details about a pinned object
+   * @param {string} requestid Globally unique identifier of the pin request
+   * @param {pinOptions} [options] Options for getting the pin
+   * @returns {Promise}
+   * @example
+   * // Get Pin Info by RequestId
+   * await pinManager.get("qr4231214");
+   */
+  async get(requestid, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponseResult = await this.#client.request({
+        method: "GET",
+        url: `/${requestid}`,
+        headers: { Authorization: `Bearer ${encodedToken}` },
+        validateStatus: (status) => {
+          return status === 200 || status === 404;
+        }
+      });
+      if (getResponseResult.status === 404) {
+        return false;
+      }
+      const pinStatus = getResponseResult.data;
+      pinStatus.download = () => {
+        return this.download(pinStatus.pin.cid);
+      };
+      return pinStatus;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @summary Delete a pinned object from the selected bucket
+   * @param requestid Globally unique identifier of the pin request
+   * @param {pinOptions} [options] Options for deleting the pin
+   * @returns {Promise}
+   * @example
+   * // Delete Pin by RequestId
+   * await pinManager.delete("qr4231213");
+   */
+  async delete(requestid, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket);
+      await this.#client.request({
+        method: "DELETE",
+        url: `/${requestid}`,
+        headers: { Authorization: `Bearer ${encodedToken}` },
+        validateStatus: (status) => {
+          return status === 202;
+        }
+      });
+      return true;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  #getEncodedToken(bucket) {
+    bucket = bucket || this.#defaultBucket;
+    return Buffer.from(
+      `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`
+    ).toString("base64");
+  }
+};
+var pinManager_default = PinManager;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  BucketManager,
+  GatewayManager,
+  NameManager,
+  ObjectManager,
+  PinManager
+});
diff --git a/dist/index.mjs b/dist/index.mjs
new file mode 100644
index 0000000..b5682db
--- /dev/null
+++ b/dist/index.mjs
@@ -0,0 +1,18045 @@
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
+  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
+}) : x)(function(x) {
+  if (typeof require !== "undefined")
+    return require.apply(this, arguments);
+  throw Error('Dynamic require of "' + x + '" is not supported');
+});
+var __commonJS = (cb, mod) => function __require2() {
+  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+};
+var __export = (target, all) => {
+  for (var name4 in all)
+    __defProp(target, name4, { get: all[name4], enumerable: true });
+};
+var __copyProps = (to, from4, except, desc) => {
+  if (from4 && typeof from4 === "object" || typeof from4 === "function") {
+    for (let key of __getOwnPropNames(from4))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from4[key], enumerable: !(desc = __getOwnPropDesc(from4, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+
+// node_modules/varint/encode.js
+var require_encode = __commonJS({
+  "node_modules/varint/encode.js"(exports, module) {
+    module.exports = encode12;
+    var MSB3 = 128;
+    var REST3 = 127;
+    var MSBALL3 = ~REST3;
+    var INT3 = Math.pow(2, 31);
+    function encode12(num, out, offset) {
+      if (Number.MAX_SAFE_INTEGER && num > Number.MAX_SAFE_INTEGER) {
+        encode12.bytes = 0;
+        throw new RangeError("Could not encode varint");
+      }
+      out = out || [];
+      offset = offset || 0;
+      var oldOffset = offset;
+      while (num >= INT3) {
+        out[offset++] = num & 255 | MSB3;
+        num /= 128;
+      }
+      while (num & MSBALL3) {
+        out[offset++] = num & 255 | MSB3;
+        num >>>= 7;
+      }
+      out[offset] = num | 0;
+      encode12.bytes = offset - oldOffset + 1;
+      return out;
+    }
+  }
+});
+
+// node_modules/varint/decode.js
+var require_decode = __commonJS({
+  "node_modules/varint/decode.js"(exports, module) {
+    module.exports = read4;
+    var MSB3 = 128;
+    var REST3 = 127;
+    function read4(buf2, offset) {
+      var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
+      do {
+        if (counter >= l || shift > 49) {
+          read4.bytes = 0;
+          throw new RangeError("Could not decode varint");
+        }
+        b = buf2[counter++];
+        res += shift < 28 ? (b & REST3) << shift : (b & REST3) * Math.pow(2, shift);
+        shift += 7;
+      } while (b >= MSB3);
+      read4.bytes = counter - offset;
+      return res;
+    }
+  }
+});
+
+// node_modules/varint/length.js
+var require_length = __commonJS({
+  "node_modules/varint/length.js"(exports, module) {
+    var N13 = Math.pow(2, 7);
+    var N23 = Math.pow(2, 14);
+    var N33 = Math.pow(2, 21);
+    var N43 = Math.pow(2, 28);
+    var N53 = Math.pow(2, 35);
+    var N63 = Math.pow(2, 42);
+    var N73 = Math.pow(2, 49);
+    var N83 = Math.pow(2, 56);
+    var N93 = Math.pow(2, 63);
+    module.exports = function(value) {
+      return value < N13 ? 1 : value < N23 ? 2 : value < N33 ? 3 : value < N43 ? 4 : value < N53 ? 5 : value < N63 ? 6 : value < N73 ? 7 : value < N83 ? 8 : value < N93 ? 9 : 10;
+    };
+  }
+});
+
+// node_modules/varint/index.js
+var require_varint = __commonJS({
+  "node_modules/varint/index.js"(exports, module) {
+    module.exports = {
+      encode: require_encode(),
+      decode: require_decode(),
+      encodingLength: require_length()
+    };
+  }
+});
+
+// node_modules/eventemitter3/index.js
+var require_eventemitter3 = __commonJS({
+  "node_modules/eventemitter3/index.js"(exports, module) {
+    "use strict";
+    var has = Object.prototype.hasOwnProperty;
+    var prefix = "~";
+    function Events() {
+    }
+    if (Object.create) {
+      Events.prototype = /* @__PURE__ */ Object.create(null);
+      if (!new Events().__proto__)
+        prefix = false;
+    }
+    function EE(fn, context, once) {
+      this.fn = fn;
+      this.context = context;
+      this.once = once || false;
+    }
+    function addListener(emitter, event, fn, context, once) {
+      if (typeof fn !== "function") {
+        throw new TypeError("The listener must be a function");
+      }
+      var listener = new EE(fn, context || emitter, once), evt = prefix ? prefix + event : event;
+      if (!emitter._events[evt])
+        emitter._events[evt] = listener, emitter._eventsCount++;
+      else if (!emitter._events[evt].fn)
+        emitter._events[evt].push(listener);
+      else
+        emitter._events[evt] = [emitter._events[evt], listener];
+      return emitter;
+    }
+    function clearEvent(emitter, evt) {
+      if (--emitter._eventsCount === 0)
+        emitter._events = new Events();
+      else
+        delete emitter._events[evt];
+    }
+    function EventEmitter2() {
+      this._events = new Events();
+      this._eventsCount = 0;
+    }
+    EventEmitter2.prototype.eventNames = function eventNames() {
+      var names = [], events, name4;
+      if (this._eventsCount === 0)
+        return names;
+      for (name4 in events = this._events) {
+        if (has.call(events, name4))
+          names.push(prefix ? name4.slice(1) : name4);
+      }
+      if (Object.getOwnPropertySymbols) {
+        return names.concat(Object.getOwnPropertySymbols(events));
+      }
+      return names;
+    };
+    EventEmitter2.prototype.listeners = function listeners(event) {
+      var evt = prefix ? prefix + event : event, handlers = this._events[evt];
+      if (!handlers)
+        return [];
+      if (handlers.fn)
+        return [handlers.fn];
+      for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
+        ee[i] = handlers[i].fn;
+      }
+      return ee;
+    };
+    EventEmitter2.prototype.listenerCount = function listenerCount(event) {
+      var evt = prefix ? prefix + event : event, listeners = this._events[evt];
+      if (!listeners)
+        return 0;
+      if (listeners.fn)
+        return 1;
+      return listeners.length;
+    };
+    EventEmitter2.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
+      var evt = prefix ? prefix + event : event;
+      if (!this._events[evt])
+        return false;
+      var listeners = this._events[evt], len = arguments.length, args, i;
+      if (listeners.fn) {
+        if (listeners.once)
+          this.removeListener(event, listeners.fn, void 0, true);
+        switch (len) {
+          case 1:
+            return listeners.fn.call(listeners.context), true;
+          case 2:
+            return listeners.fn.call(listeners.context, a1), true;
+          case 3:
+            return listeners.fn.call(listeners.context, a1, a2), true;
+          case 4:
+            return listeners.fn.call(listeners.context, a1, a2, a3), true;
+          case 5:
+            return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
+          case 6:
+            return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
+        }
+        for (i = 1, args = new Array(len - 1); i < len; i++) {
+          args[i - 1] = arguments[i];
+        }
+        listeners.fn.apply(listeners.context, args);
+      } else {
+        var length4 = listeners.length, j;
+        for (i = 0; i < length4; i++) {
+          if (listeners[i].once)
+            this.removeListener(event, listeners[i].fn, void 0, true);
+          switch (len) {
+            case 1:
+              listeners[i].fn.call(listeners[i].context);
+              break;
+            case 2:
+              listeners[i].fn.call(listeners[i].context, a1);
+              break;
+            case 3:
+              listeners[i].fn.call(listeners[i].context, a1, a2);
+              break;
+            case 4:
+              listeners[i].fn.call(listeners[i].context, a1, a2, a3);
+              break;
+            default:
+              if (!args)
+                for (j = 1, args = new Array(len - 1); j < len; j++) {
+                  args[j - 1] = arguments[j];
+                }
+              listeners[i].fn.apply(listeners[i].context, args);
+          }
+        }
+      }
+      return true;
+    };
+    EventEmitter2.prototype.on = function on(event, fn, context) {
+      return addListener(this, event, fn, context, false);
+    };
+    EventEmitter2.prototype.once = function once(event, fn, context) {
+      return addListener(this, event, fn, context, true);
+    };
+    EventEmitter2.prototype.removeListener = function removeListener(event, fn, context, once) {
+      var evt = prefix ? prefix + event : event;
+      if (!this._events[evt])
+        return this;
+      if (!fn) {
+        clearEvent(this, evt);
+        return this;
+      }
+      var listeners = this._events[evt];
+      if (listeners.fn) {
+        if (listeners.fn === fn && (!once || listeners.once) && (!context || listeners.context === context)) {
+          clearEvent(this, evt);
+        }
+      } else {
+        for (var i = 0, events = [], length4 = listeners.length; i < length4; i++) {
+          if (listeners[i].fn !== fn || once && !listeners[i].once || context && listeners[i].context !== context) {
+            events.push(listeners[i]);
+          }
+        }
+        if (events.length)
+          this._events[evt] = events.length === 1 ? events[0] : events;
+        else
+          clearEvent(this, evt);
+      }
+      return this;
+    };
+    EventEmitter2.prototype.removeAllListeners = function removeAllListeners(event) {
+      var evt;
+      if (event) {
+        evt = prefix ? prefix + event : event;
+        if (this._events[evt])
+          clearEvent(this, evt);
+      } else {
+        this._events = new Events();
+        this._eventsCount = 0;
+      }
+      return this;
+    };
+    EventEmitter2.prototype.off = EventEmitter2.prototype.removeListener;
+    EventEmitter2.prototype.addListener = EventEmitter2.prototype.on;
+    EventEmitter2.prefixed = prefix;
+    EventEmitter2.EventEmitter = EventEmitter2;
+    if ("undefined" !== typeof module) {
+      module.exports = EventEmitter2;
+    }
+  }
+});
+
+// node_modules/err-code/index.js
+var require_err_code = __commonJS({
+  "node_modules/err-code/index.js"(exports, module) {
+    "use strict";
+    function assign(obj, props) {
+      for (const key in props) {
+        Object.defineProperty(obj, key, {
+          value: props[key],
+          enumerable: true,
+          configurable: true
+        });
+      }
+      return obj;
+    }
+    function createError(err, code5, props) {
+      if (!err || typeof err === "string") {
+        throw new TypeError("Please pass an Error to err-code");
+      }
+      if (!props) {
+        props = {};
+      }
+      if (typeof code5 === "object") {
+        props = code5;
+        code5 = "";
+      }
+      if (code5) {
+        props.code = code5;
+      }
+      try {
+        return assign(err, props);
+      } catch (_) {
+        props.message = err.message;
+        props.stack = err.stack;
+        const ErrClass = function() {
+        };
+        ErrClass.prototype = Object.create(Object.getPrototypeOf(err));
+        const output = assign(new ErrClass(), props);
+        return output;
+      }
+    }
+    module.exports = createError;
+  }
+});
+
+// node_modules/murmurhash3js-revisited/lib/murmurHash3js.js
+var require_murmurHash3js = __commonJS({
+  "node_modules/murmurhash3js-revisited/lib/murmurHash3js.js"(exports, module) {
+    (function(root, undefined2) {
+      "use strict";
+      var library = {
+        "version": "3.0.0",
+        "x86": {},
+        "x64": {},
+        "inputValidation": true
+      };
+      function _validBytes(bytes) {
+        if (!Array.isArray(bytes) && !ArrayBuffer.isView(bytes)) {
+          return false;
+        }
+        for (var i = 0; i < bytes.length; i++) {
+          if (!Number.isInteger(bytes[i]) || bytes[i] < 0 || bytes[i] > 255) {
+            return false;
+          }
+        }
+        return true;
+      }
+      function _x86Multiply(m, n) {
+        return (m & 65535) * n + (((m >>> 16) * n & 65535) << 16);
+      }
+      function _x86Rotl(m, n) {
+        return m << n | m >>> 32 - n;
+      }
+      function _x86Fmix(h) {
+        h ^= h >>> 16;
+        h = _x86Multiply(h, 2246822507);
+        h ^= h >>> 13;
+        h = _x86Multiply(h, 3266489909);
+        h ^= h >>> 16;
+        return h;
+      }
+      function _x64Add(m, n) {
+        m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535];
+        n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535];
+        var o = [0, 0, 0, 0];
+        o[3] += m[3] + n[3];
+        o[2] += o[3] >>> 16;
+        o[3] &= 65535;
+        o[2] += m[2] + n[2];
+        o[1] += o[2] >>> 16;
+        o[2] &= 65535;
+        o[1] += m[1] + n[1];
+        o[0] += o[1] >>> 16;
+        o[1] &= 65535;
+        o[0] += m[0] + n[0];
+        o[0] &= 65535;
+        return [o[0] << 16 | o[1], o[2] << 16 | o[3]];
+      }
+      function _x64Multiply(m, n) {
+        m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535];
+        n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535];
+        var o = [0, 0, 0, 0];
+        o[3] += m[3] * n[3];
+        o[2] += o[3] >>> 16;
+        o[3] &= 65535;
+        o[2] += m[2] * n[3];
+        o[1] += o[2] >>> 16;
+        o[2] &= 65535;
+        o[2] += m[3] * n[2];
+        o[1] += o[2] >>> 16;
+        o[2] &= 65535;
+        o[1] += m[1] * n[3];
+        o[0] += o[1] >>> 16;
+        o[1] &= 65535;
+        o[1] += m[2] * n[2];
+        o[0] += o[1] >>> 16;
+        o[1] &= 65535;
+        o[1] += m[3] * n[1];
+        o[0] += o[1] >>> 16;
+        o[1] &= 65535;
+        o[0] += m[0] * n[3] + m[1] * n[2] + m[2] * n[1] + m[3] * n[0];
+        o[0] &= 65535;
+        return [o[0] << 16 | o[1], o[2] << 16 | o[3]];
+      }
+      function _x64Rotl(m, n) {
+        n %= 64;
+        if (n === 32) {
+          return [m[1], m[0]];
+        } else if (n < 32) {
+          return [m[0] << n | m[1] >>> 32 - n, m[1] << n | m[0] >>> 32 - n];
+        } else {
+          n -= 32;
+          return [m[1] << n | m[0] >>> 32 - n, m[0] << n | m[1] >>> 32 - n];
+        }
+      }
+      function _x64LeftShift(m, n) {
+        n %= 64;
+        if (n === 0) {
+          return m;
+        } else if (n < 32) {
+          return [m[0] << n | m[1] >>> 32 - n, m[1] << n];
+        } else {
+          return [m[1] << n - 32, 0];
+        }
+      }
+      function _x64Xor(m, n) {
+        return [m[0] ^ n[0], m[1] ^ n[1]];
+      }
+      function _x64Fmix(h) {
+        h = _x64Xor(h, [0, h[0] >>> 1]);
+        h = _x64Multiply(h, [4283543511, 3981806797]);
+        h = _x64Xor(h, [0, h[0] >>> 1]);
+        h = _x64Multiply(h, [3301882366, 444984403]);
+        h = _x64Xor(h, [0, h[0] >>> 1]);
+        return h;
+      }
+      library.x86.hash32 = function(bytes, seed) {
+        if (library.inputValidation && !_validBytes(bytes)) {
+          return undefined2;
+        }
+        seed = seed || 0;
+        var remainder = bytes.length % 4;
+        var blocks = bytes.length - remainder;
+        var h1 = seed;
+        var k1 = 0;
+        var c1 = 3432918353;
+        var c2 = 461845907;
+        for (var i = 0; i < blocks; i = i + 4) {
+          k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24;
+          k1 = _x86Multiply(k1, c1);
+          k1 = _x86Rotl(k1, 15);
+          k1 = _x86Multiply(k1, c2);
+          h1 ^= k1;
+          h1 = _x86Rotl(h1, 13);
+          h1 = _x86Multiply(h1, 5) + 3864292196;
+        }
+        k1 = 0;
+        switch (remainder) {
+          case 3:
+            k1 ^= bytes[i + 2] << 16;
+          case 2:
+            k1 ^= bytes[i + 1] << 8;
+          case 1:
+            k1 ^= bytes[i];
+            k1 = _x86Multiply(k1, c1);
+            k1 = _x86Rotl(k1, 15);
+            k1 = _x86Multiply(k1, c2);
+            h1 ^= k1;
+        }
+        h1 ^= bytes.length;
+        h1 = _x86Fmix(h1);
+        return h1 >>> 0;
+      };
+      library.x86.hash128 = function(bytes, seed) {
+        if (library.inputValidation && !_validBytes(bytes)) {
+          return undefined2;
+        }
+        seed = seed || 0;
+        var remainder = bytes.length % 16;
+        var blocks = bytes.length - remainder;
+        var h1 = seed;
+        var h2 = seed;
+        var h3 = seed;
+        var h4 = seed;
+        var k1 = 0;
+        var k2 = 0;
+        var k3 = 0;
+        var k4 = 0;
+        var c1 = 597399067;
+        var c2 = 2869860233;
+        var c3 = 951274213;
+        var c4 = 2716044179;
+        for (var i = 0; i < blocks; i = i + 16) {
+          k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24;
+          k2 = bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24;
+          k3 = bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24;
+          k4 = bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24;
+          k1 = _x86Multiply(k1, c1);
+          k1 = _x86Rotl(k1, 15);
+          k1 = _x86Multiply(k1, c2);
+          h1 ^= k1;
+          h1 = _x86Rotl(h1, 19);
+          h1 += h2;
+          h1 = _x86Multiply(h1, 5) + 1444728091;
+          k2 = _x86Multiply(k2, c2);
+          k2 = _x86Rotl(k2, 16);
+          k2 = _x86Multiply(k2, c3);
+          h2 ^= k2;
+          h2 = _x86Rotl(h2, 17);
+          h2 += h3;
+          h2 = _x86Multiply(h2, 5) + 197830471;
+          k3 = _x86Multiply(k3, c3);
+          k3 = _x86Rotl(k3, 17);
+          k3 = _x86Multiply(k3, c4);
+          h3 ^= k3;
+          h3 = _x86Rotl(h3, 15);
+          h3 += h4;
+          h3 = _x86Multiply(h3, 5) + 2530024501;
+          k4 = _x86Multiply(k4, c4);
+          k4 = _x86Rotl(k4, 18);
+          k4 = _x86Multiply(k4, c1);
+          h4 ^= k4;
+          h4 = _x86Rotl(h4, 13);
+          h4 += h1;
+          h4 = _x86Multiply(h4, 5) + 850148119;
+        }
+        k1 = 0;
+        k2 = 0;
+        k3 = 0;
+        k4 = 0;
+        switch (remainder) {
+          case 15:
+            k4 ^= bytes[i + 14] << 16;
+          case 14:
+            k4 ^= bytes[i + 13] << 8;
+          case 13:
+            k4 ^= bytes[i + 12];
+            k4 = _x86Multiply(k4, c4);
+            k4 = _x86Rotl(k4, 18);
+            k4 = _x86Multiply(k4, c1);
+            h4 ^= k4;
+          case 12:
+            k3 ^= bytes[i + 11] << 24;
+          case 11:
+            k3 ^= bytes[i + 10] << 16;
+          case 10:
+            k3 ^= bytes[i + 9] << 8;
+          case 9:
+            k3 ^= bytes[i + 8];
+            k3 = _x86Multiply(k3, c3);
+            k3 = _x86Rotl(k3, 17);
+            k3 = _x86Multiply(k3, c4);
+            h3 ^= k3;
+          case 8:
+            k2 ^= bytes[i + 7] << 24;
+          case 7:
+            k2 ^= bytes[i + 6] << 16;
+          case 6:
+            k2 ^= bytes[i + 5] << 8;
+          case 5:
+            k2 ^= bytes[i + 4];
+            k2 = _x86Multiply(k2, c2);
+            k2 = _x86Rotl(k2, 16);
+            k2 = _x86Multiply(k2, c3);
+            h2 ^= k2;
+          case 4:
+            k1 ^= bytes[i + 3] << 24;
+          case 3:
+            k1 ^= bytes[i + 2] << 16;
+          case 2:
+            k1 ^= bytes[i + 1] << 8;
+          case 1:
+            k1 ^= bytes[i];
+            k1 = _x86Multiply(k1, c1);
+            k1 = _x86Rotl(k1, 15);
+            k1 = _x86Multiply(k1, c2);
+            h1 ^= k1;
+        }
+        h1 ^= bytes.length;
+        h2 ^= bytes.length;
+        h3 ^= bytes.length;
+        h4 ^= bytes.length;
+        h1 += h2;
+        h1 += h3;
+        h1 += h4;
+        h2 += h1;
+        h3 += h1;
+        h4 += h1;
+        h1 = _x86Fmix(h1);
+        h2 = _x86Fmix(h2);
+        h3 = _x86Fmix(h3);
+        h4 = _x86Fmix(h4);
+        h1 += h2;
+        h1 += h3;
+        h1 += h4;
+        h2 += h1;
+        h3 += h1;
+        h4 += h1;
+        return ("00000000" + (h1 >>> 0).toString(16)).slice(-8) + ("00000000" + (h2 >>> 0).toString(16)).slice(-8) + ("00000000" + (h3 >>> 0).toString(16)).slice(-8) + ("00000000" + (h4 >>> 0).toString(16)).slice(-8);
+      };
+      library.x64.hash128 = function(bytes, seed) {
+        if (library.inputValidation && !_validBytes(bytes)) {
+          return undefined2;
+        }
+        seed = seed || 0;
+        var remainder = bytes.length % 16;
+        var blocks = bytes.length - remainder;
+        var h1 = [0, seed];
+        var h2 = [0, seed];
+        var k1 = [0, 0];
+        var k2 = [0, 0];
+        var c1 = [2277735313, 289559509];
+        var c2 = [1291169091, 658871167];
+        for (var i = 0; i < blocks; i = i + 16) {
+          k1 = [bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24, bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24];
+          k2 = [bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24, bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24];
+          k1 = _x64Multiply(k1, c1);
+          k1 = _x64Rotl(k1, 31);
+          k1 = _x64Multiply(k1, c2);
+          h1 = _x64Xor(h1, k1);
+          h1 = _x64Rotl(h1, 27);
+          h1 = _x64Add(h1, h2);
+          h1 = _x64Add(_x64Multiply(h1, [0, 5]), [0, 1390208809]);
+          k2 = _x64Multiply(k2, c2);
+          k2 = _x64Rotl(k2, 33);
+          k2 = _x64Multiply(k2, c1);
+          h2 = _x64Xor(h2, k2);
+          h2 = _x64Rotl(h2, 31);
+          h2 = _x64Add(h2, h1);
+          h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 944331445]);
+        }
+        k1 = [0, 0];
+        k2 = [0, 0];
+        switch (remainder) {
+          case 15:
+            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 14]], 48));
+          case 14:
+            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 13]], 40));
+          case 13:
+            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 12]], 32));
+          case 12:
+            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 11]], 24));
+          case 11:
+            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 10]], 16));
+          case 10:
+            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 9]], 8));
+          case 9:
+            k2 = _x64Xor(k2, [0, bytes[i + 8]]);
+            k2 = _x64Multiply(k2, c2);
+            k2 = _x64Rotl(k2, 33);
+            k2 = _x64Multiply(k2, c1);
+            h2 = _x64Xor(h2, k2);
+          case 8:
+            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 7]], 56));
+          case 7:
+            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 6]], 48));
+          case 6:
+            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 5]], 40));
+          case 5:
+            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 4]], 32));
+          case 4:
+            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 3]], 24));
+          case 3:
+            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 2]], 16));
+          case 2:
+            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 1]], 8));
+          case 1:
+            k1 = _x64Xor(k1, [0, bytes[i]]);
+            k1 = _x64Multiply(k1, c1);
+            k1 = _x64Rotl(k1, 31);
+            k1 = _x64Multiply(k1, c2);
+            h1 = _x64Xor(h1, k1);
+        }
+        h1 = _x64Xor(h1, [0, bytes.length]);
+        h2 = _x64Xor(h2, [0, bytes.length]);
+        h1 = _x64Add(h1, h2);
+        h2 = _x64Add(h2, h1);
+        h1 = _x64Fmix(h1);
+        h2 = _x64Fmix(h2);
+        h1 = _x64Add(h1, h2);
+        h2 = _x64Add(h2, h1);
+        return ("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8);
+      };
+      if (typeof exports !== "undefined") {
+        if (typeof module !== "undefined" && module.exports) {
+          exports = module.exports = library;
+        }
+        exports.murmurHash3 = library;
+      } else if (typeof define === "function" && define.amd) {
+        define([], function() {
+          return library;
+        });
+      } else {
+        library._murmurHash3 = root.murmurHash3;
+        library.noConflict = function() {
+          root.murmurHash3 = library._murmurHash3;
+          library._murmurHash3 = undefined2;
+          library.noConflict = undefined2;
+          return library;
+        };
+        root.murmurHash3 = library;
+      }
+    })(exports);
+  }
+});
+
+// node_modules/murmurhash3js-revisited/index.js
+var require_murmurhash3js_revisited = __commonJS({
+  "node_modules/murmurhash3js-revisited/index.js"(exports, module) {
+    module.exports = require_murmurHash3js();
+  }
+});
+
+// node_modules/sparse-array/index.js
+var require_sparse_array = __commonJS({
+  "node_modules/sparse-array/index.js"(exports, module) {
+    "use strict";
+    var BITS_PER_BYTE = 7;
+    module.exports = class SparseArray {
+      constructor() {
+        this._bitArrays = [];
+        this._data = [];
+        this._length = 0;
+        this._changedLength = false;
+        this._changedData = false;
+      }
+      set(index, value) {
+        let pos = this._internalPositionFor(index, false);
+        if (value === void 0) {
+          if (pos !== -1) {
+            this._unsetInternalPos(pos);
+            this._unsetBit(index);
+            this._changedLength = true;
+            this._changedData = true;
+          }
+        } else {
+          let needsSort = false;
+          if (pos === -1) {
+            pos = this._data.length;
+            this._setBit(index);
+            this._changedData = true;
+          } else {
+            needsSort = true;
+          }
+          this._setInternalPos(pos, index, value, needsSort);
+          this._changedLength = true;
+        }
+      }
+      unset(index) {
+        this.set(index, void 0);
+      }
+      get(index) {
+        this._sortData();
+        const pos = this._internalPositionFor(index, true);
+        if (pos === -1) {
+          return void 0;
+        }
+        return this._data[pos][1];
+      }
+      push(value) {
+        this.set(this.length, value);
+        return this.length;
+      }
+      get length() {
+        this._sortData();
+        if (this._changedLength) {
+          const last2 = this._data[this._data.length - 1];
+          this._length = last2 ? last2[0] + 1 : 0;
+          this._changedLength = false;
+        }
+        return this._length;
+      }
+      forEach(iterator) {
+        let i = 0;
+        while (i < this.length) {
+          iterator(this.get(i), i, this);
+          i++;
+        }
+      }
+      map(iterator) {
+        let i = 0;
+        let mapped = new Array(this.length);
+        while (i < this.length) {
+          mapped[i] = iterator(this.get(i), i, this);
+          i++;
+        }
+        return mapped;
+      }
+      reduce(reducer, initialValue) {
+        let i = 0;
+        let acc = initialValue;
+        while (i < this.length) {
+          const value = this.get(i);
+          acc = reducer(acc, value, i);
+          i++;
+        }
+        return acc;
+      }
+      find(finder) {
+        let i = 0, found, last2;
+        while (i < this.length && !found) {
+          last2 = this.get(i);
+          found = finder(last2);
+          i++;
+        }
+        return found ? last2 : void 0;
+      }
+      _internalPositionFor(index, noCreate) {
+        const bytePos = this._bytePosFor(index, noCreate);
+        if (bytePos >= this._bitArrays.length) {
+          return -1;
+        }
+        const byte = this._bitArrays[bytePos];
+        const bitPos = index - bytePos * BITS_PER_BYTE;
+        const exists2 = (byte & 1 << bitPos) > 0;
+        if (!exists2) {
+          return -1;
+        }
+        const previousPopCount = this._bitArrays.slice(0, bytePos).reduce(popCountReduce, 0);
+        const mask = ~(4294967295 << bitPos + 1);
+        const bytePopCount = popCount(byte & mask);
+        const arrayPos = previousPopCount + bytePopCount - 1;
+        return arrayPos;
+      }
+      _bytePosFor(index, noCreate) {
+        const bytePos = Math.floor(index / BITS_PER_BYTE);
+        const targetLength = bytePos + 1;
+        while (!noCreate && this._bitArrays.length < targetLength) {
+          this._bitArrays.push(0);
+        }
+        return bytePos;
+      }
+      _setBit(index) {
+        const bytePos = this._bytePosFor(index, false);
+        this._bitArrays[bytePos] |= 1 << index - bytePos * BITS_PER_BYTE;
+      }
+      _unsetBit(index) {
+        const bytePos = this._bytePosFor(index, false);
+        this._bitArrays[bytePos] &= ~(1 << index - bytePos * BITS_PER_BYTE);
+      }
+      _setInternalPos(pos, index, value, needsSort) {
+        const data = this._data;
+        const elem = [index, value];
+        if (needsSort) {
+          this._sortData();
+          data[pos] = elem;
+        } else {
+          if (data.length) {
+            if (data[data.length - 1][0] >= index) {
+              data.push(elem);
+            } else if (data[0][0] <= index) {
+              data.unshift(elem);
+            } else {
+              const randomIndex = Math.round(data.length / 2);
+              this._data = data.slice(0, randomIndex).concat(elem).concat(data.slice(randomIndex));
+            }
+          } else {
+            this._data.push(elem);
+          }
+          this._changedData = true;
+          this._changedLength = true;
+        }
+      }
+      _unsetInternalPos(pos) {
+        this._data.splice(pos, 1);
+      }
+      _sortData() {
+        if (this._changedData) {
+          this._data.sort(sortInternal);
+        }
+        this._changedData = false;
+      }
+      bitField() {
+        const bytes = [];
+        let pendingBitsForResultingByte = 8;
+        let pendingBitsForNewByte = 0;
+        let resultingByte = 0;
+        let newByte;
+        const pending = this._bitArrays.slice();
+        while (pending.length || pendingBitsForNewByte) {
+          if (pendingBitsForNewByte === 0) {
+            newByte = pending.shift();
+            pendingBitsForNewByte = 7;
+          }
+          const usingBits = Math.min(pendingBitsForNewByte, pendingBitsForResultingByte);
+          const mask = ~(255 << usingBits);
+          const masked = newByte & mask;
+          resultingByte |= masked << 8 - pendingBitsForResultingByte;
+          newByte = newByte >>> usingBits;
+          pendingBitsForNewByte -= usingBits;
+          pendingBitsForResultingByte -= usingBits;
+          if (!pendingBitsForResultingByte || !pendingBitsForNewByte && !pending.length) {
+            bytes.push(resultingByte);
+            resultingByte = 0;
+            pendingBitsForResultingByte = 8;
+          }
+        }
+        for (var i = bytes.length - 1; i > 0; i--) {
+          const value = bytes[i];
+          if (value === 0) {
+            bytes.pop();
+          } else {
+            break;
+          }
+        }
+        return bytes;
+      }
+      compactArray() {
+        this._sortData();
+        return this._data.map(valueOnly);
+      }
+    };
+    function popCountReduce(count, byte) {
+      return count + popCount(byte);
+    }
+    function popCount(_v) {
+      let v = _v;
+      v = v - (v >> 1 & 1431655765);
+      v = (v & 858993459) + (v >> 2 & 858993459);
+      return (v + (v >> 4) & 252645135) * 16843009 >> 24;
+    }
+    function sortInternal(a, b) {
+      return a[0] - b[0];
+    }
+    function valueOnly(elem) {
+      return elem[1];
+    }
+  }
+});
+
+// node_modules/rabin-wasm/src/rabin.js
+var require_rabin = __commonJS({
+  "node_modules/rabin-wasm/src/rabin.js"(exports, module) {
+    var Rabin = class {
+      /**
+       * Creates an instance of Rabin.
+       * @param { import("./../dist/rabin-wasm") } asModule
+       * @param {number} [bits=12]
+       * @param {number} [min=8 * 1024]
+       * @param {number} [max=32 * 1024]
+       * @param {number} polynomial
+       * @memberof Rabin
+       */
+      constructor(asModule, bits = 12, min = 8 * 1024, max = 32 * 1024, windowSize = 64, polynomial) {
+        this.bits = bits;
+        this.min = min;
+        this.max = max;
+        this.asModule = asModule;
+        this.rabin = new asModule.Rabin(bits, min, max, windowSize, polynomial);
+        this.polynomial = polynomial;
+      }
+      /**
+       * Fingerprints the buffer
+       *
+       * @param {Uint8Array} buf
+       * @returns {Array}
+       * @memberof Rabin
+       */
+      fingerprint(buf2) {
+        const {
+          __retain,
+          __release,
+          __allocArray,
+          __getInt32Array,
+          Int32Array_ID,
+          Uint8Array_ID
+        } = this.asModule;
+        const lengths = new Int32Array(Math.ceil(buf2.length / this.min));
+        const lengthsPtr = __retain(__allocArray(Int32Array_ID, lengths));
+        const pointer = __retain(__allocArray(Uint8Array_ID, buf2));
+        const out = this.rabin.fingerprint(pointer, lengthsPtr);
+        const processed = __getInt32Array(out);
+        __release(pointer);
+        __release(lengthsPtr);
+        const end = processed.indexOf(0);
+        return end >= 0 ? processed.subarray(0, end) : processed;
+      }
+    };
+    module.exports = Rabin;
+  }
+});
+
+// node_modules/@assemblyscript/loader/index.js
+var require_loader = __commonJS({
+  "node_modules/@assemblyscript/loader/index.js"(exports) {
+    "use strict";
+    var ID_OFFSET = -8;
+    var SIZE_OFFSET = -4;
+    var ARRAYBUFFER_ID = 0;
+    var STRING_ID = 1;
+    var ARRAYBUFFERVIEW = 1 << 0;
+    var ARRAY = 1 << 1;
+    var SET = 1 << 2;
+    var MAP = 1 << 3;
+    var VAL_ALIGN_OFFSET = 5;
+    var VAL_ALIGN = 1 << VAL_ALIGN_OFFSET;
+    var VAL_SIGNED = 1 << 10;
+    var VAL_FLOAT = 1 << 11;
+    var VAL_NULLABLE = 1 << 12;
+    var VAL_MANAGED = 1 << 13;
+    var KEY_ALIGN_OFFSET = 14;
+    var KEY_ALIGN = 1 << KEY_ALIGN_OFFSET;
+    var KEY_SIGNED = 1 << 19;
+    var KEY_FLOAT = 1 << 20;
+    var KEY_NULLABLE = 1 << 21;
+    var KEY_MANAGED = 1 << 22;
+    var ARRAYBUFFERVIEW_BUFFER_OFFSET = 0;
+    var ARRAYBUFFERVIEW_DATASTART_OFFSET = 4;
+    var ARRAYBUFFERVIEW_DATALENGTH_OFFSET = 8;
+    var ARRAYBUFFERVIEW_SIZE = 12;
+    var ARRAY_LENGTH_OFFSET = 12;
+    var ARRAY_SIZE = 16;
+    var BIGINT = typeof BigUint64Array !== "undefined";
+    var THIS = Symbol();
+    var CHUNKSIZE = 1024;
+    function getStringImpl(buffer2, ptr) {
+      const U32 = new Uint32Array(buffer2);
+      const U16 = new Uint16Array(buffer2);
+      var length4 = U32[ptr + SIZE_OFFSET >>> 2] >>> 1;
+      var offset = ptr >>> 1;
+      if (length4 <= CHUNKSIZE)
+        return String.fromCharCode.apply(String, U16.subarray(offset, offset + length4));
+      const parts = [];
+      do {
+        const last2 = U16[offset + CHUNKSIZE - 1];
+        const size = last2 >= 55296 && last2 < 56320 ? CHUNKSIZE - 1 : CHUNKSIZE;
+        parts.push(String.fromCharCode.apply(String, U16.subarray(offset, offset += size)));
+        length4 -= size;
+      } while (length4 > CHUNKSIZE);
+      return parts.join("") + String.fromCharCode.apply(String, U16.subarray(offset, offset + length4));
+    }
+    function preInstantiate(imports) {
+      const baseModule = {};
+      function getString(memory, ptr) {
+        if (!memory)
+          return "";
+        return getStringImpl(memory.buffer, ptr);
+      }
+      const env = imports.env = imports.env || {};
+      env.abort = env.abort || function abort(mesg, file, line, colm) {
+        const memory = baseModule.memory || env.memory;
+        throw Error("abort: " + getString(memory, mesg) + " at " + getString(memory, file) + ":" + line + ":" + colm);
+      };
+      env.trace = env.trace || function trace(mesg, n) {
+        const memory = baseModule.memory || env.memory;
+        console.log("trace: " + getString(memory, mesg) + (n ? " " : "") + Array.prototype.slice.call(arguments, 2, 2 + n).join(", "));
+      };
+      imports.Math = imports.Math || Math;
+      imports.Date = imports.Date || Date;
+      return baseModule;
+    }
+    function postInstantiate(baseModule, instance) {
+      const rawExports = instance.exports;
+      const memory = rawExports.memory;
+      const table = rawExports.table;
+      const alloc4 = rawExports["__alloc"];
+      const retain = rawExports["__retain"];
+      const rttiBase = rawExports["__rtti_base"] || ~0;
+      function getInfo(id) {
+        const U32 = new Uint32Array(memory.buffer);
+        const count = U32[rttiBase >>> 2];
+        if ((id >>>= 0) >= count)
+          throw Error("invalid id: " + id);
+        return U32[(rttiBase + 4 >>> 2) + id * 2];
+      }
+      function getBase(id) {
+        const U32 = new Uint32Array(memory.buffer);
+        const count = U32[rttiBase >>> 2];
+        if ((id >>>= 0) >= count)
+          throw Error("invalid id: " + id);
+        return U32[(rttiBase + 4 >>> 2) + id * 2 + 1];
+      }
+      function getValueAlign(info) {
+        return 31 - Math.clz32(info >>> VAL_ALIGN_OFFSET & 31);
+      }
+      function getKeyAlign(info) {
+        return 31 - Math.clz32(info >>> KEY_ALIGN_OFFSET & 31);
+      }
+      function __allocString(str) {
+        const length4 = str.length;
+        const ptr = alloc4(length4 << 1, STRING_ID);
+        const U16 = new Uint16Array(memory.buffer);
+        for (var i = 0, p = ptr >>> 1; i < length4; ++i)
+          U16[p + i] = str.charCodeAt(i);
+        return ptr;
+      }
+      baseModule.__allocString = __allocString;
+      function __getString(ptr) {
+        const buffer2 = memory.buffer;
+        const id = new Uint32Array(buffer2)[ptr + ID_OFFSET >>> 2];
+        if (id !== STRING_ID)
+          throw Error("not a string: " + ptr);
+        return getStringImpl(buffer2, ptr);
+      }
+      baseModule.__getString = __getString;
+      function getView(alignLog2, signed, float) {
+        const buffer2 = memory.buffer;
+        if (float) {
+          switch (alignLog2) {
+            case 2:
+              return new Float32Array(buffer2);
+            case 3:
+              return new Float64Array(buffer2);
+          }
+        } else {
+          switch (alignLog2) {
+            case 0:
+              return new (signed ? Int8Array : Uint8Array)(buffer2);
+            case 1:
+              return new (signed ? Int16Array : Uint16Array)(buffer2);
+            case 2:
+              return new (signed ? Int32Array : Uint32Array)(buffer2);
+            case 3:
+              return new (signed ? BigInt64Array : BigUint64Array)(buffer2);
+          }
+        }
+        throw Error("unsupported align: " + alignLog2);
+      }
+      function __allocArray(id, values) {
+        const info = getInfo(id);
+        if (!(info & (ARRAYBUFFERVIEW | ARRAY)))
+          throw Error("not an array: " + id + " @ " + info);
+        const align = getValueAlign(info);
+        const length4 = values.length;
+        const buf2 = alloc4(length4 << align, ARRAYBUFFER_ID);
+        const arr = alloc4(info & ARRAY ? ARRAY_SIZE : ARRAYBUFFERVIEW_SIZE, id);
+        const U32 = new Uint32Array(memory.buffer);
+        U32[arr + ARRAYBUFFERVIEW_BUFFER_OFFSET >>> 2] = retain(buf2);
+        U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2] = buf2;
+        U32[arr + ARRAYBUFFERVIEW_DATALENGTH_OFFSET >>> 2] = length4 << align;
+        if (info & ARRAY)
+          U32[arr + ARRAY_LENGTH_OFFSET >>> 2] = length4;
+        const view = getView(align, info & VAL_SIGNED, info & VAL_FLOAT);
+        if (info & VAL_MANAGED) {
+          for (let i = 0; i < length4; ++i)
+            view[(buf2 >>> align) + i] = retain(values[i]);
+        } else {
+          view.set(values, buf2 >>> align);
+        }
+        return arr;
+      }
+      baseModule.__allocArray = __allocArray;
+      function __getArrayView(arr) {
+        const U32 = new Uint32Array(memory.buffer);
+        const id = U32[arr + ID_OFFSET >>> 2];
+        const info = getInfo(id);
+        if (!(info & ARRAYBUFFERVIEW))
+          throw Error("not an array: " + id);
+        const align = getValueAlign(info);
+        var buf2 = U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];
+        const length4 = info & ARRAY ? U32[arr + ARRAY_LENGTH_OFFSET >>> 2] : U32[buf2 + SIZE_OFFSET >>> 2] >>> align;
+        return getView(align, info & VAL_SIGNED, info & VAL_FLOAT).subarray(buf2 >>>= align, buf2 + length4);
+      }
+      baseModule.__getArrayView = __getArrayView;
+      function __getArray(arr) {
+        const input = __getArrayView(arr);
+        const len = input.length;
+        const out = new Array(len);
+        for (let i = 0; i < len; i++)
+          out[i] = input[i];
+        return out;
+      }
+      baseModule.__getArray = __getArray;
+      function __getArrayBuffer(ptr) {
+        const buffer2 = memory.buffer;
+        const length4 = new Uint32Array(buffer2)[ptr + SIZE_OFFSET >>> 2];
+        return buffer2.slice(ptr, ptr + length4);
+      }
+      baseModule.__getArrayBuffer = __getArrayBuffer;
+      function getTypedArray(Type2, alignLog2, ptr) {
+        return new Type2(getTypedArrayView(Type2, alignLog2, ptr));
+      }
+      function getTypedArrayView(Type2, alignLog2, ptr) {
+        const buffer2 = memory.buffer;
+        const U32 = new Uint32Array(buffer2);
+        const bufPtr = U32[ptr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];
+        return new Type2(buffer2, bufPtr, U32[bufPtr + SIZE_OFFSET >>> 2] >>> alignLog2);
+      }
+      baseModule.__getInt8Array = getTypedArray.bind(null, Int8Array, 0);
+      baseModule.__getInt8ArrayView = getTypedArrayView.bind(null, Int8Array, 0);
+      baseModule.__getUint8Array = getTypedArray.bind(null, Uint8Array, 0);
+      baseModule.__getUint8ArrayView = getTypedArrayView.bind(null, Uint8Array, 0);
+      baseModule.__getUint8ClampedArray = getTypedArray.bind(null, Uint8ClampedArray, 0);
+      baseModule.__getUint8ClampedArrayView = getTypedArrayView.bind(null, Uint8ClampedArray, 0);
+      baseModule.__getInt16Array = getTypedArray.bind(null, Int16Array, 1);
+      baseModule.__getInt16ArrayView = getTypedArrayView.bind(null, Int16Array, 1);
+      baseModule.__getUint16Array = getTypedArray.bind(null, Uint16Array, 1);
+      baseModule.__getUint16ArrayView = getTypedArrayView.bind(null, Uint16Array, 1);
+      baseModule.__getInt32Array = getTypedArray.bind(null, Int32Array, 2);
+      baseModule.__getInt32ArrayView = getTypedArrayView.bind(null, Int32Array, 2);
+      baseModule.__getUint32Array = getTypedArray.bind(null, Uint32Array, 2);
+      baseModule.__getUint32ArrayView = getTypedArrayView.bind(null, Uint32Array, 2);
+      if (BIGINT) {
+        baseModule.__getInt64Array = getTypedArray.bind(null, BigInt64Array, 3);
+        baseModule.__getInt64ArrayView = getTypedArrayView.bind(null, BigInt64Array, 3);
+        baseModule.__getUint64Array = getTypedArray.bind(null, BigUint64Array, 3);
+        baseModule.__getUint64ArrayView = getTypedArrayView.bind(null, BigUint64Array, 3);
+      }
+      baseModule.__getFloat32Array = getTypedArray.bind(null, Float32Array, 2);
+      baseModule.__getFloat32ArrayView = getTypedArrayView.bind(null, Float32Array, 2);
+      baseModule.__getFloat64Array = getTypedArray.bind(null, Float64Array, 3);
+      baseModule.__getFloat64ArrayView = getTypedArrayView.bind(null, Float64Array, 3);
+      function __instanceof(ptr, baseId) {
+        const U32 = new Uint32Array(memory.buffer);
+        var id = U32[ptr + ID_OFFSET >>> 2];
+        if (id <= U32[rttiBase >>> 2]) {
+          do
+            if (id == baseId)
+              return true;
+          while (id = getBase(id));
+        }
+        return false;
+      }
+      baseModule.__instanceof = __instanceof;
+      baseModule.memory = baseModule.memory || memory;
+      baseModule.table = baseModule.table || table;
+      return demangle(rawExports, baseModule);
+    }
+    function isResponse(o) {
+      return typeof Response !== "undefined" && o instanceof Response;
+    }
+    async function instantiate(source, imports) {
+      if (isResponse(source = await source))
+        return instantiateStreaming(source, imports);
+      return postInstantiate(
+        preInstantiate(imports || (imports = {})),
+        await WebAssembly.instantiate(
+          source instanceof WebAssembly.Module ? source : await WebAssembly.compile(source),
+          imports
+        )
+      );
+    }
+    exports.instantiate = instantiate;
+    function instantiateSync(source, imports) {
+      return postInstantiate(
+        preInstantiate(imports || (imports = {})),
+        new WebAssembly.Instance(
+          source instanceof WebAssembly.Module ? source : new WebAssembly.Module(source),
+          imports
+        )
+      );
+    }
+    exports.instantiateSync = instantiateSync;
+    async function instantiateStreaming(source, imports) {
+      if (!WebAssembly.instantiateStreaming) {
+        return instantiate(
+          isResponse(source = await source) ? source.arrayBuffer() : source,
+          imports
+        );
+      }
+      return postInstantiate(
+        preInstantiate(imports || (imports = {})),
+        (await WebAssembly.instantiateStreaming(source, imports)).instance
+      );
+    }
+    exports.instantiateStreaming = instantiateStreaming;
+    function demangle(exports2, baseModule) {
+      var module2 = baseModule ? Object.create(baseModule) : {};
+      var setArgumentsLength = exports2["__argumentsLength"] ? function(length4) {
+        exports2["__argumentsLength"].value = length4;
+      } : exports2["__setArgumentsLength"] || exports2["__setargc"] || function() {
+      };
+      for (let internalName in exports2) {
+        if (!Object.prototype.hasOwnProperty.call(exports2, internalName))
+          continue;
+        const elem = exports2[internalName];
+        let parts = internalName.split(".");
+        let curr = module2;
+        while (parts.length > 1) {
+          let part = parts.shift();
+          if (!Object.prototype.hasOwnProperty.call(curr, part))
+            curr[part] = {};
+          curr = curr[part];
+        }
+        let name4 = parts[0];
+        let hash = name4.indexOf("#");
+        if (hash >= 0) {
+          let className = name4.substring(0, hash);
+          let classElem = curr[className];
+          if (typeof classElem === "undefined" || !classElem.prototype) {
+            let ctor = function(...args) {
+              return ctor.wrap(ctor.prototype.constructor(0, ...args));
+            };
+            ctor.prototype = {
+              valueOf: function valueOf() {
+                return this[THIS];
+              }
+            };
+            ctor.wrap = function(thisValue) {
+              return Object.create(ctor.prototype, { [THIS]: { value: thisValue, writable: false } });
+            };
+            if (classElem)
+              Object.getOwnPropertyNames(classElem).forEach(
+                (name5) => Object.defineProperty(ctor, name5, Object.getOwnPropertyDescriptor(classElem, name5))
+              );
+            curr[className] = ctor;
+          }
+          name4 = name4.substring(hash + 1);
+          curr = curr[className].prototype;
+          if (/^(get|set):/.test(name4)) {
+            if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) {
+              let getter = exports2[internalName.replace("set:", "get:")];
+              let setter = exports2[internalName.replace("get:", "set:")];
+              Object.defineProperty(curr, name4, {
+                get: function() {
+                  return getter(this[THIS]);
+                },
+                set: function(value) {
+                  setter(this[THIS], value);
+                },
+                enumerable: true
+              });
+            }
+          } else {
+            if (name4 === "constructor") {
+              (curr[name4] = (...args) => {
+                setArgumentsLength(args.length);
+                return elem(...args);
+              }).original = elem;
+            } else {
+              (curr[name4] = function(...args) {
+                setArgumentsLength(args.length);
+                return elem(this[THIS], ...args);
+              }).original = elem;
+            }
+          }
+        } else {
+          if (/^(get|set):/.test(name4)) {
+            if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) {
+              Object.defineProperty(curr, name4, {
+                get: exports2[internalName.replace("set:", "get:")],
+                set: exports2[internalName.replace("get:", "set:")],
+                enumerable: true
+              });
+            }
+          } else if (typeof elem === "function" && elem !== setArgumentsLength) {
+            (curr[name4] = (...args) => {
+              setArgumentsLength(args.length);
+              return elem(...args);
+            }).original = elem;
+          } else {
+            curr[name4] = elem;
+          }
+        }
+      }
+      return module2;
+    }
+    exports.demangle = demangle;
+  }
+});
+
+// node_modules/rabin-wasm/dist/rabin-wasm.node.js
+var require_rabin_wasm_node = __commonJS({
+  "node_modules/rabin-wasm/dist/rabin-wasm.node.js"(exports, module) {
+    var { instantiateSync } = require_loader();
+    var fs6 = __require("fs");
+    loadWebAssembly.supported = typeof WebAssembly !== "undefined";
+    async function loadWebAssembly(imp = {}) {
+      if (!loadWebAssembly.supported)
+        return null;
+      return instantiateSync(fs6.readFileSync(__dirname + "/../dist/rabin.wasm"), imp);
+    }
+    module.exports = loadWebAssembly;
+  }
+});
+
+// node_modules/rabin-wasm/src/index.js
+var require_src = __commonJS({
+  "node_modules/rabin-wasm/src/index.js"(exports, module) {
+    var Rabin = require_rabin();
+    var getRabin = require_rabin_wasm_node();
+    var create5 = async (avg, min, max, windowSize, polynomial) => {
+      const compiled = await getRabin();
+      return new Rabin(compiled, avg, min, max, windowSize, polynomial);
+    };
+    module.exports = {
+      Rabin,
+      create: create5
+    };
+  }
+});
+
+// node_modules/is-plain-obj/index.js
+var require_is_plain_obj = __commonJS({
+  "node_modules/is-plain-obj/index.js"(exports, module) {
+    "use strict";
+    module.exports = (value) => {
+      if (Object.prototype.toString.call(value) !== "[object Object]") {
+        return false;
+      }
+      const prototype = Object.getPrototypeOf(value);
+      return prototype === null || prototype === Object.prototype;
+    };
+  }
+});
+
+// node_modules/merge-options/index.js
+var require_merge_options = __commonJS({
+  "node_modules/merge-options/index.js"(exports, module) {
+    "use strict";
+    var isOptionObject = require_is_plain_obj();
+    var { hasOwnProperty } = Object.prototype;
+    var { propertyIsEnumerable } = Object;
+    var defineProperty = (object, name4, value) => Object.defineProperty(object, name4, {
+      value,
+      writable: true,
+      enumerable: true,
+      configurable: true
+    });
+    var globalThis2 = exports;
+    var defaultMergeOptions = {
+      concatArrays: false,
+      ignoreUndefined: false
+    };
+    var getEnumerableOwnPropertyKeys = (value) => {
+      const keys = [];
+      for (const key in value) {
+        if (hasOwnProperty.call(value, key)) {
+          keys.push(key);
+        }
+      }
+      if (Object.getOwnPropertySymbols) {
+        const symbols = Object.getOwnPropertySymbols(value);
+        for (const symbol2 of symbols) {
+          if (propertyIsEnumerable.call(value, symbol2)) {
+            keys.push(symbol2);
+          }
+        }
+      }
+      return keys;
+    };
+    function clone(value) {
+      if (Array.isArray(value)) {
+        return cloneArray(value);
+      }
+      if (isOptionObject(value)) {
+        return cloneOptionObject(value);
+      }
+      return value;
+    }
+    function cloneArray(array) {
+      const result = array.slice(0, 0);
+      getEnumerableOwnPropertyKeys(array).forEach((key) => {
+        defineProperty(result, key, clone(array[key]));
+      });
+      return result;
+    }
+    function cloneOptionObject(object) {
+      const result = Object.getPrototypeOf(object) === null ? /* @__PURE__ */ Object.create(null) : {};
+      getEnumerableOwnPropertyKeys(object).forEach((key) => {
+        defineProperty(result, key, clone(object[key]));
+      });
+      return result;
+    }
+    var mergeKeys = (merged, source, keys, config) => {
+      keys.forEach((key) => {
+        if (typeof source[key] === "undefined" && config.ignoreUndefined) {
+          return;
+        }
+        if (key in merged && merged[key] !== Object.getPrototypeOf(merged)) {
+          defineProperty(merged, key, merge2(merged[key], source[key], config));
+        } else {
+          defineProperty(merged, key, clone(source[key]));
+        }
+      });
+      return merged;
+    };
+    var concatArrays = (merged, source, config) => {
+      let result = merged.slice(0, 0);
+      let resultIndex = 0;
+      [merged, source].forEach((array) => {
+        const indices = [];
+        for (let k = 0; k < array.length; k++) {
+          if (!hasOwnProperty.call(array, k)) {
+            continue;
+          }
+          indices.push(String(k));
+          if (array === merged) {
+            defineProperty(result, resultIndex++, array[k]);
+          } else {
+            defineProperty(result, resultIndex++, clone(array[k]));
+          }
+        }
+        result = mergeKeys(result, array, getEnumerableOwnPropertyKeys(array).filter((key) => !indices.includes(key)), config);
+      });
+      return result;
+    };
+    function merge2(merged, source, config) {
+      if (config.concatArrays && Array.isArray(merged) && Array.isArray(source)) {
+        return concatArrays(merged, source, config);
+      }
+      if (!isOptionObject(source) || !isOptionObject(merged)) {
+        return clone(source);
+      }
+      return mergeKeys(merged, source, getEnumerableOwnPropertyKeys(source), config);
+    }
+    module.exports = function(...options) {
+      const config = merge2(clone(defaultMergeOptions), this !== globalThis2 && this || {}, defaultMergeOptions);
+      let merged = { _: {} };
+      for (const option of options) {
+        if (option === void 0) {
+          continue;
+        }
+        if (!isOptionObject(option)) {
+          throw new TypeError("`" + option + "` is not an Option Object");
+        }
+        merged = merge2(merged, { _: option }, config);
+      }
+      return merged._;
+    };
+  }
+});
+
+// node_modules/ms/index.js
+var require_ms = __commonJS({
+  "node_modules/ms/index.js"(exports, module) {
+    var s = 1e3;
+    var m = s * 60;
+    var h = m * 60;
+    var d = h * 24;
+    var w = d * 7;
+    var y = d * 365.25;
+    module.exports = function(val, options) {
+      options = options || {};
+      var type = typeof val;
+      if (type === "string" && val.length > 0) {
+        return parse(val);
+      } else if (type === "number" && isFinite(val)) {
+        return options.long ? fmtLong(val) : fmtShort(val);
+      }
+      throw new Error(
+        "val is not a non-empty string or a valid number. val=" + JSON.stringify(val)
+      );
+    };
+    function parse(str) {
+      str = String(str);
+      if (str.length > 100) {
+        return;
+      }
+      var match2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
+        str
+      );
+      if (!match2) {
+        return;
+      }
+      var n = parseFloat(match2[1]);
+      var type = (match2[2] || "ms").toLowerCase();
+      switch (type) {
+        case "years":
+        case "year":
+        case "yrs":
+        case "yr":
+        case "y":
+          return n * y;
+        case "weeks":
+        case "week":
+        case "w":
+          return n * w;
+        case "days":
+        case "day":
+        case "d":
+          return n * d;
+        case "hours":
+        case "hour":
+        case "hrs":
+        case "hr":
+        case "h":
+          return n * h;
+        case "minutes":
+        case "minute":
+        case "mins":
+        case "min":
+        case "m":
+          return n * m;
+        case "seconds":
+        case "second":
+        case "secs":
+        case "sec":
+        case "s":
+          return n * s;
+        case "milliseconds":
+        case "millisecond":
+        case "msecs":
+        case "msec":
+        case "ms":
+          return n;
+        default:
+          return void 0;
+      }
+    }
+    function fmtShort(ms) {
+      var msAbs = Math.abs(ms);
+      if (msAbs >= d) {
+        return Math.round(ms / d) + "d";
+      }
+      if (msAbs >= h) {
+        return Math.round(ms / h) + "h";
+      }
+      if (msAbs >= m) {
+        return Math.round(ms / m) + "m";
+      }
+      if (msAbs >= s) {
+        return Math.round(ms / s) + "s";
+      }
+      return ms + "ms";
+    }
+    function fmtLong(ms) {
+      var msAbs = Math.abs(ms);
+      if (msAbs >= d) {
+        return plural(ms, msAbs, d, "day");
+      }
+      if (msAbs >= h) {
+        return plural(ms, msAbs, h, "hour");
+      }
+      if (msAbs >= m) {
+        return plural(ms, msAbs, m, "minute");
+      }
+      if (msAbs >= s) {
+        return plural(ms, msAbs, s, "second");
+      }
+      return ms + " ms";
+    }
+    function plural(ms, msAbs, n, name4) {
+      var isPlural = msAbs >= n * 1.5;
+      return Math.round(ms / n) + " " + name4 + (isPlural ? "s" : "");
+    }
+  }
+});
+
+// node_modules/debug/src/common.js
+var require_common = __commonJS({
+  "node_modules/debug/src/common.js"(exports, module) {
+    function setup(env) {
+      createDebug.debug = createDebug;
+      createDebug.default = createDebug;
+      createDebug.coerce = coerce3;
+      createDebug.disable = disable;
+      createDebug.enable = enable;
+      createDebug.enabled = enabled;
+      createDebug.humanize = require_ms();
+      createDebug.destroy = destroy;
+      Object.keys(env).forEach((key) => {
+        createDebug[key] = env[key];
+      });
+      createDebug.names = [];
+      createDebug.skips = [];
+      createDebug.formatters = {};
+      function selectColor(namespace) {
+        let hash = 0;
+        for (let i = 0; i < namespace.length; i++) {
+          hash = (hash << 5) - hash + namespace.charCodeAt(i);
+          hash |= 0;
+        }
+        return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
+      }
+      createDebug.selectColor = selectColor;
+      function createDebug(namespace) {
+        let prevTime;
+        let enableOverride = null;
+        let namespacesCache;
+        let enabledCache;
+        function debug3(...args) {
+          if (!debug3.enabled) {
+            return;
+          }
+          const self = debug3;
+          const curr = Number(/* @__PURE__ */ new Date());
+          const ms = curr - (prevTime || curr);
+          self.diff = ms;
+          self.prev = prevTime;
+          self.curr = curr;
+          prevTime = curr;
+          args[0] = createDebug.coerce(args[0]);
+          if (typeof args[0] !== "string") {
+            args.unshift("%O");
+          }
+          let index = 0;
+          args[0] = args[0].replace(/%([a-zA-Z%])/g, (match2, format3) => {
+            if (match2 === "%%") {
+              return "%";
+            }
+            index++;
+            const formatter = createDebug.formatters[format3];
+            if (typeof formatter === "function") {
+              const val = args[index];
+              match2 = formatter.call(self, val);
+              args.splice(index, 1);
+              index--;
+            }
+            return match2;
+          });
+          createDebug.formatArgs.call(self, args);
+          const logFn = self.log || createDebug.log;
+          logFn.apply(self, args);
+        }
+        debug3.namespace = namespace;
+        debug3.useColors = createDebug.useColors();
+        debug3.color = createDebug.selectColor(namespace);
+        debug3.extend = extend;
+        debug3.destroy = createDebug.destroy;
+        Object.defineProperty(debug3, "enabled", {
+          enumerable: true,
+          configurable: false,
+          get: () => {
+            if (enableOverride !== null) {
+              return enableOverride;
+            }
+            if (namespacesCache !== createDebug.namespaces) {
+              namespacesCache = createDebug.namespaces;
+              enabledCache = createDebug.enabled(namespace);
+            }
+            return enabledCache;
+          },
+          set: (v) => {
+            enableOverride = v;
+          }
+        });
+        if (typeof createDebug.init === "function") {
+          createDebug.init(debug3);
+        }
+        return debug3;
+      }
+      function extend(namespace, delimiter) {
+        const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace);
+        newDebug.log = this.log;
+        return newDebug;
+      }
+      function enable(namespaces) {
+        createDebug.save(namespaces);
+        createDebug.namespaces = namespaces;
+        createDebug.names = [];
+        createDebug.skips = [];
+        let i;
+        const split = (typeof namespaces === "string" ? namespaces : "").split(/[\s,]+/);
+        const len = split.length;
+        for (i = 0; i < len; i++) {
+          if (!split[i]) {
+            continue;
+          }
+          namespaces = split[i].replace(/\*/g, ".*?");
+          if (namespaces[0] === "-") {
+            createDebug.skips.push(new RegExp("^" + namespaces.slice(1) + "$"));
+          } else {
+            createDebug.names.push(new RegExp("^" + namespaces + "$"));
+          }
+        }
+      }
+      function disable() {
+        const namespaces = [
+          ...createDebug.names.map(toNamespace),
+          ...createDebug.skips.map(toNamespace).map((namespace) => "-" + namespace)
+        ].join(",");
+        createDebug.enable("");
+        return namespaces;
+      }
+      function enabled(name4) {
+        if (name4[name4.length - 1] === "*") {
+          return true;
+        }
+        let i;
+        let len;
+        for (i = 0, len = createDebug.skips.length; i < len; i++) {
+          if (createDebug.skips[i].test(name4)) {
+            return false;
+          }
+        }
+        for (i = 0, len = createDebug.names.length; i < len; i++) {
+          if (createDebug.names[i].test(name4)) {
+            return true;
+          }
+        }
+        return false;
+      }
+      function toNamespace(regexp) {
+        return regexp.toString().substring(2, regexp.toString().length - 2).replace(/\.\*\?$/, "*");
+      }
+      function coerce3(val) {
+        if (val instanceof Error) {
+          return val.stack || val.message;
+        }
+        return val;
+      }
+      function destroy() {
+        console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
+      }
+      createDebug.enable(createDebug.load());
+      return createDebug;
+    }
+    module.exports = setup;
+  }
+});
+
+// node_modules/debug/src/browser.js
+var require_browser = __commonJS({
+  "node_modules/debug/src/browser.js"(exports, module) {
+    exports.formatArgs = formatArgs;
+    exports.save = save;
+    exports.load = load;
+    exports.useColors = useColors;
+    exports.storage = localstorage();
+    exports.destroy = /* @__PURE__ */ (() => {
+      let warned = false;
+      return () => {
+        if (!warned) {
+          warned = true;
+          console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
+        }
+      };
+    })();
+    exports.colors = [
+      "#0000CC",
+      "#0000FF",
+      "#0033CC",
+      "#0033FF",
+      "#0066CC",
+      "#0066FF",
+      "#0099CC",
+      "#0099FF",
+      "#00CC00",
+      "#00CC33",
+      "#00CC66",
+      "#00CC99",
+      "#00CCCC",
+      "#00CCFF",
+      "#3300CC",
+      "#3300FF",
+      "#3333CC",
+      "#3333FF",
+      "#3366CC",
+      "#3366FF",
+      "#3399CC",
+      "#3399FF",
+      "#33CC00",
+      "#33CC33",
+      "#33CC66",
+      "#33CC99",
+      "#33CCCC",
+      "#33CCFF",
+      "#6600CC",
+      "#6600FF",
+      "#6633CC",
+      "#6633FF",
+      "#66CC00",
+      "#66CC33",
+      "#9900CC",
+      "#9900FF",
+      "#9933CC",
+      "#9933FF",
+      "#99CC00",
+      "#99CC33",
+      "#CC0000",
+      "#CC0033",
+      "#CC0066",
+      "#CC0099",
+      "#CC00CC",
+      "#CC00FF",
+      "#CC3300",
+      "#CC3333",
+      "#CC3366",
+      "#CC3399",
+      "#CC33CC",
+      "#CC33FF",
+      "#CC6600",
+      "#CC6633",
+      "#CC9900",
+      "#CC9933",
+      "#CCCC00",
+      "#CCCC33",
+      "#FF0000",
+      "#FF0033",
+      "#FF0066",
+      "#FF0099",
+      "#FF00CC",
+      "#FF00FF",
+      "#FF3300",
+      "#FF3333",
+      "#FF3366",
+      "#FF3399",
+      "#FF33CC",
+      "#FF33FF",
+      "#FF6600",
+      "#FF6633",
+      "#FF9900",
+      "#FF9933",
+      "#FFCC00",
+      "#FFCC33"
+    ];
+    function useColors() {
+      if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) {
+        return true;
+      }
+      if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
+        return false;
+      }
+      return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773
+      typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31?
+      // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
+      typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker
+      typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/);
+    }
+    function formatArgs(args) {
+      args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? "%c " : " ") + "+" + module.exports.humanize(this.diff);
+      if (!this.useColors) {
+        return;
+      }
+      const c = "color: " + this.color;
+      args.splice(1, 0, c, "color: inherit");
+      let index = 0;
+      let lastC = 0;
+      args[0].replace(/%[a-zA-Z%]/g, (match2) => {
+        if (match2 === "%%") {
+          return;
+        }
+        index++;
+        if (match2 === "%c") {
+          lastC = index;
+        }
+      });
+      args.splice(lastC, 0, c);
+    }
+    exports.log = console.debug || console.log || (() => {
+    });
+    function save(namespaces) {
+      try {
+        if (namespaces) {
+          exports.storage.setItem("debug", namespaces);
+        } else {
+          exports.storage.removeItem("debug");
+        }
+      } catch (error) {
+      }
+    }
+    function load() {
+      let r;
+      try {
+        r = exports.storage.getItem("debug");
+      } catch (error) {
+      }
+      if (!r && typeof process !== "undefined" && "env" in process) {
+        r = process.env.DEBUG;
+      }
+      return r;
+    }
+    function localstorage() {
+      try {
+        return localStorage;
+      } catch (error) {
+      }
+    }
+    module.exports = require_common()(exports);
+    var { formatters } = module.exports;
+    formatters.j = function(v) {
+      try {
+        return JSON.stringify(v);
+      } catch (error) {
+        return "[UnexpectedJSONParseError]: " + error.message;
+      }
+    };
+  }
+});
+
+// node_modules/debug/src/node.js
+var require_node = __commonJS({
+  "node_modules/debug/src/node.js"(exports, module) {
+    var tty = __require("tty");
+    var util = __require("util");
+    exports.init = init;
+    exports.log = log12;
+    exports.formatArgs = formatArgs;
+    exports.save = save;
+    exports.load = load;
+    exports.useColors = useColors;
+    exports.destroy = util.deprecate(
+      () => {
+      },
+      "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."
+    );
+    exports.colors = [6, 2, 3, 4, 5, 1];
+    try {
+      const supportsColor = __require("supports-color");
+      if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
+        exports.colors = [
+          20,
+          21,
+          26,
+          27,
+          32,
+          33,
+          38,
+          39,
+          40,
+          41,
+          42,
+          43,
+          44,
+          45,
+          56,
+          57,
+          62,
+          63,
+          68,
+          69,
+          74,
+          75,
+          76,
+          77,
+          78,
+          79,
+          80,
+          81,
+          92,
+          93,
+          98,
+          99,
+          112,
+          113,
+          128,
+          129,
+          134,
+          135,
+          148,
+          149,
+          160,
+          161,
+          162,
+          163,
+          164,
+          165,
+          166,
+          167,
+          168,
+          169,
+          170,
+          171,
+          172,
+          173,
+          178,
+          179,
+          184,
+          185,
+          196,
+          197,
+          198,
+          199,
+          200,
+          201,
+          202,
+          203,
+          204,
+          205,
+          206,
+          207,
+          208,
+          209,
+          214,
+          215,
+          220,
+          221
+        ];
+      }
+    } catch (error) {
+    }
+    exports.inspectOpts = Object.keys(process.env).filter((key) => {
+      return /^debug_/i.test(key);
+    }).reduce((obj, key) => {
+      const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => {
+        return k.toUpperCase();
+      });
+      let val = process.env[key];
+      if (/^(yes|on|true|enabled)$/i.test(val)) {
+        val = true;
+      } else if (/^(no|off|false|disabled)$/i.test(val)) {
+        val = false;
+      } else if (val === "null") {
+        val = null;
+      } else {
+        val = Number(val);
+      }
+      obj[prop] = val;
+      return obj;
+    }, {});
+    function useColors() {
+      return "colors" in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd);
+    }
+    function formatArgs(args) {
+      const { namespace: name4, useColors: useColors2 } = this;
+      if (useColors2) {
+        const c = this.color;
+        const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c);
+        const prefix = `  ${colorCode};1m${name4} \x1B[0m`;
+        args[0] = prefix + args[0].split("\n").join("\n" + prefix);
+        args.push(colorCode + "m+" + module.exports.humanize(this.diff) + "\x1B[0m");
+      } else {
+        args[0] = getDate() + name4 + " " + args[0];
+      }
+    }
+    function getDate() {
+      if (exports.inspectOpts.hideDate) {
+        return "";
+      }
+      return (/* @__PURE__ */ new Date()).toISOString() + " ";
+    }
+    function log12(...args) {
+      return process.stderr.write(util.format(...args) + "\n");
+    }
+    function save(namespaces) {
+      if (namespaces) {
+        process.env.DEBUG = namespaces;
+      } else {
+        delete process.env.DEBUG;
+      }
+    }
+    function load() {
+      return process.env.DEBUG;
+    }
+    function init(debug3) {
+      debug3.inspectOpts = {};
+      const keys = Object.keys(exports.inspectOpts);
+      for (let i = 0; i < keys.length; i++) {
+        debug3.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
+      }
+    }
+    module.exports = require_common()(exports);
+    var { formatters } = module.exports;
+    formatters.o = function(v) {
+      this.inspectOpts.colors = this.useColors;
+      return util.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" ");
+    };
+    formatters.O = function(v) {
+      this.inspectOpts.colors = this.useColors;
+      return util.inspect(v, this.inspectOpts);
+    };
+  }
+});
+
+// node_modules/debug/src/index.js
+var require_src2 = __commonJS({
+  "node_modules/debug/src/index.js"(exports, module) {
+    if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) {
+      module.exports = require_browser();
+    } else {
+      module.exports = require_node();
+    }
+  }
+});
+
+// node_modules/balanced-match/index.js
+var require_balanced_match = __commonJS({
+  "node_modules/balanced-match/index.js"(exports, module) {
+    "use strict";
+    module.exports = balanced2;
+    function balanced2(a, b, str) {
+      if (a instanceof RegExp)
+        a = maybeMatch(a, str);
+      if (b instanceof RegExp)
+        b = maybeMatch(b, str);
+      var r = range(a, b, str);
+      return r && {
+        start: r[0],
+        end: r[1],
+        pre: str.slice(0, r[0]),
+        body: str.slice(r[0] + a.length, r[1]),
+        post: str.slice(r[1] + b.length)
+      };
+    }
+    function maybeMatch(reg, str) {
+      var m = str.match(reg);
+      return m ? m[0] : null;
+    }
+    balanced2.range = range;
+    function range(a, b, str) {
+      var begs, beg, left, right, result;
+      var ai = str.indexOf(a);
+      var bi = str.indexOf(b, ai + 1);
+      var i = ai;
+      if (ai >= 0 && bi > 0) {
+        if (a === b) {
+          return [ai, bi];
+        }
+        begs = [];
+        left = str.length;
+        while (i >= 0 && !result) {
+          if (i == ai) {
+            begs.push(i);
+            ai = str.indexOf(a, i + 1);
+          } else if (begs.length == 1) {
+            result = [begs.pop(), bi];
+          } else {
+            beg = begs.pop();
+            if (beg < left) {
+              left = beg;
+              right = bi;
+            }
+            bi = str.indexOf(b, i + 1);
+          }
+          i = ai < bi && ai >= 0 ? ai : bi;
+        }
+        if (begs.length) {
+          result = [left, right];
+        }
+      }
+      return result;
+    }
+  }
+});
+
+// node_modules/brace-expansion/index.js
+var require_brace_expansion = __commonJS({
+  "node_modules/brace-expansion/index.js"(exports, module) {
+    var balanced2 = require_balanced_match();
+    module.exports = expandTop;
+    var escSlash = "\0SLASH" + Math.random() + "\0";
+    var escOpen = "\0OPEN" + Math.random() + "\0";
+    var escClose = "\0CLOSE" + Math.random() + "\0";
+    var escComma = "\0COMMA" + Math.random() + "\0";
+    var escPeriod = "\0PERIOD" + Math.random() + "\0";
+    function numeric(str) {
+      return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0);
+    }
+    function escapeBraces(str) {
+      return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod);
+    }
+    function unescapeBraces(str) {
+      return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join(".");
+    }
+    function parseCommaParts(str) {
+      if (!str)
+        return [""];
+      var parts = [];
+      var m = balanced2("{", "}", str);
+      if (!m)
+        return str.split(",");
+      var pre = m.pre;
+      var body = m.body;
+      var post = m.post;
+      var p = pre.split(",");
+      p[p.length - 1] += "{" + body + "}";
+      var postParts = parseCommaParts(post);
+      if (post.length) {
+        p[p.length - 1] += postParts.shift();
+        p.push.apply(p, postParts);
+      }
+      parts.push.apply(parts, p);
+      return parts;
+    }
+    function expandTop(str) {
+      if (!str)
+        return [];
+      if (str.substr(0, 2) === "{}") {
+        str = "\\{\\}" + str.substr(2);
+      }
+      return expand2(escapeBraces(str), true).map(unescapeBraces);
+    }
+    function embrace(str) {
+      return "{" + str + "}";
+    }
+    function isPadded(el) {
+      return /^-?0\d/.test(el);
+    }
+    function lte(i, y) {
+      return i <= y;
+    }
+    function gte(i, y) {
+      return i >= y;
+    }
+    function expand2(str, isTop) {
+      var expansions = [];
+      var m = balanced2("{", "}", str);
+      if (!m)
+        return [str];
+      var pre = m.pre;
+      var post = m.post.length ? expand2(m.post, false) : [""];
+      if (/\$$/.test(m.pre)) {
+        for (var k = 0; k < post.length; k++) {
+          var expansion = pre + "{" + m.body + "}" + post[k];
+          expansions.push(expansion);
+        }
+      } else {
+        var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+        var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+        var isSequence = isNumericSequence || isAlphaSequence;
+        var isOptions = m.body.indexOf(",") >= 0;
+        if (!isSequence && !isOptions) {
+          if (m.post.match(/,.*\}/)) {
+            str = m.pre + "{" + m.body + escClose + m.post;
+            return expand2(str);
+          }
+          return [str];
+        }
+        var n;
+        if (isSequence) {
+          n = m.body.split(/\.\./);
+        } else {
+          n = parseCommaParts(m.body);
+          if (n.length === 1) {
+            n = expand2(n[0], false).map(embrace);
+            if (n.length === 1) {
+              return post.map(function(p) {
+                return m.pre + n[0] + p;
+              });
+            }
+          }
+        }
+        var N;
+        if (isSequence) {
+          var x = numeric(n[0]);
+          var y = numeric(n[1]);
+          var width = Math.max(n[0].length, n[1].length);
+          var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1;
+          var test = lte;
+          var reverse = y < x;
+          if (reverse) {
+            incr *= -1;
+            test = gte;
+          }
+          var pad = n.some(isPadded);
+          N = [];
+          for (var i = x; test(i, y); i += incr) {
+            var c;
+            if (isAlphaSequence) {
+              c = String.fromCharCode(i);
+              if (c === "\\")
+                c = "";
+            } else {
+              c = String(i);
+              if (pad) {
+                var need = width - c.length;
+                if (need > 0) {
+                  var z = new Array(need + 1).join("0");
+                  if (i < 0)
+                    c = "-" + z + c.slice(1);
+                  else
+                    c = z + c;
+                }
+              }
+            }
+            N.push(c);
+          }
+        } else {
+          N = [];
+          for (var j = 0; j < n.length; j++) {
+            N.push.apply(N, expand2(n[j], false));
+          }
+        }
+        for (var j = 0; j < N.length; j++) {
+          for (var k = 0; k < post.length; k++) {
+            var expansion = pre + N[j] + post[k];
+            if (!isTop || isSequence || expansion)
+              expansions.push(expansion);
+          }
+        }
+      }
+      return expansions;
+    }
+  }
+});
+
+// node_modules/fast-write-atomic/index.js
+var require_fast_write_atomic = __commonJS({
+  "node_modules/fast-write-atomic/index.js"(exports, module) {
+    "use strict";
+    var { open, write: write2, close, rename, fsync, unlink } = __require("fs");
+    var { join, dirname } = __require("path");
+    var counter = 0;
+    function cleanup(dest, err, cb) {
+      unlink(dest, function() {
+        cb(err);
+      });
+    }
+    function closeAndCleanup(fd, dest, err, cb) {
+      close(fd, cleanup.bind(null, dest, err, cb));
+    }
+    function writeLoop(fd, content, contentLength, offset, cb) {
+      write2(fd, content, offset, function(err, bytesWritten) {
+        if (err) {
+          cb(err);
+          return;
+        }
+        return bytesWritten < contentLength - offset ? writeLoop(fd, content, contentLength, offset + bytesWritten, cb) : cb(null);
+      });
+    }
+    function openLoop(dest, cb) {
+      open(dest, "w", function(err, fd) {
+        if (err) {
+          return err.code === "EMFILE" ? openLoop(dest, cb) : cb(err);
+        }
+        cb(null, fd);
+      });
+    }
+    function writeAtomic2(path6, content, cb) {
+      const tmp = join(dirname(path6), "." + process.pid + "." + counter++);
+      openLoop(tmp, function(err, fd) {
+        if (err) {
+          cb(err);
+          return;
+        }
+        const contentLength = Buffer.byteLength(content);
+        writeLoop(fd, content, contentLength, 0, function(err2) {
+          if (err2) {
+            closeAndCleanup(fd, tmp, err2, cb);
+            return;
+          }
+          fsync(fd, function(err3) {
+            if (err3) {
+              closeAndCleanup(fd, tmp, err3, cb);
+              return;
+            }
+            close(fd, function(err4) {
+              if (err4) {
+                cleanup(tmp, err4, cb);
+                return;
+              }
+              rename(tmp, path6, (err5) => {
+                if (err5) {
+                  cleanup(tmp, err5, cb);
+                  return;
+                }
+                cb(null);
+              });
+            });
+          });
+        });
+        content = null;
+      });
+    }
+    module.exports = writeAtomic2;
+  }
+});
+
+// src/bucketManager.js
+import {
+  CreateBucketCommand,
+  DeleteBucketCommand,
+  GetBucketAclCommand,
+  ListBucketsCommand,
+  PutBucketAclCommand,
+  S3Client
+} from "@aws-sdk/client-s3";
+var BucketManager = class {
+  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
+  #DEFAULT_REGION = "us-east-1";
+  #client;
+  /**
+   * @summary Creates a new instance of the constructor.
+   * @param {string} clientKey - The access key ID for authentication.
+   * @param {string} clientSecret - The secret access key for authentication.
+   * @tutorial quickstart-bucket
+   * @example
+   * import { BucketManager } from "@filebase/sdk";
+   * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
+   */
+  constructor(clientKey, clientSecret) {
+    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = {
+      credentials: {
+        accessKeyId: clientKey,
+        secretAccessKey: clientSecret
+      },
+      endpoint: clientEndpoint,
+      region: this.#DEFAULT_REGION,
+      forcePathStyle: true
+    };
+    this.#client = new S3Client(clientConfiguration);
+  }
+  /**
+   * @typedef {Object} bucket
+   * @property {string} Name The name of the bucket
+   * @property {date} Date the bucket was created
+   */
+  /**
+   * @summary Creates a new bucket with the specified name.
+   * @param {string} name - The name of the bucket to create.
+   * @returns {Promise} - A promise that resolves when the bucket is created.
+   * @example
+   * // Create bucket with name of `create-bucket-example`
+   * await bucketManager.create(`create-bucket-example`);
+   */
+  async create(name4) {
+    const command = new CreateBucketCommand({
+      Bucket: name4
+    });
+    return await this.#client.send(command);
+  }
+  /**
+   * @summary Lists the buckets in the client.
+   * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client.
+   * @example
+   * // List all buckets
+   * await bucketManager.list();
+   */
+  async list() {
+    const command = new ListBucketsCommand({}), { Buckets } = await this.#client.send(command);
+    return Buckets;
+  }
+  /**
+   * @summary Deletes the specified bucket.
+   * @param {string} name - The name of the bucket to delete.
+   * @returns {Promise} - A promise that resolves when the bucket is deleted.
+   * @example
+   * // Delete bucket with name of `bucket-name-to-delete`
+   * await bucketManager.delete(`bucket-name-to-delete`);
+   */
+  async delete(name4) {
+    const command = new DeleteBucketCommand({
+      Bucket: name4
+    });
+    await this.#client.send(command);
+    return true;
+  }
+  /**
+   * @summary Sets the privacy of a given bucket.
+   * @param {string} name - The name of the bucket to toggle.
+   * @param {boolean} targetState - The new target state. [true=private,false=public]
+   * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled.
+   * @example
+   * // Toggle bucket with label of `toggle-bucket-example`
+   * await bucketManager.setPrivacy(`toggle-bucket-example`, true);  // Enabled
+   * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled
+   */
+  async setPrivacy(name4, targetState) {
+    const command = new PutBucketAclCommand({
+      Bucket: name4,
+      ACL: targetState ? "private" : "public-read"
+    });
+    await this.#client.send(command);
+    return true;
+  }
+  /**
+   * @summary Gets the privacy of a given bucket
+   * @param {string} name - The name of the bucket to query.
+   * @returns {Promise} A promise that resolves to true if the bucket is private.
+   */
+  async getPrivacy(name4) {
+    const command = new GetBucketAclCommand({
+      Bucket: name4
+    });
+    const response = await this.#client.send(command), readPermission = response.Grants.find((grant) => {
+      return grant.Grantee.Type === "Group" && grant.Permission === "READ";
+    });
+    return !(typeof readPermission !== "undefined");
+  }
+};
+var bucketManager_default = BucketManager;
+
+// src/gatewayManager.js
+import axios2 from "axios";
+
+// src/helpers.js
+import axios from "axios";
var GATEWAY_DEFAULT_TIMEOUT = 6e4;
/**
 * Downloads a CID from an IPFS gateway and returns the response body as a stream.
 * @param {string} cid - Content identifier to fetch.
 * @param {Object} options - Gateway options.
 * @param {string} options.endpoint - Base URL of the gateway (required).
 * @param {string} [options.token] - Optional gateway access token sent via x-filebase-gateway-token.
 * @param {number} [options.timeout] - Request timeout in ms (defaults to 60s).
 * @returns {Promise<any>} The axios response body.
 * @throws {Error} If no gateway endpoint is configured.
 */
async function downloadFromGateway(cid, options) {
  if (typeof options.endpoint !== "string") {
    throw new Error(`Default Gateway must be set`);
  }
  const downloadHeaders = {};
  if (options.token) {
    downloadHeaders["x-filebase-gateway-token"] = options.token;
  }
  const downloadResponse = await axios.request({
    method: "GET",
    baseURL: options.endpoint,
    url: `/ipfs/${cid}`,
    headers: downloadHeaders,
    // BUG FIX: axios has no `type` request option; `responseType` is the
    // documented key, and without it the body is buffered/parsed rather
    // than delivered as a stream.
    responseType: "stream",
    timeout: (options == null ? void 0 : options.timeout) || GATEWAY_DEFAULT_TIMEOUT
  });
  return downloadResponse.data;
}
/**
 * Normalizes axios errors from the Filebase API: 4xx/5xx responses are
 * re-thrown as an Error carrying the API's `details`/`reason` message,
 * anything else is re-thrown untouched.
 * @param {any} err - Error raised by an axios request.
 * @throws {Error} Always throws; never returns normally.
 */
function apiErrorHandler(err) {
  const status = err?.response?.status;
  const firstDigit = status ? status.toString()[0] : "";
  if (status && (firstDigit === "4" || firstDigit === "5")) {
    const apiError = err.response.data.error;
    throw new Error(apiError?.details || apiError?.reason || err);
  }
  throw err;
}
+
+// src/gatewayManager.js
var GatewayManager = class {
  /** Default Filebase API endpoint (TEST_GW_ENDPOINT overrides it under NODE_ENV=test). */
  #DEFAULT_ENDPOINT = "https://api.filebase.io";
  /** Default per-request timeout in milliseconds. */
  #DEFAULT_TIMEOUT = 6e4;
  /** Pre-configured axios instance scoped to the /v1/gateways API. */
  #client;
  /**
   * @summary Creates a new instance of the constructor.
   * @param {string} clientKey - The access key ID for authentication.
   * @param {string} clientSecret - The secret access key for authentication.
   * @tutorial quickstart-gateway
   * @example
   * import { GatewayManager } from "@filebase/sdk";
   * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
   */
  constructor(clientKey, clientSecret) {
    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT;
    // Credentials are sent as base64("key:secret") in a bearer header.
    const encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
      "base64"
    );
    const baseURL = `${clientEndpoint}/v1/gateways`;
    this.#client = axios2.create({
      baseURL,
      timeout: this.#DEFAULT_TIMEOUT,
      headers: { Authorization: `Bearer ${encodedToken}` }
    });
  }
  /**
   * @typedef {Object} gateway
   * @property {string} name Name for the gateway
   * @property {string} domain Custom Domain for the gateway
   * @property {boolean} enabled Whether the gateway is enabled or not
   * @property {string} private Whether the gateway is scoped to users content
   * @property {date} created_at Date the gateway was created
   * @property {date} updated_at Date the gateway was last updated
   */
  /**
   * @typedef {Object} gatewayOptions
   * @property {string} [domain] Optional Custom Domain to use for the gateway
   * @property {boolean} [enabled] Optional toggle for enabling the gateway
   * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private
   */
  /**
   * @summary Creates a gateway with the given name and options
   * @param {string} name4 Unique name across entire platform for the gateway.  Must be a valid subdomain name.
   * @param {gatewayOptions} [options]
   * @returns {Promise<gateway>} - A promise that resolves to the value of a gateway.
   * @example
   * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`.
   * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`.
   * await gatewayManager.create(`create-gateway-example`, {
   *   domain: `cname.mycustomdomain.com`
   * });
   */
  async create(name4, options = {}) {
    try {
      let createOptions = {
        name: name4
      };
      // Only forward options of the documented type; ignore anything else.
      if (typeof options.domain === "string") {
        createOptions.domain = options.domain;
      }
      if (typeof options.enabled === "boolean") {
        createOptions.enabled = options.enabled;
      }
      if (typeof options.private === "boolean") {
        createOptions.private = options.private;
      }
      const createResponse = await this.#client.request({
        method: "POST",
        data: createOptions
      });
      return createResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Deletes a gateway with the given name.
   * @param {string} name4 - The name of the gateway to delete.
   * @returns {Promise<boolean>} - A promise that resolves to true if the gateway was successfully deleted.
   * @example
   * // Delete gateway with name of `delete-gateway-example`
   * await gatewayManager.delete(`delete-name-example`);
   */
  async delete(name4) {
    try {
      await this.#client.request({
        method: "DELETE",
        url: `/${name4}`,
        validateStatus: (status) => {
          return status === 204;
        }
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Returns the value of a gateway
   * @param {string} name4 - Parameter representing the name to get.
   * @returns {Promise<gateway|boolean>} - Resolves to the gateway, or false if it does not exist (404).
   * @example
   * // Get gateway with name of `gateway-get-example`
   * await gatewayManager.get(`gateway-get-example`);
   */
  async get(name4) {
    try {
      const getResponse = await this.#client.request({
        method: "GET",
        url: `/${name4}`,
        validateStatus: (status) => {
          return status === 200 || status === 404;
        }
      });
      return getResponse.status === 200 ? getResponse.data : false;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Returns a list of gateways
   * @returns {Promise<Array<gateway>>} - A promise that resolves to an array of gateways.
   * @example
   * // List all gateways
   * await gatewayManager.list();
   */
  async list() {
    try {
      const getResponse = await this.#client.request({
        method: "GET"
      });
      return getResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Updates the specified gateway.
   * @param {string} name4 - The name of the gateway to update.
   * @param {gatewayOptions} options - The options for the update operation.
   *
   * @returns {Promise<boolean>} - A Promise that resolves to true if the gateway was updated.
   * @example
   * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user.
   * await gatewayManager.update(`update-gateway-example`, {
   *   private: true
   * });
   */
  async update(name4, options) {
    try {
      const updateOptions = {
        name: name4
      };
      // BUG FIX: previously `domain` was assigned String(options.private),
      // and the truthiness guards silently dropped `enabled: false` /
      // `private: false`. Use the same typed checks as create().
      if (typeof (options == null ? void 0 : options.domain) === "string") {
        updateOptions.domain = options.domain;
      }
      if (typeof (options == null ? void 0 : options.enabled) === "boolean") {
        updateOptions.enabled = options.enabled;
      }
      if (typeof (options == null ? void 0 : options.private) === "boolean") {
        updateOptions.private = options.private;
      }
      await this.#client.request({
        method: "PUT",
        url: `/${name4}`,
        data: updateOptions,
        validateStatus: (status) => {
          return status === 200;
        }
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Toggles the enabled state of a given gateway.
   * @param {string} name4 - The name of the gateway to toggle.
   * @param {boolean} targetState - The new target state.
   * @returns {Promise<boolean>} A promise that resolves to true if the gateway was successfully toggled.
   * @example
   * // Toggle gateway with label of `toggle-gateway-example`
   * await gatewayManager.toggle(`toggle-gateway-example`, true);  // Enabled
   * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled
   */
  async toggle(name4, targetState) {
    try {
      await this.#client.request({
        method: "PUT",
        url: `/${name4}`,
        data: {
          enabled: Boolean(targetState)
        },
        validateStatus: (status) => {
          return status === 200;
        }
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
};
var gatewayManager_default = GatewayManager;
+
+// src/nameManager.js
+import axios3 from "axios";
var NameManager = class {
  /** Default Filebase API endpoint (TEST_NAME_ENDPOINT overrides it under NODE_ENV=test). */
  #DEFAULT_ENDPOINT = "https://api.filebase.io";
  /** Default per-request timeout in milliseconds. */
  #DEFAULT_TIMEOUT = 6e4;
  /** Pre-configured axios instance scoped to the /v1/names API. */
  #client;
  /**
   * @summary Creates a new instance of the constructor.
   * @param {string} clientKey - The access key ID for authentication.
   * @param {string} clientSecret - The secret access key for authentication.
   * @tutorial quickstart-name
   * @example
   * import { NameManager } from "@filebase/sdk";
   * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
   */
  constructor(clientKey, clientSecret) {
    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT;
    // Credentials are sent as base64("key:secret") in a bearer header.
    const encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
      "base64"
    );
    const baseURL = `${clientEndpoint}/v1/names`;
    this.#client = axios3.create({
      baseURL,
      timeout: this.#DEFAULT_TIMEOUT,
      headers: { Authorization: `Bearer ${encodedToken}` }
    });
  }
  /**
   * @typedef {Object} name
   * @property {string} label Descriptive label for the Key
   * @property {string} network_key IPNS Key CID
   * @property {string} cid Value that name Publishes
   * @property {number} sequence Version Number for the name
   * @property {boolean} enabled Whether the name is being Published or not
   * @property {date} published_at Date the name was last published to the DHT
   * @property {date} created_at Date the name was created
   * @property {date} updated_at Date the name was last updated
   */
  /**
   * @typedef {Object} nameOptions
   * @property {boolean} [enabled] Whether the name is enabled or not.
   */
  /**
   * @summary Creates a new IPNS name with the given name as the label and CID.
   * @param {string} label - The label of the new IPNS name.
   * @param {string} cid - The CID of the IPNS name.
   * @param {nameOptions} [options] - Additional options for the IPNS name.
   * @returns {Promise<name>} - A Promise that resolves with the response JSON.
   * @example
   * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
   * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`);
   */
  async create(label, cid, options = {
    enabled: true
  }) {
    try {
      const createResponse = await this.#client.request({
        method: "POST",
        data: {
          label,
          cid,
          // Enabled unless explicitly disabled.
          enabled: (options == null ? void 0 : options.enabled) !== false
        }
      });
      return createResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Imports a user's IPNS private key.
   * @param {string} label - The label for the IPNS name.
   * @param {string} cid - The CID (Content Identifier) of the data.
   * @param {string} privateKey - The existing private key encoded in Base64.
   * @param {nameOptions} [options] - Additional options for the IPNS name.
   * @returns {Promise<name>} - A Promise that resolves to the server response.
   * @example
   * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
   * // and a private key encoded with base64
   * await nameManager.import(
   *  `create-name-example`,
   *  `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
   *  `BASE64_ENCODED_PRIVATEKEY`
   * );
   */
  async import(label, cid, privateKey, options = {
    enabled: true
  }) {
    try {
      const importResponse = await this.#client.request({
        method: "POST",
        data: {
          label,
          cid,
          network_private_key: privateKey,
          enabled: (options == null ? void 0 : options.enabled) !== false
        }
      });
      return importResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Updates the specified name with the given CID.
   * @param {string} label - The label of the name to update.
   * @param {string} cid - The cid to associate with the name.
   * @param {nameOptions} options - The options for the set operation.
   *
   * @returns {Promise<boolean>} - A Promise that resolves to true if the IPNS name was updated.
   * @example
   * // Update name with label of `update-name-example` and set the value of the IPNS name.
   * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`);
   */
  async update(label, cid, options = {}) {
    try {
      const updateOptions = {
        cid
      };
      // BUG FIX: the previous truthiness guard dropped `enabled: false`,
      // making it impossible to disable a name through update().
      if (typeof (options == null ? void 0 : options.enabled) === "boolean") {
        updateOptions.enabled = options.enabled;
      }
      await this.#client.request({
        method: "PUT",
        url: `/${label}`,
        data: updateOptions,
        validateStatus: (status) => {
          return status === 200;
        }
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Returns the value of an IPNS name
   * @param {string} label - Parameter representing the label of the name to resolve.
   * @returns {Promise<name|boolean>} - Resolves to the name, or false if it does not exist (404).
   * @example
   * // Get IPNS name with label of `list-name-example`
   * await nameManager.get(`list-name-example`);
   */
  async get(label) {
    try {
      const getResponse = await this.#client.request({
        method: "GET",
        url: `/${label}`,
        validateStatus: (status) => {
          return status === 200 || status === 404;
        }
      });
      return getResponse.status === 200 ? getResponse.data : false;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Returns a list of IPNS names
   * @returns {Promise<Array<name>>} - A promise that resolves to an array of names.
   * @example
   * // List all IPNS names
   * await nameManager.list();
   */
  async list() {
    try {
      const listResponse = await this.#client.request({
        method: "GET"
      });
      return listResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Deletes an IPNS name with the given label.
   * @param {string} label - The label of the IPNS name to delete.
   * @returns {Promise<boolean>} - A promise that resolves to true if the IPNS name was successfully deleted.
   * @example
   * // List IPNS name with label of `delete-name-example`
   * await nameManager.delete(`delete-name-example`);
   */
  async delete(label) {
    try {
      await this.#client.request({
        method: "DELETE",
        url: `/${label}`,
        validateStatus: (status) => {
          return status === 204;
        }
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
  /**
   * @summary Toggles the enabled state of a given IPNS name.
   * @param {string} label - The label of the IPNS name to toggle.
   * @param {boolean} targetState - The new target state.
   * @returns {Promise<boolean>} A promise that resolves to true if the IPNS name was successfully toggled.
   * @example
   * // Toggle IPNS name with label of `toggle-name-example`
   * await nameManager.toggle(`toggle-name-example`, true);  // Enabled
   * await nameManager.toggle(`toggle-name-example`, false); // Disabled
   */
  async toggle(label, targetState) {
    try {
      await this.#client.request({
        method: "PUT",
        url: `/${label}`,
        data: {
          enabled: targetState
        },
        validateStatus: (status) => {
          return status === 200;
        }
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
};
var nameManager_default = NameManager;
+
+// src/objectManager.js
+import {
+  CopyObjectCommand,
+  DeleteObjectCommand,
+  GetObjectCommand,
+  HeadObjectCommand,
+  ListObjectsV2Command,
+  S3Client as S3Client2
+} from "@aws-sdk/client-s3";
+import { Upload } from "@aws-sdk/lib-storage";
+
+// node_modules/@ipld/car/src/buffer-reader.js
+import fs from "fs";
+
+// node_modules/cborg/lib/is.js
// typeof results that is() reports verbatim (besides the special cases below).
var typeofs = [
  "string",
  "number",
  "bigint",
  "symbol"
];
// Object.prototype.toString tags that getObjectType() recognizes; anything
// else falls through to the generic "Object" classification in is().
var objectTypeNames = [
  "Function",
  "Generator",
  "AsyncGenerator",
  "GeneratorFunction",
  "AsyncGeneratorFunction",
  "AsyncFunction",
  "Observable",
  "Array",
  "Buffer",
  "Object",
  "RegExp",
  "Date",
  "Error",
  "Map",
  "Set",
  "WeakMap",
  "WeakSet",
  "ArrayBuffer",
  "SharedArrayBuffer",
  "DataView",
  "Promise",
  "URL",
  "HTMLElement",
  "Int8Array",
  "Uint8Array",
  "Uint8ClampedArray",
  "Int16Array",
  "Uint16Array",
  "Int32Array",
  "Uint32Array",
  "Float32Array",
  "Float64Array",
  "BigInt64Array",
  "BigUint64Array"
];
/**
 * Classifies a value into a type-name string: "null"/"undefined"/"boolean",
 * a primitive typeof, "Function", "Array", "Buffer", a recognized
 * Object.prototype.toString tag, or the fallback "Object".
 * @param {any} value
 * @returns {string}
 */
function is(value) {
  if (value === null) return "null";
  if (value === void 0) return "undefined";
  if (value === true || value === false) return "boolean";
  const typeOf = typeof value;
  if (typeofs.includes(typeOf)) return typeOf;
  if (typeOf === "function") return "Function";
  if (Array.isArray(value)) return "Array";
  if (isBuffer(value)) return "Buffer";
  return getObjectType(value) || "Object";
}
// Duck-typed Buffer check that works across realms: asks the value's own
// constructor whether it considers the value a Buffer. Note the chain
// returns a falsy intermediate (not necessarily `false`) for non-Buffers.
function isBuffer(value) {
  return value && value.constructor && value.constructor.isBuffer && value.constructor.isBuffer.call(null, value);
}
/**
 * Returns the Object.prototype.toString tag of `value` (e.g. "Map", "Date")
 * when it is one of the recognized objectTypeNames, otherwise undefined.
 * @param {any} value
 * @returns {string|undefined}
 */
function getObjectType(value) {
  const tag = Object.prototype.toString.call(value).slice(8, -1);
  return objectTypeNames.includes(tag) ? tag : void 0;
}
+
+// node_modules/cborg/lib/token.js
/**
 * Descriptor for a CBOR major type. `majorEncoded` caches the major number
 * shifted into the high three bits of the initial byte; `terminal` marks
 * types that carry no nested items.
 */
var Type = class {
  /**
   * @param {number} major
   * @param {string} name
   * @param {boolean} terminal
   */
  constructor(major, name4, terminal) {
    this.major = major;
    this.majorEncoded = major << 5;
    this.name = name4;
    this.terminal = terminal;
  }
  /* c8 ignore next 3 */
  toString() {
    return "Type[" + this.major + "]." + this.name;
  }
  /**
   * Orders types by major number.
   * @param {Type} typ
   * @returns {number} -1, 0 or 1
   */
  compare(typ) {
    if (this.major < typ.major) return -1;
    if (this.major > typ.major) return 1;
    return 0;
  }
};
// Singleton Type instances, one per CBOR major type (0-7); the major-7
// simple values (false/true/null/undefined/break) share major 7 with float.
Type.uint = new Type(0, "uint", true);
Type.negint = new Type(1, "negint", true);
Type.bytes = new Type(2, "bytes", true);
Type.string = new Type(3, "string", true);
Type.array = new Type(4, "array", false);
Type.map = new Type(5, "map", false);
Type.tag = new Type(6, "tag", false);
Type.float = new Type(7, "float", true);
Type.false = new Type(7, "false", true);
Type.true = new Type(7, "true", true);
Type.null = new Type(7, "null", true);
Type.undefined = new Type(7, "undefined", true);
Type.break = new Type(7, "break", true);
/**
 * A single decoded/encodable CBOR token: its Type, optional JS value, and
 * optional encoded byte length. `encodedBytes`/`byteValue` start undefined
 * and act as lazy caches filled in elsewhere.
 */
var Token = class {
  /**
   * @param {Type} type
   * @param {any} [value]
   * @param {number} [encodedLength]
   */
  constructor(type, value, encodedLength) {
    this.type = type;
    this.value = value;
    this.encodedLength = encodedLength;
    this.encodedBytes = undefined;
    this.byteValue = undefined;
  }
  /* c8 ignore next 3 */
  toString() {
    return "Token[" + this.type + "]." + this.value;
  }
};
+
+// node_modules/cborg/lib/byte-utils.js
// True when running under Node with a usable Buffer implementation; the
// byte helpers below pick Buffer-backed fast paths when this is set.
// NOTE(review): when `globalThis.process` is absent this is a falsy
// non-boolean, and consumers only ever test it for truthiness.
var useBuffer = globalThis.process && // @ts-ignore
!globalThis.process.browser && // @ts-ignore
globalThis.Buffer && // @ts-ignore
typeof globalThis.Buffer.isBuffer === "function";
// Shared UTF-8 codecs for the non-Buffer paths.
var textDecoder = new TextDecoder();
var textEncoder = new TextEncoder();
// Buffer check gated on the environment supporting Buffer at all.
function isBuffer2(buf2) {
  return useBuffer && globalThis.Buffer.isBuffer(buf2);
}
// Normalizes any array-like/Buffer input to a plain Uint8Array view
// (zero-copy for Buffers, copying for other array-likes).
function asU8A(buf2) {
  if (!(buf2 instanceof Uint8Array)) {
    return Uint8Array.from(buf2);
  }
  return isBuffer2(buf2) ? new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength) : buf2;
}
/**
 * Decodes bytes[start, end) to a UTF-8 string. Spans over 64 bytes use the
 * platform decoder (Buffer or TextDecoder); short spans use the hand-rolled
 * utf8Slice, which is faster for small inputs.
 * @type {(bytes: Uint8Array, start: number, end: number) => string}
 */
var toString = useBuffer
  ? (bytes, start, end) =>
      end - start > 64
        ? globalThis.Buffer.from(bytes.subarray(start, end)).toString("utf8")
        : utf8Slice(bytes, start, end)
  : (bytes, start, end) =>
      end - start > 64
        ? textDecoder.decode(bytes.subarray(start, end))
        : utf8Slice(bytes, start, end);
/**
 * Encodes a JS string to UTF-8 bytes. Strings over 64 chars use the
 * platform encoder (Buffer or TextEncoder); short strings use the
 * hand-rolled utf8ToBytes, which is faster for small inputs.
 * @type {(string2: string) => Uint8Array|number[]}
 */
var fromString = useBuffer
  ? (string2) =>
      string2.length > 64
        ? globalThis.Buffer.from(string2)
        : utf8ToBytes(string2)
  : (string2) =>
      string2.length > 64
        ? textEncoder.encode(string2)
        : utf8ToBytes(string2);
/**
 * Copies a plain number array into a fresh Uint8Array.
 * @type {(arr: number[]) => Uint8Array}
 */
var fromArray = (arr) => Uint8Array.from(arr);
/**
 * Returns a copy of bytes[start, end). In the Buffer environment a Buffer
 * input is copied into a plain Uint8Array (Buffer#slice would alias memory);
 * plain Uint8Arrays use their own copying slice().
 * @type {(bytes: Uint8Array, start: number, end: number) => Uint8Array}
 */
var slice = useBuffer
  ? (bytes, start, end) =>
      isBuffer2(bytes)
        ? new Uint8Array(bytes.subarray(start, end))
        : bytes.slice(start, end)
  : (bytes, start, end) => bytes.slice(start, end);
/**
 * Concatenates chunks into a single Uint8Array of exactly `length4` bytes.
 * The Buffer path coerces non-Uint8Array chunks and delegates to
 * Buffer.concat; the portable path copies manually, truncating any chunk
 * that would overflow the target length.
 * @type {(chunks: Uint8Array[], length4: number) => Uint8Array}
 */
var concat = useBuffer
  ? (chunks, length4) => {
      const coerced = chunks.map(
        (c) => (c instanceof Uint8Array ? c : globalThis.Buffer.from(c))
      );
      return asU8A(globalThis.Buffer.concat(coerced, length4));
    }
  : (chunks, length4) => {
      const out = new Uint8Array(length4);
      let off = 0;
      for (let b of chunks) {
        if (off + b.length > out.length) {
          b = b.subarray(0, out.length - off);
        }
        out.set(b, off);
        off += b.length;
      }
      return out;
    };
/**
 * Allocates `size` bytes. The Buffer path uses allocUnsafe (uninitialized,
 * faster) because every byte is overwritten by callers; the portable path
 * returns a zero-filled Uint8Array.
 * @type {(size: number) => Uint8Array}
 */
var alloc = useBuffer
  ? (size) => globalThis.Buffer.allocUnsafe(size)
  : (size) => new Uint8Array(size);
/**
 * Lexicographically compares two byte sequences, delegating to
 * Buffer#compare when both are Buffers. Iterates only over b1's length,
 * matching the original's behavior for unequal-length inputs.
 * @param {Uint8Array} b1
 * @param {Uint8Array} b2
 * @returns {number} -1, 0 or 1
 */
function compare(b1, b2) {
  if (isBuffer2(b1) && isBuffer2(b2)) {
    return b1.compare(b2);
  }
  for (let i = 0; i < b1.length; i++) {
    if (b1[i] !== b2[i]) {
      return b1[i] < b2[i] ? -1 : 1;
    }
  }
  return 0;
}
/**
 * Encodes a JS string to UTF-8, returning a plain number array. Handles
 * 1-4 byte sequences including surrogate pairs; a lone high surrogate
 * falls through to the 3-byte branch (same as the platform encoders'
 * replacement-free fast path used here for short strings).
 * @param {string} str
 * @returns {number[]} UTF-8 byte values
 */
function utf8ToBytes(str) {
  const bytes = [];
  let pos = 0;
  for (let i = 0; i < str.length; i++) {
    let code = str.charCodeAt(i);
    if (code < 128) {
      bytes[pos++] = code;
    } else if (code < 2048) {
      bytes[pos++] = code >> 6 | 192;
      bytes[pos++] = code & 63 | 128;
    } else if ((code & 64512) === 55296 && i + 1 < str.length && (str.charCodeAt(i + 1) & 64512) === 56320) {
      // Combine the surrogate pair into a single code point (4-byte form).
      code = 65536 + ((code & 1023) << 10) + (str.charCodeAt(++i) & 1023);
      bytes[pos++] = code >> 18 | 240;
      bytes[pos++] = code >> 12 & 63 | 128;
      bytes[pos++] = code >> 6 & 63 | 128;
      bytes[pos++] = code & 63 | 128;
    } else {
      bytes[pos++] = code >> 12 | 224;
      bytes[pos++] = code >> 6 & 63 | 128;
      bytes[pos++] = code & 63 | 128;
    }
  }
  return bytes;
}
// Decodes UTF-8 bytes in buf2[offset, end) into a JS string. Invalid or
// truncated sequences decode to U+FFFD and advance by one byte.
function utf8Slice(buf2, offset, end) {
  const res = [];
  while (offset < end) {
    const firstByte = buf2[offset];
    let codePoint = null;
    // Sequence length from the lead byte: F0.. => 4, E0.. => 3, C0.. => 2, else 1.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    if (offset + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = buf2[offset + 1];
          // Continuation byte must be 10xxxxxx; reject overlong encodings (< 0x80).
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf2[offset + 1];
          thirdByte = buf2[offset + 2];
          // Reject overlong encodings and UTF-16 surrogate code points.
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf2[offset + 1];
          thirdByte = buf2[offset + 2];
          fourthByte = buf2[offset + 3];
          // Reject overlong encodings and values beyond U+10FFFF.
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
      }
    }
    if (codePoint === null) {
      // Invalid sequence: emit U+FFFD and resynchronize at the next byte.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Supplementary plane: emit as a UTF-16 surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    offset += bytesPerSequence;
  }
  return decodeCodePointsArray(res);
}
/**
 * Largest argument list safely passed to Function.prototype.apply; longer
 * inputs are converted in chunks to avoid stack overflows.
 */
var MAX_ARGUMENTS_LENGTH = 4096;
/**
 * Converts an array of UTF-16 code units to a string.
 * @param {number[]} codePoints
 * @returns {string}
 */
function decodeCodePointsArray(codePoints) {
  const len = codePoints.length;
  if (len <= MAX_ARGUMENTS_LENGTH) {
    return String.fromCharCode.apply(String, codePoints);
  }
  let res = "";
  for (let i = 0; i < len; i += MAX_ARGUMENTS_LENGTH) {
    res += String.fromCharCode.apply(
      String,
      codePoints.slice(i, i + MAX_ARGUMENTS_LENGTH)
    );
  }
  return res;
}
+
+// node_modules/cborg/lib/bl.js
// Default allocation size for small appends in Bl.
var defaultChunkSize = 256;
// Growable byte accumulator ("buffer list") used by the CBOR encoder:
// appends land in a list of chunks and are flattened on demand by toBytes().
// `cursor` is the logical write position; `maxCursor` is the last index
// backed by allocated chunk space; `_initReuseChunk` caches the first
// allocated chunk so reset() can reuse it without reallocating.
var Bl = class {
  /**
   * @param {number} [chunkSize]
   */
  constructor(chunkSize = defaultChunkSize) {
    this.chunkSize = chunkSize;
    this.cursor = 0;
    this.maxCursor = -1;
    this.chunks = [];
    this._initReuseChunk = null;
  }
  // Rewinds to empty, reusing the first allocated chunk when available.
  reset() {
    this.cursor = 0;
    this.maxCursor = -1;
    if (this.chunks.length) {
      this.chunks = [];
    }
    if (this._initReuseChunk !== null) {
      this.chunks.push(this._initReuseChunk);
      this.maxCursor = this._initReuseChunk.length - 1;
    }
  }
  /**
   * @param {Uint8Array|number[]} bytes
   */
  push(bytes) {
    let topChunk = this.chunks[this.chunks.length - 1];
    const newMax = this.cursor + bytes.length;
    if (newMax <= this.maxCursor + 1) {
      // Fits inside the already-allocated tail chunk: write in place.
      const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;
      topChunk.set(bytes, chunkPos);
    } else {
      if (topChunk) {
        // Trim unused allocated space off the tail chunk before appending.
        const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;
        if (chunkPos < topChunk.length) {
          this.chunks[this.chunks.length - 1] = topChunk.subarray(0, chunkPos);
          this.maxCursor = this.cursor - 1;
        }
      }
      if (bytes.length < 64 && bytes.length < this.chunkSize) {
        // Small append: allocate a fresh chunk and copy into it.
        topChunk = alloc(this.chunkSize);
        this.chunks.push(topChunk);
        this.maxCursor += topChunk.length;
        if (this._initReuseChunk === null) {
          this._initReuseChunk = topChunk;
        }
        topChunk.set(bytes, 0);
      } else {
        // Large append: adopt the caller's array as a chunk directly.
        this.chunks.push(bytes);
        this.maxCursor += bytes.length;
      }
    }
    this.cursor += bytes.length;
  }
  /**
   * Flattens all chunks into a single Uint8Array of `cursor` bytes.
   * @param {boolean} [reset] - also reset the accumulator afterwards
   * @returns {Uint8Array}
   */
  toBytes(reset = false) {
    let byts;
    if (this.chunks.length === 1) {
      const chunk = this.chunks[0];
      if (reset && this.cursor > chunk.length / 2) {
        // Mostly-full single chunk being reset: hand it out without copying.
        byts = this.cursor === chunk.length ? chunk : chunk.subarray(0, this.cursor);
        this._initReuseChunk = null;
        this.chunks = [];
      } else {
        byts = slice(chunk, 0, this.cursor);
      }
    } else {
      byts = concat(this.chunks, this.cursor);
    }
    if (reset) {
      this.reset();
    }
    return byts;
  }
};
+
+// node_modules/cborg/lib/common.js
/** Prefixes used in all cborg error messages. */
var decodeErrPrefix = "CBOR decode error:";
var encodeErrPrefix = "CBOR encode error:";
/**
 * Sparse lookup: total encoded byte width for a uint by its minor value
 * (minor <= 23 inline, 24 => 1-byte, 25 => 2-byte, 26 => 4-byte, 27 => 8-byte).
 */
var uintMinorPrefixBytes = [];
for (const [minor, width] of [[23, 1], [24, 2], [25, 3], [26, 5], [27, 9]]) {
  uintMinorPrefixBytes[minor] = width;
}
/**
 * Throws when fewer than `need` bytes remain in `data` at position `pos`.
 * @param {Uint8Array} data
 * @param {number} pos
 * @param {number} need
 */
function assertEnoughData(data, pos, need) {
  if (data.length - pos < need) {
    throw new Error(`${decodeErrPrefix} not enough data for type`);
  }
}
+
+// node_modules/cborg/lib/0uint.js
// Smallest value requiring each wider uint encoding: <24 inline, then
// 1/2/4/8-byte big-endian follow bytes (last boundary is 2^64 as a BigInt).
var uintBoundaries = [24, 256, 65536, 4294967296, BigInt("18446744073709551616")];
// Reads a big-endian uint8 at `offset`; strict mode rejects values that
// should have been encoded inline (non-minimal encodings).
function readUint8(data, offset, options) {
  assertEnoughData(data, offset, 1);
  const value = data[offset];
  if (options.strict === true && value < uintBoundaries[0]) {
    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
  }
  return value;
}
// Reads a big-endian uint16 at `offset` (strict mode enforces minimal encoding).
function readUint16(data, offset, options) {
  assertEnoughData(data, offset, 2);
  const value = data[offset] << 8 | data[offset + 1];
  if (options.strict === true && value < uintBoundaries[1]) {
    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
  }
  return value;
}
// Reads a big-endian uint32 at `offset`; the multiply (not shift) on the
// high byte keeps the result an unsigned number above 2^31.
function readUint32(data, offset, options) {
  assertEnoughData(data, offset, 4);
  const value = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];
  if (options.strict === true && value < uintBoundaries[2]) {
    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
  }
  return value;
}
// Reads a big-endian uint64 at `offset` via BigInt; returns a Number when it
// fits in the safe-integer range, otherwise a BigInt if allowBigInt is set.
function readUint64(data, offset, options) {
  assertEnoughData(data, offset, 8);
  const hi = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];
  const lo = data[offset + 4] * 16777216 + (data[offset + 5] << 16) + (data[offset + 6] << 8) + data[offset + 7];
  const value = (BigInt(hi) << BigInt(32)) + BigInt(lo);
  if (options.strict === true && value < uintBoundaries[3]) {
    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
  }
  if (value <= Number.MAX_SAFE_INTEGER) {
    return Number(value);
  }
  if (options.allowBigInt === true) {
    return value;
  }
  throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`);
}
// Token factories for the four uint widths; the trailing number is the
// total encoded length including the initial byte.
function decodeUint8(data, pos, _minor, options) {
  return new Token(Type.uint, readUint8(data, pos + 1, options), 2);
}
function decodeUint16(data, pos, _minor, options) {
  return new Token(Type.uint, readUint16(data, pos + 1, options), 3);
}
function decodeUint32(data, pos, _minor, options) {
  return new Token(Type.uint, readUint32(data, pos + 1, options), 5);
}
function decodeUint64(data, pos, _minor, options) {
  return new Token(Type.uint, readUint64(data, pos + 1, options), 9);
}
+function encodeUint(buf2, token) {
+  return encodeUintValue(buf2, 0, token.value);
+}
+function encodeUintValue(buf2, major, uint) {
+  if (uint < uintBoundaries[0]) {
+    const nuint = Number(uint);
+    buf2.push([major | nuint]);
+  } else if (uint < uintBoundaries[1]) {
+    const nuint = Number(uint);
+    buf2.push([major | 24, nuint]);
+  } else if (uint < uintBoundaries[2]) {
+    const nuint = Number(uint);
+    buf2.push([major | 25, nuint >>> 8, nuint & 255]);
+  } else if (uint < uintBoundaries[3]) {
+    const nuint = Number(uint);
+    buf2.push([major | 26, nuint >>> 24 & 255, nuint >>> 16 & 255, nuint >>> 8 & 255, nuint & 255]);
+  } else {
+    const buint = BigInt(uint);
+    if (buint < uintBoundaries[4]) {
+      const set = [major | 27, 0, 0, 0, 0, 0, 0, 0];
+      let lo = Number(buint & BigInt(4294967295));
+      let hi = Number(buint >> BigInt(32) & BigInt(4294967295));
+      set[8] = lo & 255;
+      lo = lo >> 8;
+      set[7] = lo & 255;
+      lo = lo >> 8;
+      set[6] = lo & 255;
+      lo = lo >> 8;
+      set[5] = lo & 255;
+      set[4] = hi & 255;
+      hi = hi >> 8;
+      set[3] = hi & 255;
+      hi = hi >> 8;
+      set[2] = hi & 255;
+      hi = hi >> 8;
+      set[1] = hi & 255;
+      buf2.push(set);
+    } else {
+      throw new Error(`${decodeErrPrefix} encountered BigInt larger than allowable range`);
+    }
+  }
+}
+encodeUint.encodedSize = function encodedSize(token) {
+  return encodeUintValue.encodedSize(token.value);
+};
+encodeUintValue.encodedSize = function encodedSize2(uint) {
+  if (uint < uintBoundaries[0]) {
+    return 1;
+  }
+  if (uint < uintBoundaries[1]) {
+    return 2;
+  }
+  if (uint < uintBoundaries[2]) {
+    return 3;
+  }
+  if (uint < uintBoundaries[3]) {
+    return 5;
+  }
+  return 9;
+};
+encodeUint.compareTokens = function compareTokens(tok1, tok2) {
+  return tok1.value < tok2.value ? -1 : tok1.value > tok2.value ? 1 : (
+    /* c8 ignore next */
+    0
+  );
+};
+
// node_modules/cborg/lib/1negint.js
// Major type 1 (negative integer): the wire value n represents -1 - n.
function decodeNegint8(data, pos, _minor, options) {
  return new Token(Type.negint, -1 - readUint8(data, pos + 1, options), 2);
}
function decodeNegint16(data, pos, _minor, options) {
  return new Token(Type.negint, -1 - readUint16(data, pos + 1, options), 3);
}
function decodeNegint32(data, pos, _minor, options) {
  return new Token(Type.negint, -1 - readUint32(data, pos + 1, options), 5);
}
var neg1b = BigInt(-1);
var pos1b = BigInt(1);
function decodeNegint64(data, pos, _minor, options) {
  const int = readUint64(data, pos + 1, options);
  // readUint64 returns a Number when <= MAX_SAFE_INTEGER, otherwise a BigInt.
  if (typeof int !== "bigint") {
    const value = -1 - int;
    if (value >= Number.MIN_SAFE_INTEGER) {
      return new Token(Type.negint, value, 9);
    }
  }
  if (options.allowBigInt !== true) {
    throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`);
  }
  // Value cannot be represented safely as a Number: keep it as a BigInt.
  return new Token(Type.negint, neg1b - BigInt(int), 9);
}
function encodeNegint(buf2, token) {
  const negint = token.value;
  // Recover the unsigned wire value n = -1 - value; the BigInt branch keeps
  // full precision for values below Number.MIN_SAFE_INTEGER.
  const unsigned = typeof negint === "bigint" ? negint * neg1b - pos1b : negint * -1 - 1;
  encodeUintValue(buf2, token.type.majorEncoded, unsigned);
}
// Same boundary chain as encodeUintValue.encodedSize, applied to the
// unsigned magnitude (which may be a BigInt here).
encodeNegint.encodedSize = function encodedSize3(token) {
  const negint = token.value;
  const unsigned = typeof negint === "bigint" ? negint * neg1b - pos1b : negint * -1 - 1;
  if (unsigned < uintBoundaries[0]) {
    return 1;
  }
  if (unsigned < uintBoundaries[1]) {
    return 2;
  }
  if (unsigned < uintBoundaries[2]) {
    return 3;
  }
  if (unsigned < uintBoundaries[3]) {
    return 5;
  }
  return 9;
};
// Ordering is reversed relative to uint: numerically smaller (more negative)
// values have a larger wire magnitude, so they sort later.
encodeNegint.compareTokens = function compareTokens2(tok1, tok2) {
  return tok1.value < tok2.value ? 1 : tok1.value > tok2.value ? -1 : (
    /* c8 ignore next */
    0
  );
};
+
// node_modules/cborg/lib/2bytes.js
// Major type 2 (byte string): decoders slice the payload that follows the
// `prefix`-byte header; encoding and canonical comparison are shared with
// text strings via tokenBytes().
function toToken(data, pos, prefix, length4) {
  assertEnoughData(data, pos, prefix + length4);
  const buf2 = slice(data, pos + prefix, pos + prefix + length4);
  return new Token(Type.bytes, buf2, prefix + length4);
}
function decodeBytesCompact(data, pos, minor, _options) {
  // minor < 24: payload length embedded in the initial byte.
  return toToken(data, pos, 1, minor);
}
function decodeBytes8(data, pos, _minor, options) {
  return toToken(data, pos, 2, readUint8(data, pos + 1, options));
}
function decodeBytes16(data, pos, _minor, options) {
  return toToken(data, pos, 3, readUint16(data, pos + 1, options));
}
function decodeBytes32(data, pos, _minor, options) {
  return toToken(data, pos, 5, readUint32(data, pos + 1, options));
}
function decodeBytes64(data, pos, _minor, options) {
  const l = readUint64(data, pos + 1, options);
  if (typeof l === "bigint") {
    throw new Error(`${decodeErrPrefix} 64-bit integer bytes lengths not supported`);
  }
  return toToken(data, pos, 9, l);
}
// Lazily derive (and cache on the token) the raw byte form; string tokens are
// UTF-8 encoded on first use so strings can reuse this module's encoder.
function tokenBytes(token) {
  if (token.encodedBytes === void 0) {
    token.encodedBytes = token.type === Type.string ? fromString(token.value) : token.value;
  }
  return token.encodedBytes;
}
function encodeBytes(buf2, token) {
  const bytes = tokenBytes(token);
  encodeUintValue(buf2, token.type.majorEncoded, bytes.length);
  buf2.push(bytes);
}
encodeBytes.encodedSize = function encodedSize4(token) {
  const bytes = tokenBytes(token);
  return encodeUintValue.encodedSize(bytes.length) + bytes.length;
};
encodeBytes.compareTokens = function compareTokens3(tok1, tok2) {
  return compareBytes(tokenBytes(tok1), tokenBytes(tok2));
};
// Canonical ordering: shorter byte strings sort first; ties break bytewise.
function compareBytes(b1, b2) {
  return b1.length < b2.length ? -1 : b1.length > b2.length ? 1 : compare(b1, b2);
}
+
// node_modules/cborg/lib/3string.js
// Major type 3 (text string): decode to a JS string; encoding reuses the
// byte-string encoder (strings are UTF-8 encoded lazily by tokenBytes).
function toToken2(data, pos, prefix, length4, options) {
  const encodedLength = prefix + length4;
  assertEnoughData(data, pos, encodedLength);
  const start = pos + prefix;
  const end = pos + encodedLength;
  const tok = new Token(Type.string, toString(data, start, end), encodedLength);
  if (options.retainStringBytes === true) {
    // Keep the raw bytes too, for consumers that need exact byte round-trips.
    tok.byteValue = slice(data, start, end);
  }
  return tok;
}
function decodeStringCompact(data, pos, minor, options) {
  // minor < 24: the byte length is embedded in the initial byte.
  return toToken2(data, pos, 1, minor, options);
}
function decodeString8(data, pos, _minor, options) {
  return toToken2(data, pos, 2, readUint8(data, pos + 1, options), options);
}
function decodeString16(data, pos, _minor, options) {
  return toToken2(data, pos, 3, readUint16(data, pos + 1, options), options);
}
function decodeString32(data, pos, _minor, options) {
  return toToken2(data, pos, 5, readUint32(data, pos + 1, options), options);
}
function decodeString64(data, pos, _minor, options) {
  const byteLength = readUint64(data, pos + 1, options);
  if (typeof byteLength === "bigint") {
    throw new Error(`${decodeErrPrefix} 64-bit integer string lengths not supported`);
  }
  return toToken2(data, pos, 9, byteLength, options);
}
var encodeString = encodeBytes;
+
// node_modules/cborg/lib/4array.js
// Major type 4 (array): a token carries only the entry count; the entries
// follow as separate tokens in the stream.
function toToken3(_data, _pos, prefix, length4) {
  return new Token(Type.array, length4, prefix);
}
function decodeArrayCompact(data, pos, minor, _options) {
  // minor < 24: entry count embedded in the initial byte.
  return toToken3(data, pos, 1, minor);
}
function decodeArray8(data, pos, _minor, options) {
  return toToken3(data, pos, 2, readUint8(data, pos + 1, options));
}
function decodeArray16(data, pos, _minor, options) {
  return toToken3(data, pos, 3, readUint16(data, pos + 1, options));
}
function decodeArray32(data, pos, _minor, options) {
  return toToken3(data, pos, 5, readUint32(data, pos + 1, options));
}
function decodeArray64(data, pos, _minor, options) {
  const l = readUint64(data, pos + 1, options);
  if (typeof l === "bigint") {
    throw new Error(`${decodeErrPrefix} 64-bit integer array lengths not supported`);
  }
  return toToken3(data, pos, 9, l);
}
function decodeArrayIndefinite(data, pos, _minor, options) {
  if (options.allowIndefinite === false) {
    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
  }
  // Infinity signals "read entries until a break token".
  return toToken3(data, pos, 1, Infinity);
}
function encodeArray(buf2, token) {
  encodeUintValue(buf2, Type.array.majorEncoded, token.value);
}
encodeArray.compareTokens = encodeUint.compareTokens;
encodeArray.encodedSize = function encodedSize5(token) {
  return encodeUintValue.encodedSize(token.value);
};
+
// node_modules/cborg/lib/5map.js
// Major type 5 (map): a token carries only the pair count; key/value tokens
// follow alternately in the stream.
function toToken4(_data, _pos, prefix, length4) {
  return new Token(Type.map, length4, prefix);
}
function decodeMapCompact(data, pos, minor, _options) {
  // minor < 24: pair count embedded in the initial byte.
  return toToken4(data, pos, 1, minor);
}
function decodeMap8(data, pos, _minor, options) {
  return toToken4(data, pos, 2, readUint8(data, pos + 1, options));
}
function decodeMap16(data, pos, _minor, options) {
  return toToken4(data, pos, 3, readUint16(data, pos + 1, options));
}
function decodeMap32(data, pos, _minor, options) {
  return toToken4(data, pos, 5, readUint32(data, pos + 1, options));
}
function decodeMap64(data, pos, _minor, options) {
  const l = readUint64(data, pos + 1, options);
  if (typeof l === "bigint") {
    throw new Error(`${decodeErrPrefix} 64-bit integer map lengths not supported`);
  }
  return toToken4(data, pos, 9, l);
}
function decodeMapIndefinite(data, pos, _minor, options) {
  if (options.allowIndefinite === false) {
    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
  }
  // Infinity signals "read pairs until a break token".
  return toToken4(data, pos, 1, Infinity);
}
function encodeMap(buf2, token) {
  encodeUintValue(buf2, Type.map.majorEncoded, token.value);
}
encodeMap.compareTokens = encodeUint.compareTokens;
encodeMap.encodedSize = function encodedSize6(token) {
  return encodeUintValue.encodedSize(token.value);
};
+
// node_modules/cborg/lib/6tag.js
// Major type 6 (tag): a tag-number token; the tagged content follows as its
// own token(s) in the stream.
function decodeTagCompact(_data, _pos, minor, _options) {
  // minor < 24: tag number embedded in the initial byte.
  return new Token(Type.tag, minor, 1);
}
function decodeTag8(data, pos, _minor, options) {
  return new Token(Type.tag, readUint8(data, pos + 1, options), 2);
}
function decodeTag16(data, pos, _minor, options) {
  return new Token(Type.tag, readUint16(data, pos + 1, options), 3);
}
function decodeTag32(data, pos, _minor, options) {
  return new Token(Type.tag, readUint32(data, pos + 1, options), 5);
}
function decodeTag64(data, pos, _minor, options) {
  return new Token(Type.tag, readUint64(data, pos + 1, options), 9);
}
function encodeTag(buf2, token) {
  encodeUintValue(buf2, Type.tag.majorEncoded, token.value);
}
encodeTag.compareTokens = encodeUint.compareTokens;
encodeTag.encodedSize = function encodedSize7(token) {
  return encodeUintValue.encodedSize(token.value);
};
+
// node_modules/cborg/lib/7float.js
// Major type 7: simple values (false/true/null/undefined), break, and floats.
// Floats encode in the smallest of half/single/double precision that
// round-trips the value exactly, unless options.float64 forces 64-bit.
var MINOR_FALSE = 20;
var MINOR_TRUE = 21;
var MINOR_NULL = 22;
var MINOR_UNDEFINED = 23;
function decodeUndefined(_data, _pos, _minor, options) {
  if (options.allowUndefined === false) {
    throw new Error(`${decodeErrPrefix} undefined values are not supported`);
  } else if (options.coerceUndefinedToNull === true) {
    return new Token(Type.null, null, 1);
  }
  return new Token(Type.undefined, void 0, 1);
}
function decodeBreak(_data, _pos, _minor, options) {
  if (options.allowIndefinite === false) {
    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
  }
  return new Token(Type.break, void 0, 1);
}
// Wrap a decoded float in a token, enforcing NaN/Infinity decode options.
function createToken(value, bytes, options) {
  if (options) {
    if (options.allowNaN === false && Number.isNaN(value)) {
      throw new Error(`${decodeErrPrefix} NaN values are not supported`);
    }
    if (options.allowInfinity === false && (value === Infinity || value === -Infinity)) {
      throw new Error(`${decodeErrPrefix} Infinity values are not supported`);
    }
  }
  return new Token(Type.float, value, bytes);
}
function decodeFloat16(data, pos, _minor, options) {
  return createToken(readFloat16(data, pos + 1), 3, options);
}
function decodeFloat32(data, pos, _minor, options) {
  return createToken(readFloat32(data, pos + 1), 5, options);
}
function decodeFloat64(data, pos, _minor, options) {
  return createToken(readFloat64(data, pos + 1), 9, options);
}
// The simple values false/true/null/undefined are routed through here as well
// because they share major type 7 with floats.
function encodeFloat(buf2, token, options) {
  const float = token.value;
  if (float === false) {
    buf2.push([Type.float.majorEncoded | MINOR_FALSE]);
  } else if (float === true) {
    buf2.push([Type.float.majorEncoded | MINOR_TRUE]);
  } else if (float === null) {
    buf2.push([Type.float.majorEncoded | MINOR_NULL]);
  } else if (float === void 0) {
    buf2.push([Type.float.majorEncoded | MINOR_UNDEFINED]);
  } else {
    let decoded;
    let success = false;
    if (!options || options.float64 !== true) {
      // Trial-encode at half precision first (0xf9); accept if the value
      // round-trips exactly (NaN always takes the 16-bit form), else try
      // single precision (0xfa).
      encodeFloat16(float);
      decoded = readFloat16(ui8a, 1);
      if (float === decoded || Number.isNaN(float)) {
        ui8a[0] = 249;
        buf2.push(ui8a.slice(0, 3));
        success = true;
      } else {
        encodeFloat32(float);
        decoded = readFloat32(ui8a, 1);
        if (float === decoded) {
          ui8a[0] = 250;
          buf2.push(ui8a.slice(0, 5));
          success = true;
        }
      }
    }
    if (!success) {
      // Fall back to full double precision (0xfb).
      encodeFloat64(float);
      decoded = readFloat64(ui8a, 1);
      ui8a[0] = 251;
      buf2.push(ui8a.slice(0, 9));
    }
  }
}
// Mirrors encodeFloat's width-selection logic without emitting bytes.
encodeFloat.encodedSize = function encodedSize8(token, options) {
  const float = token.value;
  if (float === false || float === true || float === null || float === void 0) {
    return 1;
  }
  if (!options || options.float64 !== true) {
    encodeFloat16(float);
    let decoded = readFloat16(ui8a, 1);
    if (float === decoded || Number.isNaN(float)) {
      return 3;
    }
    encodeFloat32(float);
    decoded = readFloat32(ui8a, 1);
    if (float === decoded) {
      return 5;
    }
  }
  return 9;
};
// Shared 9-byte scratch buffer: byte 0 holds the initial byte; the numeric
// value is written through `dataView`, which views the same buffer offset by 1.
var buffer = new ArrayBuffer(9);
var dataView = new DataView(buffer, 1);
var ui8a = new Uint8Array(buffer, 0);
// Write `inp` as a big-endian IEEE-754 half-precision value into the scratch
// buffer, deriving the half bits from the float32 representation.
function encodeFloat16(inp) {
  if (inp === Infinity) {
    dataView.setUint16(0, 31744, false);
  } else if (inp === -Infinity) {
    dataView.setUint16(0, 64512, false);
  } else if (Number.isNaN(inp)) {
    dataView.setUint16(0, 32256, false);
  } else {
    dataView.setFloat32(0, inp);
    const valu32 = dataView.getUint32(0);
    const exponent = (valu32 & 2139095040) >> 23;
    const mantissa = valu32 & 8388607;
    if (exponent === 255) {
      dataView.setUint16(0, 31744, false);
    } else if (exponent === 0) {
      // NOTE(review): this masks `inp` (the float, coerced via ToInt32)
      // rather than `valu32`; for a subnormal float32 ToInt32 gives 0, so
      // the sign of a negative subnormal appears to be dropped here. This
      // matches the bundled upstream cborg code — confirm before changing.
      dataView.setUint16(0, (inp & 2147483648) >> 16 | mantissa >> 13, false);
    } else {
      const logicalExponent = exponent - 127;
      if (logicalExponent < -24) {
        // Too small even for a subnormal half: flushes to zero.
        dataView.setUint16(0, 0);
      } else if (logicalExponent < -14) {
        // Subnormal half: the implicit leading bit becomes explicit.
        // Precedence note: `1 << 24 + logicalExponent` is 1 << (24 + le).
        dataView.setUint16(0, (valu32 & 2147483648) >> 16 | /* sign bit */
        1 << 24 + logicalExponent, false);
      } else {
        dataView.setUint16(0, (valu32 & 2147483648) >> 16 | logicalExponent + 15 << 10 | mantissa >> 13, false);
      }
    }
  }
}
// Read a big-endian half-precision float by hand — DataView has no getFloat16.
function readFloat16(ui8a2, pos) {
  if (ui8a2.length - pos < 2) {
    throw new Error(`${decodeErrPrefix} not enough data for float16`);
  }
  const half = (ui8a2[pos] << 8) + ui8a2[pos + 1];
  if (half === 31744) {
    return Infinity;
  }
  if (half === 64512) {
    return -Infinity;
  }
  if (half === 32256) {
    return NaN;
  }
  const exp = half >> 10 & 31;
  const mant = half & 1023;
  let val;
  if (exp === 0) {
    // Subnormal: no implicit leading bit.
    val = mant * 2 ** -24;
  } else if (exp !== 31) {
    // Normal: restore the implicit leading bit (1024).
    val = (mant + 1024) * 2 ** (exp - 25);
  } else {
    val = mant === 0 ? Infinity : NaN;
  }
  return half & 32768 ? -val : val;
}
function encodeFloat32(inp) {
  dataView.setFloat32(0, inp, false);
}
function readFloat32(ui8a2, pos) {
  if (ui8a2.length - pos < 4) {
    throw new Error(`${decodeErrPrefix} not enough data for float32`);
  }
  // Account for views that do not start at the buffer origin.
  const offset = (ui8a2.byteOffset || 0) + pos;
  return new DataView(ui8a2.buffer, offset, 4).getFloat32(0, false);
}
function encodeFloat64(inp) {
  dataView.setFloat64(0, inp, false);
}
function readFloat64(ui8a2, pos) {
  if (ui8a2.length - pos < 8) {
    throw new Error(`${decodeErrPrefix} not enough data for float64`);
  }
  const offset = (ui8a2.byteOffset || 0) + pos;
  return new DataView(ui8a2.buffer, offset, 8).getFloat64(0, false);
}
encodeFloat.compareTokens = encodeUint.compareTokens;
+
// node_modules/cborg/lib/jump.js
// Decoder dispatch tables indexed by the initial byte: (major << 5) | minor.
function invalidMinor(data, pos, minor) {
  throw new Error(`${decodeErrPrefix} encountered invalid minor (${minor}) for major ${data[pos] >>> 5}`);
}
// Build a decoder that always fails with the given message.
function errorer(msg) {
  return () => {
    throw new Error(`${decodeErrPrefix} ${msg}`);
  };
}
var jump = [];
// Major 0 (uint): minors 0-23 are served by the `quick` table below, so the
// jump entries are only placeholders.
for (let i = 0; i <= 23; i++) {
  jump[i] = invalidMinor;
}
jump[24] = decodeUint8;
jump[25] = decodeUint16;
jump[26] = decodeUint32;
jump[27] = decodeUint64;
jump[28] = invalidMinor;
jump[29] = invalidMinor;
jump[30] = invalidMinor;
jump[31] = invalidMinor;
// Major 1 (negint): minors 0-23 also come from `quick`.
for (let i = 32; i <= 55; i++) {
  jump[i] = invalidMinor;
}
jump[56] = decodeNegint8;
jump[57] = decodeNegint16;
jump[58] = decodeNegint32;
jump[59] = decodeNegint64;
jump[60] = invalidMinor;
jump[61] = invalidMinor;
jump[62] = invalidMinor;
jump[63] = invalidMinor;
// Major 2 (bytes).
for (let i = 64; i <= 87; i++) {
  jump[i] = decodeBytesCompact;
}
jump[88] = decodeBytes8;
jump[89] = decodeBytes16;
jump[90] = decodeBytes32;
jump[91] = decodeBytes64;
jump[92] = invalidMinor;
jump[93] = invalidMinor;
jump[94] = invalidMinor;
jump[95] = errorer("indefinite length bytes/strings are not supported");
// Major 3 (string).
for (let i = 96; i <= 119; i++) {
  jump[i] = decodeStringCompact;
}
jump[120] = decodeString8;
jump[121] = decodeString16;
jump[122] = decodeString32;
jump[123] = decodeString64;
jump[124] = invalidMinor;
jump[125] = invalidMinor;
jump[126] = invalidMinor;
jump[127] = errorer("indefinite length bytes/strings are not supported");
// Major 4 (array); 159 (0x9f) is the indefinite-length form.
for (let i = 128; i <= 151; i++) {
  jump[i] = decodeArrayCompact;
}
jump[152] = decodeArray8;
jump[153] = decodeArray16;
jump[154] = decodeArray32;
jump[155] = decodeArray64;
jump[156] = invalidMinor;
jump[157] = invalidMinor;
jump[158] = invalidMinor;
jump[159] = decodeArrayIndefinite;
// Major 5 (map); 191 (0xbf) is the indefinite-length form.
for (let i = 160; i <= 183; i++) {
  jump[i] = decodeMapCompact;
}
jump[184] = decodeMap8;
jump[185] = decodeMap16;
jump[186] = decodeMap32;
jump[187] = decodeMap64;
jump[188] = invalidMinor;
jump[189] = invalidMinor;
jump[190] = invalidMinor;
jump[191] = decodeMapIndefinite;
// Major 6 (tag).
for (let i = 192; i <= 215; i++) {
  jump[i] = decodeTagCompact;
}
jump[216] = decodeTag8;
jump[217] = decodeTag16;
jump[218] = decodeTag32;
jump[219] = decodeTag64;
jump[220] = invalidMinor;
jump[221] = invalidMinor;
jump[222] = invalidMinor;
jump[223] = invalidMinor;
// Major 7: simple values / floats / break. Simple values and the false/true/
// null bytes (244-246) are served by `quick`.
for (let i = 224; i <= 243; i++) {
  jump[i] = errorer("simple values are not supported");
}
jump[244] = invalidMinor;
jump[245] = invalidMinor;
jump[246] = invalidMinor;
jump[247] = decodeUndefined;
jump[248] = errorer("simple values are not supported");
jump[249] = decodeFloat16;
jump[250] = decodeFloat32;
jump[251] = decodeFloat64;
jump[252] = invalidMinor;
jump[253] = invalidMinor;
jump[254] = invalidMinor;
jump[255] = decodeBreak;
// `quick` holds pre-built singleton tokens for initial bytes that need no
// further reading; the tokeniser consults it before falling back to `jump`.
var quick = [];
for (let i = 0; i < 24; i++) {
  quick[i] = new Token(Type.uint, i, 1);
}
// Negints -1..-24 map to initial bytes 32..55 (31 - i for negative i).
for (let i = -1; i >= -24; i--) {
  quick[31 - i] = new Token(Type.negint, i, 1);
}
quick[64] = new Token(Type.bytes, new Uint8Array(0), 1);
quick[96] = new Token(Type.string, "", 1);
quick[128] = new Token(Type.array, 0, 1);
quick[160] = new Token(Type.map, 0, 1);
quick[244] = new Token(Type.false, false, 1);
quick[245] = new Token(Type.true, true, 1);
quick[246] = new Token(Type.null, null, 1);
// Return a canonical single-byte (or undefined) fast-path encoding for
// trivially encodable tokens; undefined falls through to the full encoder.
function quickEncodeToken(token) {
  switch (token.type) {
    case Type.false:
      return fromArray([244]);
    case Type.true:
      return fromArray([245]);
    case Type.null:
      return fromArray([246]);
    case Type.bytes:
      if (!token.value.length) {
        return fromArray([64]);
      }
      return;
    case Type.string:
      if (token.value === "") {
        return fromArray([96]);
      }
      return;
    case Type.array:
      if (token.value === 0) {
        return fromArray([128]);
      }
      return;
    case Type.map:
      if (token.value === 0) {
        return fromArray([160]);
      }
      return;
    case Type.uint:
      if (token.value < 24) {
        return fromArray([Number(token.value)]);
      }
      return;
    case Type.negint:
      if (token.value >= -24) {
        return fromArray([31 - Number(token.value)]);
      }
  }
}
+
// node_modules/cborg/lib/encode.js
// Default encode options: prefer the smallest float form, sort map keys
// canonically, and take the single-byte fast path where possible.
var defaultEncodeOptions = {
  float64: false,
  mapSorter,
  quickEncodeToken
};
// Build the encoder dispatch table, indexed by major type number.
function makeCborEncoders() {
  const encoders = [];
  const byType = [
    [Type.uint, encodeUint],
    [Type.negint, encodeNegint],
    [Type.bytes, encodeBytes],
    [Type.string, encodeString],
    [Type.array, encodeArray],
    [Type.map, encodeMap],
    [Type.tag, encodeTag],
    [Type.float, encodeFloat]
  ];
  for (const [type, encoder] of byType) {
    encoders[type.major] = encoder;
  }
  return encoders;
}
var cborEncoders = makeCborEncoders();
// Shared byte accumulator reused across encode() calls.
var buf = new Bl();
// Linked-list node tracking the chain of parent objects currently being
// encoded, used to detect circular references.
var Ref = class _Ref {
  /**
   * @param {object|any[]} obj
   * @param {Reference|undefined} parent
   */
  constructor(obj, parent) {
    this.obj = obj;
    this.parent = parent;
  }
  /**
   * Walk up the parent chain looking for `obj`.
   * @param {object|any[]} obj
   * @returns {boolean}
   */
  includes(obj) {
    for (let node = this; node; node = node.parent) {
      if (node.obj === obj) {
        return true;
      }
    }
    return false;
  }
  /**
   * Push `obj` onto the stack, throwing if it is already present (a cycle).
   * @param {Reference|undefined} stack
   * @param {object|any[]} obj
   * @returns {Reference}
   */
  static createCheck(stack, obj) {
    if (stack && stack.includes(obj)) {
      throw new Error(`${encodeErrPrefix} object contains circular references`);
    }
    return new _Ref(obj, stack);
  }
};
// Pre-built singleton tokens for common trivial values, avoiding a fresh
// allocation on every encode.
var simpleTokens = {
  null: new Token(Type.null, null),
  undefined: new Token(Type.undefined, void 0),
  true: new Token(Type.true, true),
  false: new Token(Type.false, false),
  emptyArray: new Token(Type.array, 0),
  emptyMap: new Token(Type.map, 0)
};
// Token generators keyed by the type-name string returned by `is(obj)`.
// Each returns a Token, or a nested array of Tokens, for the encoder to flush.
var typeEncoders = {
  // Safe integers become uint/negint tokens; everything else is a float.
  number(obj, _typ, _options, _refStack) {
    if (!Number.isInteger(obj) || !Number.isSafeInteger(obj)) {
      return new Token(Type.float, obj);
    } else if (obj >= 0) {
      return new Token(Type.uint, obj);
    } else {
      return new Token(Type.negint, obj);
    }
  },
  bigint(obj, _typ, _options, _refStack) {
    if (obj >= BigInt(0)) {
      return new Token(Type.uint, obj);
    } else {
      return new Token(Type.negint, obj);
    }
  },
  Uint8Array(obj, _typ, _options, _refStack) {
    return new Token(Type.bytes, obj);
  },
  string(obj, _typ, _options, _refStack) {
    return new Token(Type.string, obj);
  },
  boolean(obj, _typ, _options, _refStack) {
    return obj ? simpleTokens.true : simpleTokens.false;
  },
  null(_obj, _typ, _options, _refStack) {
    return simpleTokens.null;
  },
  undefined(_obj, _typ, _options, _refStack) {
    return simpleTokens.undefined;
  },
  // Binary buffers and views all encode as CBOR byte strings.
  ArrayBuffer(obj, _typ, _options, _refStack) {
    return new Token(Type.bytes, new Uint8Array(obj));
  },
  DataView(obj, _typ, _options, _refStack) {
    return new Token(Type.bytes, new Uint8Array(obj.buffer, obj.byteOffset, obj.byteLength));
  },
  Array(obj, _typ, options, refStack) {
    if (!obj.length) {
      if (options.addBreakTokens === true) {
        return [simpleTokens.emptyArray, new Token(Type.break)];
      }
      return simpleTokens.emptyArray;
    }
    // Track this object on the reference stack so cycles are rejected.
    refStack = Ref.createCheck(refStack, obj);
    const entries = [];
    let i = 0;
    for (const e of obj) {
      entries[i++] = objectToTokens(e, options, refStack);
    }
    if (options.addBreakTokens) {
      return [new Token(Type.array, obj.length), entries, new Token(Type.break)];
    }
    return [new Token(Type.array, obj.length), entries];
  },
  // Handles both plain objects and Maps; `typ` distinguishes them (the Map
  // alias below routes here with typ === "Map").
  Object(obj, typ, options, refStack) {
    const isMap = typ !== "Object";
    const keys = isMap ? obj.keys() : Object.keys(obj);
    const length4 = isMap ? obj.size : keys.length;
    if (!length4) {
      if (options.addBreakTokens === true) {
        return [simpleTokens.emptyMap, new Token(Type.break)];
      }
      return simpleTokens.emptyMap;
    }
    refStack = Ref.createCheck(refStack, obj);
    const entries = [];
    let i = 0;
    for (const key of keys) {
      entries[i++] = [
        objectToTokens(key, options, refStack),
        objectToTokens(isMap ? obj.get(key) : obj[key], options, refStack)
      ];
    }
    // Canonical CBOR requires deterministically sorted map keys.
    sortMapEntries(entries, options);
    if (options.addBreakTokens) {
      return [new Token(Type.map, length4), entries, new Token(Type.break)];
    }
    return [new Token(Type.map, length4), entries];
  }
};
typeEncoders.Map = typeEncoders.Object;
typeEncoders.Buffer = typeEncoders.Uint8Array;
// Every typed-array flavour encodes via the DataView (raw bytes) path.
for (const typ of "Uint8Clamped Uint16 Uint32 Int8 Int16 Int32 BigUint64 BigInt64 Float32 Float64".split(" ")) {
  typeEncoders[`${typ}Array`] = typeEncoders.DataView;
}
// Convert an arbitrary value into a Token tree. User-supplied typeEncoders
// (options.typeEncoders) take precedence; if one returns null/undefined the
// built-in encoder for that type is used as the fallback.
function objectToTokens(obj, options = {}, refStack) {
  const typ = is(obj);
  const userEncoders = options && options.typeEncoders;
  const customTypeEncoder = (userEncoders && /** @type {OptionalTypeEncoder} */ userEncoders[typ]) || typeEncoders[typ];
  if (typeof customTypeEncoder === "function") {
    const tokens = customTypeEncoder(obj, typ, options, refStack);
    if (tokens != null) {
      return tokens;
    }
  }
  const builtinEncoder = typeEncoders[typ];
  if (!builtinEncoder) {
    throw new Error(`${encodeErrPrefix} unsupported type: ${typ}`);
  }
  return builtinEncoder(obj, typ, options, refStack);
}
// Sort map entries in place with options.mapSorter, if one is configured.
function sortMapEntries(entries, options) {
  if (options.mapSorter) {
    entries.sort(options.mapSorter);
  }
}
// Canonical map-key comparator: order by token type first, then by the
// type-specific token comparison. Entries look like [keyTokens, valueTokens],
// where keyTokens may itself be a nested token array (complex keys).
function mapSorter(e1, e2) {
  const keyToken1 = Array.isArray(e1[0]) ? e1[0][0] : e1[0];
  const keyToken2 = Array.isArray(e2[0]) ? e2[0][0] : e2[0];
  if (keyToken1.type !== keyToken2.type) {
    return keyToken1.type.compare(keyToken2.type);
  }
  const tcmp = cborEncoders[keyToken1.type.major].compareTokens(keyToken1, keyToken2);
  if (tcmp === 0) {
    // Equal top-level key tokens mean canonical ordering can't be guaranteed.
    console.warn("WARNING: complex key types used, CBOR key sorting guarantees are gone");
  }
  return tcmp;
}
// Recursively flush a token tree (Tokens nested in arrays) into `buf2`.
function tokensToEncoded(buf2, tokens, encoders, options) {
  if (Array.isArray(tokens)) {
    for (const token of tokens) {
      tokensToEncoded(buf2, token, encoders, options);
    }
  } else {
    encoders[tokens.type.major](buf2, tokens, options);
  }
}
// Encode `data` using a caller-supplied encoder table. Single-token outputs
// get two fast paths: (1) a pre-built canonical byte array from
// options.quickEncodeToken, (2) an exactly pre-sized buffer when the encoder
// can report its size up front. Everything else reuses the shared `buf`.
function encodeCustom(data, encoders, options) {
  const tokens = objectToTokens(data, options);
  if (!Array.isArray(tokens) && options.quickEncodeToken) {
    const quickBytes = options.quickEncodeToken(tokens);
    if (quickBytes) {
      return quickBytes;
    }
    const encoder = encoders[tokens.type.major];
    if (encoder.encodedSize) {
      const size = encoder.encodedSize(tokens, options);
      const buf2 = new Bl(size);
      encoder(buf2, tokens, options);
      // A correctly pre-sized buffer must end up with exactly one chunk.
      if (buf2.chunks.length !== 1) {
        throw new Error(`Unexpected error: pre-calculated length for ${tokens} was wrong`);
      }
      return asU8A(buf2.chunks[0]);
    }
  }
  buf.reset();
  tokensToEncoded(buf, tokens, encoders, options);
  // toBytes(true) also resets `buf`, keeping the shared buffer reusable.
  return buf.toBytes(true);
}
function encode(data, options) {
  // Later sources win in Object.assign, so user options override defaults.
  options = Object.assign({}, defaultEncodeOptions, options);
  return encodeCustom(data, cborEncoders, options);
}
+
// node_modules/cborg/lib/decode.js
// Default decode options: permissive (indefinite lengths, undefined, BigInt
// all allowed; strict canonical checks off).
var defaultDecodeOptions = {
  strict: false,
  allowIndefinite: true,
  allowUndefined: true,
  allowBigInt: true
};
// Stateful cursor that yields one Token per call to next().
var Tokeniser = class {
  /**
   * @param {Uint8Array} data
   * @param {DecodeOptions} options
   */
  constructor(data, options = {}) {
    this._pos = 0;
    this.data = data;
    this.options = options;
  }
  pos() {
    return this._pos;
  }
  done() {
    return this._pos >= this.data.length;
  }
  next() {
    const initialByte = this.data[this._pos];
    // Single-byte values are served from the pre-built `quick` table;
    // everything else dispatches through `jump`.
    let token = quick[initialByte];
    if (token === void 0) {
      const decoder = jump[initialByte];
      if (!decoder) {
        throw new Error(`${decodeErrPrefix} no decoder for major type ${initialByte >>> 5} (byte 0x${initialByte.toString(16).padStart(2, "0")})`);
      }
      token = decoder(this.data, this._pos, initialByte & 31, this.options);
    }
    this._pos += token.encodedLength;
    return token;
  }
};
// Sentinels distinguishing "input exhausted" / "break byte seen" from real
// decoded values (which may legitimately be undefined or null).
var DONE = Symbol.for("DONE");
var BREAK = Symbol.for("BREAK");
// Materialise an array token: reads token.value entries, or reads until a
// break token when the length is indefinite (token.value === Infinity).
function tokenToArray(token, tokeniser, options) {
  const arr = [];
  for (let i = 0; i < token.value; i++) {
    const value = tokensToObject(tokeniser, options);
    if (value === BREAK) {
      if (token.value === Infinity) {
        break;
      }
      throw new Error(`${decodeErrPrefix} got unexpected break to lengthed array`);
    }
    if (value === DONE) {
      throw new Error(`${decodeErrPrefix} found array but not enough entries (got ${i}, expected ${token.value})`);
    }
    arr[i] = value;
  }
  return arr;
}
// Materialise a map token into a plain object (default) or a Map when
// options.useMaps is set; optionally rejects duplicate keys.
function tokenToMap(token, tokeniser, options) {
  const useMaps = options.useMaps === true;
  const obj = useMaps ? void 0 : {};
  const m = useMaps ? /* @__PURE__ */ new Map() : void 0;
  for (let i = 0; i < token.value; i++) {
    const key = tokensToObject(tokeniser, options);
    if (key === BREAK) {
      if (token.value === Infinity) {
        break;
      }
      throw new Error(`${decodeErrPrefix} got unexpected break to lengthed map`);
    }
    if (key === DONE) {
      throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no key], expected ${token.value})`);
    }
    // Plain-object mode only accepts string keys, avoiding silent coercion.
    if (useMaps !== true && typeof key !== "string") {
      throw new Error(`${decodeErrPrefix} non-string keys not supported (got ${typeof key})`);
    }
    if (options.rejectDuplicateMapKeys === true) {
      if (useMaps && m.has(key) || !useMaps && key in obj) {
        throw new Error(`${decodeErrPrefix} found repeat map key "${key}"`);
      }
    }
    const value = tokensToObject(tokeniser, options);
    if (value === DONE) {
      throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no value], expected ${token.value})`);
    }
    if (useMaps) {
      m.set(key, value);
    } else {
      obj[key] = value;
    }
  }
  return useMaps ? m : obj;
}
+function tokensToObject(tokeniser, options) {
+  if (tokeniser.done()) {
+    return DONE;
+  }
+  const token = tokeniser.next();
+  if (token.type === Type.break) {
+    return BREAK;
+  }
+  if (token.type.terminal) {
+    return token.value;
+  }
+  if (token.type === Type.array) {
+    return tokenToArray(token, tokeniser, options);
+  }
+  if (token.type === Type.map) {
+    return tokenToMap(token, tokeniser, options);
+  }
+  if (token.type === Type.tag) {
+    if (options.tags && typeof options.tags[token.value] === "function") {
+      const tagged = tokensToObject(tokeniser, options);
+      return options.tags[token.value](tagged);
+    }
+    throw new Error(`${decodeErrPrefix} tag not supported (${token.value})`);
+  }
+  throw new Error("unsupported");
+}
+function decodeFirst(data, options) {
+  if (!(data instanceof Uint8Array)) {
+    throw new Error(`${decodeErrPrefix} data to decode must be a Uint8Array`);
+  }
+  options = Object.assign({}, defaultDecodeOptions, options);
+  const tokeniser = options.tokenizer || new Tokeniser(data, options);
+  const decoded = tokensToObject(tokeniser, options);
+  if (decoded === DONE) {
+    throw new Error(`${decodeErrPrefix} did not find any content to decode`);
+  }
+  if (decoded === BREAK) {
+    throw new Error(`${decodeErrPrefix} got unexpected break`);
+  }
+  return [decoded, data.subarray(tokeniser.pos())];
+}
+function decode(data, options) {
+  const [decoded, remainder] = decodeFirst(data, options);
+  if (remainder.length > 0) {
+    throw new Error(`${decodeErrPrefix} too many terminals, data makes no sense`);
+  }
+  return decoded;
+}
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bytes.js
+var empty = new Uint8Array(0);
+function equals(aa, bb) {
+  if (aa === bb)
+    return true;
+  if (aa.byteLength !== bb.byteLength) {
+    return false;
+  }
+  for (let ii = 0; ii < aa.byteLength; ii++) {
+    if (aa[ii] !== bb[ii]) {
+      return false;
+    }
+  }
+  return true;
+}
+function coerce(o) {
+  if (o instanceof Uint8Array && o.constructor.name === "Uint8Array")
+    return o;
+  if (o instanceof ArrayBuffer)
+    return new Uint8Array(o);
+  if (ArrayBuffer.isView(o)) {
+    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
+  }
+  throw new Error("Unknown type, must be binary type");
+}
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/base-x.js
+function base(ALPHABET, name4) {
+  if (ALPHABET.length >= 255) {
+    throw new TypeError("Alphabet too long");
+  }
+  var BASE_MAP = new Uint8Array(256);
+  for (var j = 0; j < BASE_MAP.length; j++) {
+    BASE_MAP[j] = 255;
+  }
+  for (var i = 0; i < ALPHABET.length; i++) {
+    var x = ALPHABET.charAt(i);
+    var xc = x.charCodeAt(0);
+    if (BASE_MAP[xc] !== 255) {
+      throw new TypeError(x + " is ambiguous");
+    }
+    BASE_MAP[xc] = i;
+  }
+  var BASE = ALPHABET.length;
+  var LEADER = ALPHABET.charAt(0);
+  var FACTOR = Math.log(BASE) / Math.log(256);
+  var iFACTOR = Math.log(256) / Math.log(BASE);
+  function encode12(source) {
+    if (source instanceof Uint8Array)
+      ;
+    else if (ArrayBuffer.isView(source)) {
+      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
+    } else if (Array.isArray(source)) {
+      source = Uint8Array.from(source);
+    }
+    if (!(source instanceof Uint8Array)) {
+      throw new TypeError("Expected Uint8Array");
+    }
+    if (source.length === 0) {
+      return "";
+    }
+    var zeroes = 0;
+    var length4 = 0;
+    var pbegin = 0;
+    var pend = source.length;
+    while (pbegin !== pend && source[pbegin] === 0) {
+      pbegin++;
+      zeroes++;
+    }
+    var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
+    var b58 = new Uint8Array(size);
+    while (pbegin !== pend) {
+      var carry = source[pbegin];
+      var i2 = 0;
+      for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) {
+        carry += 256 * b58[it1] >>> 0;
+        b58[it1] = carry % BASE >>> 0;
+        carry = carry / BASE >>> 0;
+      }
+      if (carry !== 0) {
+        throw new Error("Non-zero carry");
+      }
+      length4 = i2;
+      pbegin++;
+    }
+    var it2 = size - length4;
+    while (it2 !== size && b58[it2] === 0) {
+      it2++;
+    }
+    var str = LEADER.repeat(zeroes);
+    for (; it2 < size; ++it2) {
+      str += ALPHABET.charAt(b58[it2]);
+    }
+    return str;
+  }
+  function decodeUnsafe(source) {
+    if (typeof source !== "string") {
+      throw new TypeError("Expected String");
+    }
+    if (source.length === 0) {
+      return new Uint8Array();
+    }
+    var psz = 0;
+    if (source[psz] === " ") {
+      return;
+    }
+    var zeroes = 0;
+    var length4 = 0;
+    while (source[psz] === LEADER) {
+      zeroes++;
+      psz++;
+    }
+    var size = (source.length - psz) * FACTOR + 1 >>> 0;
+    var b256 = new Uint8Array(size);
+    while (source[psz]) {
+      var carry = BASE_MAP[source.charCodeAt(psz)];
+      if (carry === 255) {
+        return;
+      }
+      var i2 = 0;
+      for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) {
+        carry += BASE * b256[it3] >>> 0;
+        b256[it3] = carry % 256 >>> 0;
+        carry = carry / 256 >>> 0;
+      }
+      if (carry !== 0) {
+        throw new Error("Non-zero carry");
+      }
+      length4 = i2;
+      psz++;
+    }
+    if (source[psz] === " ") {
+      return;
+    }
+    var it4 = size - length4;
+    while (it4 !== size && b256[it4] === 0) {
+      it4++;
+    }
+    var vch = new Uint8Array(zeroes + (size - it4));
+    var j2 = zeroes;
+    while (it4 !== size) {
+      vch[j2++] = b256[it4++];
+    }
+    return vch;
+  }
+  function decode15(string2) {
+    var buffer2 = decodeUnsafe(string2);
+    if (buffer2) {
+      return buffer2;
+    }
+    throw new Error(`Non-${name4} character`);
+  }
+  return {
+    encode: encode12,
+    decodeUnsafe,
+    decode: decode15
+  };
+}
+var src = base;
+var _brrp__multiformats_scope_baseX = src;
+var base_x_default = _brrp__multiformats_scope_baseX;
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base.js
+var Encoder = class {
+  name;
+  prefix;
+  baseEncode;
+  constructor(name4, prefix, baseEncode) {
+    this.name = name4;
+    this.prefix = prefix;
+    this.baseEncode = baseEncode;
+  }
+  encode(bytes) {
+    if (bytes instanceof Uint8Array) {
+      return `${this.prefix}${this.baseEncode(bytes)}`;
+    } else {
+      throw Error("Unknown type, must be binary type");
+    }
+  }
+};
+var Decoder = class {
+  name;
+  prefix;
+  baseDecode;
+  prefixCodePoint;
+  constructor(name4, prefix, baseDecode) {
+    this.name = name4;
+    this.prefix = prefix;
+    if (prefix.codePointAt(0) === void 0) {
+      throw new Error("Invalid prefix character");
+    }
+    this.prefixCodePoint = prefix.codePointAt(0);
+    this.baseDecode = baseDecode;
+  }
+  decode(text) {
+    if (typeof text === "string") {
+      if (text.codePointAt(0) !== this.prefixCodePoint) {
+        throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
+      }
+      return this.baseDecode(text.slice(this.prefix.length));
+    } else {
+      throw Error("Can only multibase decode strings");
+    }
+  }
+  or(decoder) {
+    return or(this, decoder);
+  }
+};
+var ComposedDecoder = class {
+  decoders;
+  constructor(decoders) {
+    this.decoders = decoders;
+  }
+  or(decoder) {
+    return or(this, decoder);
+  }
+  decode(input) {
+    const prefix = input[0];
+    const decoder = this.decoders[prefix];
+    if (decoder != null) {
+      return decoder.decode(input);
+    } else {
+      throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
+    }
+  }
+};
+function or(left, right) {
+  return new ComposedDecoder({
+    ...left.decoders ?? { [left.prefix]: left },
+    ...right.decoders ?? { [right.prefix]: right }
+  });
+}
+var Codec = class {
+  name;
+  prefix;
+  baseEncode;
+  baseDecode;
+  encoder;
+  decoder;
+  constructor(name4, prefix, baseEncode, baseDecode) {
+    this.name = name4;
+    this.prefix = prefix;
+    this.baseEncode = baseEncode;
+    this.baseDecode = baseDecode;
+    this.encoder = new Encoder(name4, prefix, baseEncode);
+    this.decoder = new Decoder(name4, prefix, baseDecode);
+  }
+  encode(input) {
+    return this.encoder.encode(input);
+  }
+  decode(input) {
+    return this.decoder.decode(input);
+  }
+};
+function from({ name: name4, prefix, encode: encode12, decode: decode15 }) {
+  return new Codec(name4, prefix, encode12, decode15);
+}
+function baseX({ name: name4, prefix, alphabet: alphabet2 }) {
+  const { encode: encode12, decode: decode15 } = base_x_default(alphabet2, name4);
+  return from({
+    prefix,
+    name: name4,
+    encode: encode12,
+    decode: (text) => coerce(decode15(text))
+  });
+}
+function decode2(string2, alphabet2, bitsPerChar, name4) {
+  const codes = {};
+  for (let i = 0; i < alphabet2.length; ++i) {
+    codes[alphabet2[i]] = i;
+  }
+  let end = string2.length;
+  while (string2[end - 1] === "=") {
+    --end;
+  }
+  const out = new Uint8Array(end * bitsPerChar / 8 | 0);
+  let bits = 0;
+  let buffer2 = 0;
+  let written = 0;
+  for (let i = 0; i < end; ++i) {
+    const value = codes[string2[i]];
+    if (value === void 0) {
+      throw new SyntaxError(`Non-${name4} character`);
+    }
+    buffer2 = buffer2 << bitsPerChar | value;
+    bits += bitsPerChar;
+    if (bits >= 8) {
+      bits -= 8;
+      out[written++] = 255 & buffer2 >> bits;
+    }
+  }
+  if (bits >= bitsPerChar || (255 & buffer2 << 8 - bits) !== 0) {
+    throw new SyntaxError("Unexpected end of data");
+  }
+  return out;
+}
+function encode2(data, alphabet2, bitsPerChar) {
+  const pad = alphabet2[alphabet2.length - 1] === "=";
+  const mask = (1 << bitsPerChar) - 1;
+  let out = "";
+  let bits = 0;
+  let buffer2 = 0;
+  for (let i = 0; i < data.length; ++i) {
+    buffer2 = buffer2 << 8 | data[i];
+    bits += 8;
+    while (bits > bitsPerChar) {
+      bits -= bitsPerChar;
+      out += alphabet2[mask & buffer2 >> bits];
+    }
+  }
+  if (bits !== 0) {
+    out += alphabet2[mask & buffer2 << bitsPerChar - bits];
+  }
+  if (pad) {
+    while ((out.length * bitsPerChar & 7) !== 0) {
+      out += "=";
+    }
+  }
+  return out;
+}
+function rfc4648({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) {
+  return from({
+    prefix,
+    name: name4,
+    encode(input) {
+      return encode2(input, alphabet2, bitsPerChar);
+    },
+    decode(input) {
+      return decode2(input, alphabet2, bitsPerChar, name4);
+    }
+  });
+}
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base32.js
+var base32 = rfc4648({
+  prefix: "b",
+  name: "base32",
+  alphabet: "abcdefghijklmnopqrstuvwxyz234567",
+  bitsPerChar: 5
+});
+var base32upper = rfc4648({
+  prefix: "B",
+  name: "base32upper",
+  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
+  bitsPerChar: 5
+});
+var base32pad = rfc4648({
+  prefix: "c",
+  name: "base32pad",
+  alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
+  bitsPerChar: 5
+});
+var base32padupper = rfc4648({
+  prefix: "C",
+  name: "base32padupper",
+  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
+  bitsPerChar: 5
+});
+var base32hex = rfc4648({
+  prefix: "v",
+  name: "base32hex",
+  alphabet: "0123456789abcdefghijklmnopqrstuv",
+  bitsPerChar: 5
+});
+var base32hexupper = rfc4648({
+  prefix: "V",
+  name: "base32hexupper",
+  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
+  bitsPerChar: 5
+});
+var base32hexpad = rfc4648({
+  prefix: "t",
+  name: "base32hexpad",
+  alphabet: "0123456789abcdefghijklmnopqrstuv=",
+  bitsPerChar: 5
+});
+var base32hexpadupper = rfc4648({
+  prefix: "T",
+  name: "base32hexpadupper",
+  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
+  bitsPerChar: 5
+});
+var base32z = rfc4648({
+  prefix: "h",
+  name: "base32z",
+  alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
+  bitsPerChar: 5
+});
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base58.js
+var base58btc = baseX({
+  name: "base58btc",
+  prefix: "z",
+  alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
+});
+var base58flickr = baseX({
+  name: "base58flickr",
+  prefix: "Z",
+  alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
+});
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/varint.js
+var encode_1 = encode3;
+var MSB = 128;
+var REST = 127;
+var MSBALL = ~REST;
+var INT = Math.pow(2, 31);
+function encode3(num, out, offset) {
+  out = out || [];
+  offset = offset || 0;
+  var oldOffset = offset;
+  while (num >= INT) {
+    out[offset++] = num & 255 | MSB;
+    num /= 128;
+  }
+  while (num & MSBALL) {
+    out[offset++] = num & 255 | MSB;
+    num >>>= 7;
+  }
+  out[offset] = num | 0;
+  encode3.bytes = offset - oldOffset + 1;
+  return out;
+}
+var decode3 = read;
+var MSB$1 = 128;
+var REST$1 = 127;
+function read(buf2, offset) {
+  var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
+  do {
+    if (counter >= l) {
+      read.bytes = 0;
+      throw new RangeError("Could not decode varint");
+    }
+    b = buf2[counter++];
+    res += shift < 28 ? (b & REST$1) << shift : (b & REST$1) * Math.pow(2, shift);
+    shift += 7;
+  } while (b >= MSB$1);
+  read.bytes = counter - offset;
+  return res;
+}
+var N1 = Math.pow(2, 7);
+var N2 = Math.pow(2, 14);
+var N3 = Math.pow(2, 21);
+var N4 = Math.pow(2, 28);
+var N5 = Math.pow(2, 35);
+var N6 = Math.pow(2, 42);
+var N7 = Math.pow(2, 49);
+var N8 = Math.pow(2, 56);
+var N9 = Math.pow(2, 63);
+var length = function(value) {
+  return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 9 : 10;
+};
+var varint = {
+  encode: encode_1,
+  decode: decode3,
+  encodingLength: length
+};
+var _brrp_varint = varint;
+var varint_default = _brrp_varint;
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/varint.js
+function decode4(data, offset = 0) {
+  const code5 = varint_default.decode(data, offset);
+  return [code5, varint_default.decode.bytes];
+}
+function encodeTo(int, target, offset = 0) {
+  varint_default.encode(int, target, offset);
+  return target;
+}
+function encodingLength(int) {
+  return varint_default.encodingLength(int);
+}
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/hashes/digest.js
+function create(code5, digest2) {
+  const size = digest2.byteLength;
+  const sizeOffset = encodingLength(code5);
+  const digestOffset = sizeOffset + encodingLength(size);
+  const bytes = new Uint8Array(digestOffset + size);
+  encodeTo(code5, bytes, 0);
+  encodeTo(size, bytes, sizeOffset);
+  bytes.set(digest2, digestOffset);
+  return new Digest(code5, size, digest2, bytes);
+}
+function decode5(multihash) {
+  const bytes = coerce(multihash);
+  const [code5, sizeOffset] = decode4(bytes);
+  const [size, digestOffset] = decode4(bytes.subarray(sizeOffset));
+  const digest2 = bytes.subarray(sizeOffset + digestOffset);
+  if (digest2.byteLength !== size) {
+    throw new Error("Incorrect length");
+  }
+  return new Digest(code5, size, digest2, bytes);
+}
+function equals2(a, b) {
+  if (a === b) {
+    return true;
+  } else {
+    const data = b;
+    return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals(a.bytes, data.bytes);
+  }
+}
+var Digest = class {
+  code;
+  size;
+  digest;
+  bytes;
+  /**
+   * Creates a multihash digest.
+   */
+  constructor(code5, size, digest2, bytes) {
+    this.code = code5;
+    this.size = size;
+    this.digest = digest2;
+    this.bytes = bytes;
+  }
+};
+
+// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/cid.js
+function format(link, base3) {
+  const { bytes, version } = link;
+  switch (version) {
+    case 0:
+      return toStringV0(bytes, baseCache(link), base3 ?? base58btc.encoder);
+    default:
+      return toStringV1(bytes, baseCache(link), base3 ?? base32.encoder);
+  }
+}
+var cache = /* @__PURE__ */ new WeakMap();
+function baseCache(cid) {
+  const baseCache3 = cache.get(cid);
+  if (baseCache3 == null) {
+    const baseCache4 = /* @__PURE__ */ new Map();
+    cache.set(cid, baseCache4);
+    return baseCache4;
+  }
+  return baseCache3;
+}
+var CID = class _CID {
+  code;
+  version;
+  multihash;
+  bytes;
+  "/";
+  /**
+   * @param version - Version of the CID
+   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
+   * @param multihash - (Multi)hash of the of the content.
+   */
+  constructor(version, code5, multihash, bytes) {
+    this.code = code5;
+    this.version = version;
+    this.multihash = multihash;
+    this.bytes = bytes;
+    this["/"] = bytes;
+  }
+  /**
+   * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
+   * please either use `CID.asCID(cid)` or switch to new signalling mechanism
+   *
+   * @deprecated
+   */
+  get asCID() {
+    return this;
+  }
+  // ArrayBufferView
+  get byteOffset() {
+    return this.bytes.byteOffset;
+  }
+  // ArrayBufferView
+  get byteLength() {
+    return this.bytes.byteLength;
+  }
+  toV0() {
+    switch (this.version) {
+      case 0: {
+        return this;
+      }
+      case 1: {
+        const { code: code5, multihash } = this;
+        if (code5 !== DAG_PB_CODE) {
+          throw new Error("Cannot convert a non dag-pb CID to CIDv0");
+        }
+        if (multihash.code !== SHA_256_CODE) {
+          throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
+        }
+        return _CID.createV0(multihash);
+      }
+      default: {
+        throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`);
+      }
+    }
+  }
+  toV1() {
+    switch (this.version) {
+      case 0: {
+        const { code: code5, digest: digest2 } = this.multihash;
+        const multihash = create(code5, digest2);
+        return _CID.createV1(this.code, multihash);
+      }
+      case 1: {
+        return this;
+      }
+      default: {
+        throw Error(`Can not convert CID version ${this.version} to version 1. This is a bug please report`);
+      }
+    }
+  }
+  equals(other) {
+    return _CID.equals(this, other);
+  }
+  static equals(self, other) {
+    const unknown = other;
+    return unknown != null && self.code === unknown.code && self.version === unknown.version && equals2(self.multihash, unknown.multihash);
+  }
+  toString(base3) {
+    return format(this, base3);
+  }
+  toJSON() {
+    return { "/": format(this) };
+  }
+  link() {
+    return this;
+  }
+  [Symbol.toStringTag] = "CID";
+  // Legacy
+  [Symbol.for("nodejs.util.inspect.custom")]() {
+    return `CID(${this.toString()})`;
+  }
+  /**
+   * Takes any input `value` and returns a `CID` instance if it was
+   * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
+   * it will return value back. If `value` is not instance of this CID
+   * class, but is compatible CID it will return new instance of this
+   * `CID` class. Otherwise returns null.
+   *
+   * This allows two different incompatible versions of CID library to
+   * co-exist and interop as long as binary interface is compatible.
+   */
+  static asCID(input) {
+    if (input == null) {
+      return null;
+    }
+    const value = input;
+    if (value instanceof _CID) {
+      return value;
+    } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
+      const { version, code: code5, multihash, bytes } = value;
+      return new _CID(version, code5, multihash, bytes ?? encodeCID(version, code5, multihash.bytes));
+    } else if (value[cidSymbol] === true) {
+      const { version, multihash, code: code5 } = value;
+      const digest2 = decode5(multihash);
+      return _CID.create(version, code5, digest2);
+    } else {
+      return null;
+    }
+  }
+  /**
+   * @param version - Version of the CID
+   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
+   * @param digest - (Multi)hash of the of the content.
+   */
+  static create(version, code5, digest2) {
+    if (typeof code5 !== "number") {
+      throw new Error("String codecs are no longer supported");
+    }
+    if (!(digest2.bytes instanceof Uint8Array)) {
+      throw new Error("Invalid digest");
+    }
+    switch (version) {
+      case 0: {
+        if (code5 !== DAG_PB_CODE) {
+          throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`);
+        } else {
+          return new _CID(version, code5, digest2, digest2.bytes);
+        }
+      }
+      case 1: {
+        const bytes = encodeCID(version, code5, digest2.bytes);
+        return new _CID(version, code5, digest2, bytes);
+      }
+      default: {
+        throw new Error("Invalid version");
+      }
+    }
+  }
+  /**
+   * Simplified version of `create` for CIDv0.
+   */
+  static createV0(digest2) {
+    return _CID.create(0, DAG_PB_CODE, digest2);
+  }
+  /**
+   * Simplified version of `create` for CIDv1.
+   *
+   * @param code - Content encoding format code.
+   * @param digest - Multihash of the content.
+   */
+  static createV1(code5, digest2) {
+    return _CID.create(1, code5, digest2);
+  }
+  /**
+   * Decoded a CID from its binary representation. The byte array must contain
+   * only the CID with no additional bytes.
+   *
+   * An error will be thrown if the bytes provided do not contain a valid
+   * binary representation of a CID.
+   */
+  static decode(bytes) {
+    const [cid, remainder] = _CID.decodeFirst(bytes);
+    if (remainder.length !== 0) {
+      throw new Error("Incorrect length");
+    }
+    return cid;
+  }
+  /**
+   * Decoded a CID from its binary representation at the beginning of a byte
+   * array.
+   *
+   * Returns an array with the first element containing the CID and the second
+   * element containing the remainder of the original byte array. The remainder
+   * will be a zero-length byte array if the provided bytes only contained a
+   * binary CID representation.
+   */
+  static decodeFirst(bytes) {
+    const specs = _CID.inspectBytes(bytes);
+    const prefixSize = specs.size - specs.multihashSize;
+    const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize));
+    if (multihashBytes.byteLength !== specs.multihashSize) {
+      throw new Error("Incorrect length");
+    }
+    const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize);
+    const digest2 = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes);
+    const cid = specs.version === 0 ? _CID.createV0(digest2) : _CID.createV1(specs.codec, digest2);
+    return [cid, bytes.subarray(specs.size)];
+  }
+  /**
+   * Inspect the initial bytes of a CID to determine its properties.
+   *
+   * Involves decoding up to 4 varints. Typically this will require only 4 to 6
+   * bytes but for larger multicodec code values and larger multihash digest
+   * lengths these varints can be quite large. It is recommended that at least
+   * 10 bytes be made available in the `initialBytes` argument for a complete
+   * inspection.
+   */
+  static inspectBytes(initialBytes) {
+    let offset = 0;
+    const next = () => {
+      const [i, length4] = decode4(initialBytes.subarray(offset));
+      offset += length4;
+      return i;
+    };
+    let version = next();
+    let codec = DAG_PB_CODE;
+    if (version === 18) {
+      version = 0;
+      offset = 0;
+    } else {
+      codec = next();
+    }
+    if (version !== 0 && version !== 1) {
+      throw new RangeError(`Invalid CID version ${version}`);
+    }
+    const prefixSize = offset;
+    const multihashCode = next();
+    const digestSize = next();
+    const size = offset + digestSize;
+    const multihashSize = size - prefixSize;
+    return { version, codec, multihashCode, digestSize, multihashSize, size };
+  }
+  /**
+   * Takes cid in a string representation and creates an instance. If `base`
+   * decoder is not provided will use a default from the configuration. It will
+   * throw an error if encoding of the CID is not compatible with supplied (or
+   * a default decoder).
+   */
+  static parse(source, base3) {
+    const [prefix, bytes] = parseCIDtoBytes(source, base3);
+    const cid = _CID.decode(bytes);
+    if (cid.version === 0 && source[0] !== "Q") {
+      throw Error("Version 0 CID string must not include multibase prefix");
+    }
+    baseCache(cid).set(prefix, source);
+    return cid;
+  }
+};
+function parseCIDtoBytes(source, base3) {
+  switch (source[0]) {
+    case "Q": {
+      const decoder = base3 ?? base58btc;
+      return [
+        base58btc.prefix,
+        decoder.decode(`${base58btc.prefix}${source}`)
+      ];
+    }
+    case base58btc.prefix: {
+      const decoder = base3 ?? base58btc;
+      return [base58btc.prefix, decoder.decode(source)];
+    }
+    case base32.prefix: {
+      const decoder = base3 ?? base32;
+      return [base32.prefix, decoder.decode(source)];
+    }
+    default: {
+      if (base3 == null) {
+        throw Error("To parse non base32 or base58btc encoded CID multibase decoder must be provided");
+      }
+      return [source[0], base3.decode(source)];
+    }
+  }
+}
+function toStringV0(bytes, cache3, base3) {
+  const { prefix } = base3;
+  if (prefix !== base58btc.prefix) {
+    throw Error(`Cannot string encode V0 in ${base3.name} encoding`);
+  }
+  const cid = cache3.get(prefix);
+  if (cid == null) {
+    const cid2 = base3.encode(bytes).slice(1);
+    cache3.set(prefix, cid2);
+    return cid2;
+  } else {
+    return cid;
+  }
+}
+function toStringV1(bytes, cache3, base3) {
+  const { prefix } = base3;
+  const cid = cache3.get(prefix);
+  if (cid == null) {
+    const cid2 = base3.encode(bytes);
+    cache3.set(prefix, cid2);
+    return cid2;
+  } else {
+    return cid;
+  }
+}
+var DAG_PB_CODE = 112;
+var SHA_256_CODE = 18;
+function encodeCID(version, code5, multihash) {
+  const codeOffset = encodingLength(version);
+  const hashOffset = codeOffset + encodingLength(code5);
+  const bytes = new Uint8Array(hashOffset + multihash.byteLength);
+  encodeTo(version, bytes, 0);
+  encodeTo(code5, bytes, codeOffset);
+  bytes.set(multihash, hashOffset);
+  return bytes;
+}
+var cidSymbol = Symbol.for("@ipld/js-cid/CID");
+
+// node_modules/@ipld/dag-cbor/src/index.js
+var CID_CBOR_TAG = 42;
+function cidEncoder(obj) {
+  if (obj.asCID !== obj && obj["/"] !== obj.bytes) {
+    return null;
+  }
+  const cid = CID.asCID(obj);
+  if (!cid) {
+    return null;
+  }
+  const bytes = new Uint8Array(cid.bytes.byteLength + 1);
+  bytes.set(cid.bytes, 1);
+  return [
+    new Token(Type.tag, CID_CBOR_TAG),
+    new Token(Type.bytes, bytes)
+  ];
+}
+function undefinedEncoder() {
+  throw new Error("`undefined` is not supported by the IPLD Data Model and cannot be encoded");
+}
+function numberEncoder(num) {
+  if (Number.isNaN(num)) {
+    throw new Error("`NaN` is not supported by the IPLD Data Model and cannot be encoded");
+  }
+  if (num === Infinity || num === -Infinity) {
+    throw new Error("`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded");
+  }
+  return null;
+}
+var _encodeOptions = {
+  float64: true,
+  typeEncoders: {
+    Object: cidEncoder,
+    undefined: undefinedEncoder,
+    number: numberEncoder
+  }
+};
+var encodeOptions = {
+  ..._encodeOptions,
+  typeEncoders: {
+    ..._encodeOptions.typeEncoders
+  }
+};
+function cidDecoder(bytes) {
+  if (bytes[0] !== 0) {
+    throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
+  }
+  return CID.decode(bytes.subarray(1));
+}
+var _decodeOptions = {
+  allowIndefinite: false,
+  coerceUndefinedToNull: true,
+  allowNaN: false,
+  allowInfinity: false,
+  allowBigInt: true,
+  // this will lead to BigInt for ints outside of
+  // safe-integer range, which may surprise users
+  strict: true,
+  useMaps: false,
+  rejectDuplicateMapKeys: true,
+  /** @type {import('cborg').TagDecoder[]} */
+  tags: []
+};
+_decodeOptions.tags[CID_CBOR_TAG] = cidDecoder;
+var decodeOptions = {
+  ..._decodeOptions,
+  tags: _decodeOptions.tags.slice()
+};
+var code = 113;
+var encode4 = (node) => encode(node, _encodeOptions);
+var decode6 = (data) => decode(data, _decodeOptions);
+
+// node_modules/multiformats/src/bases/base32.js
+var base32_exports = {};
+__export(base32_exports, {
+  base32: () => base322,
+  base32hex: () => base32hex2,
+  base32hexpad: () => base32hexpad2,
+  base32hexpadupper: () => base32hexpadupper2,
+  base32hexupper: () => base32hexupper2,
+  base32pad: () => base32pad2,
+  base32padupper: () => base32padupper2,
+  base32upper: () => base32upper2,
+  base32z: () => base32z2
+});
+
+// node_modules/multiformats/vendor/base-x.js
+function base2(ALPHABET, name4) {
+  if (ALPHABET.length >= 255) {
+    throw new TypeError("Alphabet too long");
+  }
+  var BASE_MAP = new Uint8Array(256);
+  for (var j = 0; j < BASE_MAP.length; j++) {
+    BASE_MAP[j] = 255;
+  }
+  for (var i = 0; i < ALPHABET.length; i++) {
+    var x = ALPHABET.charAt(i);
+    var xc = x.charCodeAt(0);
+    if (BASE_MAP[xc] !== 255) {
+      throw new TypeError(x + " is ambiguous");
+    }
+    BASE_MAP[xc] = i;
+  }
+  var BASE = ALPHABET.length;
+  var LEADER = ALPHABET.charAt(0);
+  var FACTOR = Math.log(BASE) / Math.log(256);
+  var iFACTOR = Math.log(256) / Math.log(BASE);
+  function encode12(source) {
+    if (source instanceof Uint8Array)
+      ;
+    else if (ArrayBuffer.isView(source)) {
+      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
+    } else if (Array.isArray(source)) {
+      source = Uint8Array.from(source);
+    }
+    if (!(source instanceof Uint8Array)) {
+      throw new TypeError("Expected Uint8Array");
+    }
+    if (source.length === 0) {
+      return "";
+    }
+    var zeroes = 0;
+    var length4 = 0;
+    var pbegin = 0;
+    var pend = source.length;
+    while (pbegin !== pend && source[pbegin] === 0) {
+      pbegin++;
+      zeroes++;
+    }
+    var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
+    var b58 = new Uint8Array(size);
+    while (pbegin !== pend) {
+      var carry = source[pbegin];
+      var i2 = 0;
+      for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) {
+        carry += 256 * b58[it1] >>> 0;
+        b58[it1] = carry % BASE >>> 0;
+        carry = carry / BASE >>> 0;
+      }
+      if (carry !== 0) {
+        throw new Error("Non-zero carry");
+      }
+      length4 = i2;
+      pbegin++;
+    }
+    var it2 = size - length4;
+    while (it2 !== size && b58[it2] === 0) {
+      it2++;
+    }
+    var str = LEADER.repeat(zeroes);
+    for (; it2 < size; ++it2) {
+      str += ALPHABET.charAt(b58[it2]);
+    }
+    return str;
+  }
+  function decodeUnsafe(source) {
+    if (typeof source !== "string") {
+      throw new TypeError("Expected String");
+    }
+    if (source.length === 0) {
+      return new Uint8Array();
+    }
+    var psz = 0;
+    if (source[psz] === " ") {
+      return;
+    }
+    var zeroes = 0;
+    var length4 = 0;
+    while (source[psz] === LEADER) {
+      zeroes++;
+      psz++;
+    }
+    var size = (source.length - psz) * FACTOR + 1 >>> 0;
+    var b256 = new Uint8Array(size);
+    while (source[psz]) {
+      var carry = BASE_MAP[source.charCodeAt(psz)];
+      if (carry === 255) {
+        return;
+      }
+      var i2 = 0;
+      for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) {
+        carry += BASE * b256[it3] >>> 0;
+        b256[it3] = carry % 256 >>> 0;
+        carry = carry / 256 >>> 0;
+      }
+      if (carry !== 0) {
+        throw new Error("Non-zero carry");
+      }
+      length4 = i2;
+      psz++;
+    }
+    if (source[psz] === " ") {
+      return;
+    }
+    var it4 = size - length4;
+    while (it4 !== size && b256[it4] === 0) {
+      it4++;
+    }
+    var vch = new Uint8Array(zeroes + (size - it4));
+    var j2 = zeroes;
+    while (it4 !== size) {
+      vch[j2++] = b256[it4++];
+    }
+    return vch;
+  }
+  function decode15(string2) {
+    var buffer2 = decodeUnsafe(string2);
+    if (buffer2) {
+      return buffer2;
+    }
+    throw new Error(`Non-${name4} character`);
+  }
+  return {
+    encode: encode12,
+    decodeUnsafe,
+    decode: decode15
+  };
+}
+var src2 = base2;
+var _brrp__multiformats_scope_baseX2 = src2;
+var base_x_default2 = _brrp__multiformats_scope_baseX2;
+
+// node_modules/multiformats/src/bytes.js
+var bytes_exports2 = {};
+__export(bytes_exports2, {
+  coerce: () => coerce2,
+  empty: () => empty2,
+  equals: () => equals3,
+  fromHex: () => fromHex,
+  fromString: () => fromString2,
+  isBinary: () => isBinary,
+  toHex: () => toHex,
+  toString: () => toString2
+});
+var empty2 = new Uint8Array(0);
+var toHex = (d) => d.reduce((hex, byte) => hex + byte.toString(16).padStart(2, "0"), "");
+var fromHex = (hex) => {
+  const hexes = hex.match(/../g);
+  return hexes ? new Uint8Array(hexes.map((b) => parseInt(b, 16))) : empty2;
+};
+var equals3 = (aa, bb) => {
+  if (aa === bb)
+    return true;
+  if (aa.byteLength !== bb.byteLength) {
+    return false;
+  }
+  for (let ii = 0; ii < aa.byteLength; ii++) {
+    if (aa[ii] !== bb[ii]) {
+      return false;
+    }
+  }
+  return true;
+};
+var coerce2 = (o) => {
+  if (o instanceof Uint8Array && o.constructor.name === "Uint8Array")
+    return o;
+  if (o instanceof ArrayBuffer)
+    return new Uint8Array(o);
+  if (ArrayBuffer.isView(o)) {
+    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
+  }
+  throw new Error("Unknown type, must be binary type");
+};
+var isBinary = (o) => o instanceof ArrayBuffer || ArrayBuffer.isView(o);
+var fromString2 = (str) => new TextEncoder().encode(str);
+var toString2 = (b) => new TextDecoder().decode(b);
+
+// node_modules/multiformats/src/bases/base.js
+var Encoder2 = class {
+  /**
+   * @param {Base} name
+   * @param {Prefix} prefix
+   * @param {(bytes:Uint8Array) => string} baseEncode
+   */
+  constructor(name4, prefix, baseEncode) {
+    this.name = name4;
+    this.prefix = prefix;
+    this.baseEncode = baseEncode;
+  }
+  /**
+   * @param {Uint8Array} bytes
+   * @returns {API.Multibase}
+   */
+  encode(bytes) {
+    if (bytes instanceof Uint8Array) {
+      return `${this.prefix}${this.baseEncode(bytes)}`;
+    } else {
+      throw Error("Unknown type, must be binary type");
+    }
+  }
+};
+var Decoder2 = class {
+  /**
+   * @param {Base} name
+   * @param {Prefix} prefix
+   * @param {(text:string) => Uint8Array} baseDecode
+   */
+  constructor(name4, prefix, baseDecode) {
+    this.name = name4;
+    this.prefix = prefix;
+    if (prefix.codePointAt(0) === void 0) {
+      throw new Error("Invalid prefix character");
+    }
+    this.prefixCodePoint = /** @type {number} */
+    prefix.codePointAt(0);
+    this.baseDecode = baseDecode;
+  }
+  /**
+   * @param {string} text
+   */
+  decode(text) {
+    if (typeof text === "string") {
+      if (text.codePointAt(0) !== this.prefixCodePoint) {
+        throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
+      }
+      return this.baseDecode(text.slice(this.prefix.length));
+    } else {
+      throw Error("Can only multibase decode strings");
+    }
+  }
+  /**
+   * @template {string} OtherPrefix
+   * @param {API.UnibaseDecoder|ComposedDecoder} decoder
+   * @returns {ComposedDecoder}
+   */
+  or(decoder) {
+    return or2(this, decoder);
+  }
+};
+var ComposedDecoder2 = class {
+  /**
+   * @param {Decoders} decoders
+   */
+  constructor(decoders) {
+    this.decoders = decoders;
+  }
+  /**
+   * @template {string} OtherPrefix
+   * @param {API.UnibaseDecoder|ComposedDecoder} decoder
+   * @returns {ComposedDecoder}
+   */
+  or(decoder) {
+    return or2(this, decoder);
+  }
+  /**
+   * @param {string} input
+   * @returns {Uint8Array}
+   */
+  decode(input) {
+    const prefix = (
+      /** @type {Prefix} */
+      input[0]
+    );
+    const decoder = this.decoders[prefix];
+    if (decoder) {
+      return decoder.decode(input);
+    } else {
+      throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
+    }
+  }
+};
+var or2 = (left, right) => new ComposedDecoder2(
+  /** @type {Decoders} */
+  {
+    ...left.decoders || { [
+      /** @type API.UnibaseDecoder */
+      left.prefix
+    ]: left },
+    ...right.decoders || { [
+      /** @type API.UnibaseDecoder */
+      right.prefix
+    ]: right }
+  }
+);
+var Codec2 = class {
+  /**
+   * @param {Base} name
+   * @param {Prefix} prefix
+   * @param {(bytes:Uint8Array) => string} baseEncode
+   * @param {(text:string) => Uint8Array} baseDecode
+   */
+  constructor(name4, prefix, baseEncode, baseDecode) {
+    this.name = name4;
+    this.prefix = prefix;
+    this.baseEncode = baseEncode;
+    this.baseDecode = baseDecode;
+    this.encoder = new Encoder2(name4, prefix, baseEncode);
+    this.decoder = new Decoder2(name4, prefix, baseDecode);
+  }
+  /**
+   * @param {Uint8Array} input
+   */
+  encode(input) {
+    return this.encoder.encode(input);
+  }
+  /**
+   * @param {string} input
+   */
+  decode(input) {
+    return this.decoder.decode(input);
+  }
+};
+var from2 = ({ name: name4, prefix, encode: encode12, decode: decode15 }) => new Codec2(name4, prefix, encode12, decode15);
+var baseX2 = ({ prefix, name: name4, alphabet: alphabet2 }) => {
+  const { encode: encode12, decode: decode15 } = base_x_default2(alphabet2, name4);
+  return from2({
+    prefix,
+    name: name4,
+    encode: encode12,
+    /**
+     * @param {string} text
+     */
+    decode: (text) => coerce2(decode15(text))
+  });
+};
+var decode7 = (string2, alphabet2, bitsPerChar, name4) => {
+  const codes = {};
+  for (let i = 0; i < alphabet2.length; ++i) {
+    codes[alphabet2[i]] = i;
+  }
+  let end = string2.length;
+  while (string2[end - 1] === "=") {
+    --end;
+  }
+  const out = new Uint8Array(end * bitsPerChar / 8 | 0);
+  let bits = 0;
+  let buffer2 = 0;
+  let written = 0;
+  for (let i = 0; i < end; ++i) {
+    const value = codes[string2[i]];
+    if (value === void 0) {
+      throw new SyntaxError(`Non-${name4} character`);
+    }
+    buffer2 = buffer2 << bitsPerChar | value;
+    bits += bitsPerChar;
+    if (bits >= 8) {
+      bits -= 8;
+      out[written++] = 255 & buffer2 >> bits;
+    }
+  }
+  if (bits >= bitsPerChar || 255 & buffer2 << 8 - bits) {
+    throw new SyntaxError("Unexpected end of data");
+  }
+  return out;
+};
+var encode5 = (data, alphabet2, bitsPerChar) => {
+  const pad = alphabet2[alphabet2.length - 1] === "=";
+  const mask = (1 << bitsPerChar) - 1;
+  let out = "";
+  let bits = 0;
+  let buffer2 = 0;
+  for (let i = 0; i < data.length; ++i) {
+    buffer2 = buffer2 << 8 | data[i];
+    bits += 8;
+    while (bits > bitsPerChar) {
+      bits -= bitsPerChar;
+      out += alphabet2[mask & buffer2 >> bits];
+    }
+  }
+  if (bits) {
+    out += alphabet2[mask & buffer2 << bitsPerChar - bits];
+  }
+  if (pad) {
+    while (out.length * bitsPerChar & 7) {
+      out += "=";
+    }
+  }
+  return out;
+};
+var rfc46482 = ({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) => {
+  return from2({
+    prefix,
+    name: name4,
+    encode(input) {
+      return encode5(input, alphabet2, bitsPerChar);
+    },
+    decode(input) {
+      return decode7(input, alphabet2, bitsPerChar, name4);
+    }
+  });
+};
+
+// node_modules/multiformats/src/bases/base32.js
+var base322 = rfc46482({
+  prefix: "b",
+  name: "base32",
+  alphabet: "abcdefghijklmnopqrstuvwxyz234567",
+  bitsPerChar: 5
+});
+var base32upper2 = rfc46482({
+  prefix: "B",
+  name: "base32upper",
+  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
+  bitsPerChar: 5
+});
+var base32pad2 = rfc46482({
+  prefix: "c",
+  name: "base32pad",
+  alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
+  bitsPerChar: 5
+});
+var base32padupper2 = rfc46482({
+  prefix: "C",
+  name: "base32padupper",
+  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
+  bitsPerChar: 5
+});
+var base32hex2 = rfc46482({
+  prefix: "v",
+  name: "base32hex",
+  alphabet: "0123456789abcdefghijklmnopqrstuv",
+  bitsPerChar: 5
+});
+var base32hexupper2 = rfc46482({
+  prefix: "V",
+  name: "base32hexupper",
+  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
+  bitsPerChar: 5
+});
+var base32hexpad2 = rfc46482({
+  prefix: "t",
+  name: "base32hexpad",
+  alphabet: "0123456789abcdefghijklmnopqrstuv=",
+  bitsPerChar: 5
+});
+var base32hexpadupper2 = rfc46482({
+  prefix: "T",
+  name: "base32hexpadupper",
+  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
+  bitsPerChar: 5
+});
+var base32z2 = rfc46482({
+  prefix: "h",
+  name: "base32z",
+  alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
+  bitsPerChar: 5
+});
+
+// node_modules/multiformats/src/bases/base58.js
+var base58_exports = {};
+__export(base58_exports, {
+  base58btc: () => base58btc2,
+  base58flickr: () => base58flickr2
+});
+var base58btc2 = baseX2({
+  name: "base58btc",
+  prefix: "z",
+  alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
+});
+var base58flickr2 = baseX2({
+  name: "base58flickr",
+  prefix: "Z",
+  alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
+});
+
+// node_modules/multiformats/vendor/varint.js
+var encode_12 = encode6;
+var MSB2 = 128;
+var REST2 = 127;
+var MSBALL2 = ~REST2;
+var INT2 = Math.pow(2, 31);
+function encode6(num, out, offset) {
+  out = out || [];
+  offset = offset || 0;
+  var oldOffset = offset;
+  while (num >= INT2) {
+    out[offset++] = num & 255 | MSB2;
+    num /= 128;
+  }
+  while (num & MSBALL2) {
+    out[offset++] = num & 255 | MSB2;
+    num >>>= 7;
+  }
+  out[offset] = num | 0;
+  encode6.bytes = offset - oldOffset + 1;
+  return out;
+}
+var decode8 = read2;
+var MSB$12 = 128;
+var REST$12 = 127;
+function read2(buf2, offset) {
+  var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
+  do {
+    if (counter >= l) {
+      read2.bytes = 0;
+      throw new RangeError("Could not decode varint");
+    }
+    b = buf2[counter++];
+    res += shift < 28 ? (b & REST$12) << shift : (b & REST$12) * Math.pow(2, shift);
+    shift += 7;
+  } while (b >= MSB$12);
+  read2.bytes = counter - offset;
+  return res;
+}
+var N12 = Math.pow(2, 7);
+var N22 = Math.pow(2, 14);
+var N32 = Math.pow(2, 21);
+var N42 = Math.pow(2, 28);
+var N52 = Math.pow(2, 35);
+var N62 = Math.pow(2, 42);
+var N72 = Math.pow(2, 49);
+var N82 = Math.pow(2, 56);
+var N92 = Math.pow(2, 63);
+var length2 = function(value) {
+  return value < N12 ? 1 : value < N22 ? 2 : value < N32 ? 3 : value < N42 ? 4 : value < N52 ? 5 : value < N62 ? 6 : value < N72 ? 7 : value < N82 ? 8 : value < N92 ? 9 : 10;
+};
+var varint2 = {
+  encode: encode_12,
+  decode: decode8,
+  encodingLength: length2
+};
+var _brrp_varint2 = varint2;
+var varint_default2 = _brrp_varint2;
+
+// node_modules/multiformats/src/varint.js
+var decode9 = (data, offset = 0) => {
+  const code5 = varint_default2.decode(data, offset);
+  return [code5, varint_default2.decode.bytes];
+};
+var encodeTo2 = (int, target, offset = 0) => {
+  varint_default2.encode(int, target, offset);
+  return target;
+};
+var encodingLength2 = (int) => {
+  return varint_default2.encodingLength(int);
+};
+
+// node_modules/multiformats/src/hashes/digest.js
+var create2 = (code5, digest2) => {
+  const size = digest2.byteLength;
+  const sizeOffset = encodingLength2(code5);
+  const digestOffset = sizeOffset + encodingLength2(size);
+  const bytes = new Uint8Array(digestOffset + size);
+  encodeTo2(code5, bytes, 0);
+  encodeTo2(size, bytes, sizeOffset);
+  bytes.set(digest2, digestOffset);
+  return new Digest2(code5, size, digest2, bytes);
+};
+var decode10 = (multihash) => {
+  const bytes = coerce2(multihash);
+  const [code5, sizeOffset] = decode9(bytes);
+  const [size, digestOffset] = decode9(bytes.subarray(sizeOffset));
+  const digest2 = bytes.subarray(sizeOffset + digestOffset);
+  if (digest2.byteLength !== size) {
+    throw new Error("Incorrect length");
+  }
+  return new Digest2(code5, size, digest2, bytes);
+};
+var equals4 = (a, b) => {
+  if (a === b) {
+    return true;
+  } else {
+    const data = (
+      /** @type {{code?:unknown, size?:unknown, bytes?:unknown}} */
+      b
+    );
+    return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals3(a.bytes, data.bytes);
+  }
+};
+var Digest2 = class {
+  /**
+   * Creates a multihash digest.
+   *
+   * @param {Code} code
+   * @param {Size} size
+   * @param {Uint8Array} digest
+   * @param {Uint8Array} bytes
+   */
+  constructor(code5, size, digest2, bytes) {
+    this.code = code5;
+    this.size = size;
+    this.digest = digest2;
+    this.bytes = bytes;
+  }
+};
+
+// node_modules/multiformats/src/cid.js
+var format2 = (link, base3) => {
+  const { bytes, version } = link;
+  switch (version) {
+    case 0:
+      return toStringV02(
+        bytes,
+        baseCache2(link),
+        /** @type {API.MultibaseEncoder<"z">} */
+        base3 || base58btc2.encoder
+      );
+    default:
+      return toStringV12(
+        bytes,
+        baseCache2(link),
+        /** @type {API.MultibaseEncoder} */
+        base3 || base322.encoder
+      );
+  }
+};
+var cache2 = /* @__PURE__ */ new WeakMap();
+var baseCache2 = (cid) => {
+  const baseCache3 = cache2.get(cid);
+  if (baseCache3 == null) {
+    const baseCache4 = /* @__PURE__ */ new Map();
+    cache2.set(cid, baseCache4);
+    return baseCache4;
+  }
+  return baseCache3;
+};
+var CID2 = class _CID {
+  /**
+   * @param {Version} version - Version of the CID
+   * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
+   * @param {API.MultihashDigest} multihash - (Multi)hash of the of the content.
+   * @param {Uint8Array} bytes
+   */
+  constructor(version, code5, multihash, bytes) {
+    this.code = code5;
+    this.version = version;
+    this.multihash = multihash;
+    this.bytes = bytes;
+    this["/"] = bytes;
+  }
+  /**
+   * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
+   * please either use `CID.asCID(cid)` or switch to new signalling mechanism
+   *
+   * @deprecated
+   */
+  get asCID() {
+    return this;
+  }
+  // ArrayBufferView
+  get byteOffset() {
+    return this.bytes.byteOffset;
+  }
+  // ArrayBufferView
+  get byteLength() {
+    return this.bytes.byteLength;
+  }
+  /**
+   * @returns {CID}
+   */
+  toV0() {
+    switch (this.version) {
+      case 0: {
+        return (
+          /** @type {CID} */
+          this
+        );
+      }
+      case 1: {
+        const { code: code5, multihash } = this;
+        if (code5 !== DAG_PB_CODE2) {
+          throw new Error("Cannot convert a non dag-pb CID to CIDv0");
+        }
+        if (multihash.code !== SHA_256_CODE2) {
+          throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
+        }
+        return (
+          /** @type {CID} */
+          _CID.createV0(
+            /** @type {API.MultihashDigest} */
+            multihash
+          )
+        );
+      }
+      default: {
+        throw Error(
+          `Can not convert CID version ${this.version} to version 0. This is a bug please report`
+        );
+      }
+    }
+  }
+  /**
+   * @returns {CID}
+   */
+  toV1() {
+    switch (this.version) {
+      case 0: {
+        const { code: code5, digest: digest2 } = this.multihash;
+        const multihash = create2(code5, digest2);
+        return (
+          /** @type {CID} */
+          _CID.createV1(this.code, multihash)
+        );
+      }
+      case 1: {
+        return (
+          /** @type {CID} */
+          this
+        );
+      }
+      default: {
+        throw Error(
+          `Can not convert CID version ${this.version} to version 1. This is a bug please report`
+        );
+      }
+    }
+  }
+  /**
+   * @param {unknown} other
+   * @returns {other is CID}
+   */
+  equals(other) {
+    return _CID.equals(this, other);
+  }
+  /**
+   * @template {unknown} Data
+   * @template {number} Format
+   * @template {number} Alg
+   * @template {API.Version} Version
+   * @param {API.Link} self
+   * @param {unknown} other
+   * @returns {other is CID}
+   */
+  static equals(self, other) {
+    const unknown = (
+      /** @type {{code?:unknown, version?:unknown, multihash?:unknown}} */
+      other
+    );
+    return unknown && self.code === unknown.code && self.version === unknown.version && equals4(self.multihash, unknown.multihash);
+  }
+  /**
+   * @param {API.MultibaseEncoder} [base]
+   * @returns {string}
+   */
+  toString(base3) {
+    return format2(this, base3);
+  }
+  /**
+   * @returns {API.LinkJSON}
+   */
+  toJSON() {
+    return { "/": format2(this) };
+  }
+  link() {
+    return this;
+  }
+  get [Symbol.toStringTag]() {
+    return "CID";
+  }
+  // Legacy
+  [Symbol.for("nodejs.util.inspect.custom")]() {
+    return `CID(${this.toString()})`;
+  }
+  /**
+   * Takes any input `value` and returns a `CID` instance if it was
+   * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
+   * it will return value back. If `value` is not instance of this CID
+   * class, but is compatible CID it will return new instance of this
+   * `CID` class. Otherwise returns null.
+   *
+   * This allows two different incompatible versions of CID library to
+   * co-exist and interop as long as binary interface is compatible.
+   *
+   * @template {unknown} Data
+   * @template {number} Format
+   * @template {number} Alg
+   * @template {API.Version} Version
+   * @template {unknown} U
+   * @param {API.Link|U} input
+   * @returns {CID|null}
+   */
+  static asCID(input) {
+    if (input == null) {
+      return null;
+    }
+    const value = (
+      /** @type {any} */
+      input
+    );
+    if (value instanceof _CID) {
+      return value;
+    } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
+      const { version, code: code5, multihash, bytes } = value;
+      return new _CID(
+        version,
+        code5,
+        /** @type {API.MultihashDigest} */
+        multihash,
+        bytes || encodeCID2(version, code5, multihash.bytes)
+      );
+    } else if (value[cidSymbol2] === true) {
+      const { version, multihash, code: code5 } = value;
+      const digest2 = (
+        /** @type {API.MultihashDigest} */
+        decode10(multihash)
+      );
+      return _CID.create(version, code5, digest2);
+    } else {
+      return null;
+    }
+  }
+  /**
+   *
+   * @template {unknown} Data
+   * @template {number} Format
+   * @template {number} Alg
+   * @template {API.Version} Version
+   * @param {Version} version - Version of the CID
+   * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
+   * @param {API.MultihashDigest} digest - (Multi)hash of the of the content.
+   * @returns {CID}
+   */
+  static create(version, code5, digest2) {
+    if (typeof code5 !== "number") {
+      throw new Error("String codecs are no longer supported");
+    }
+    if (!(digest2.bytes instanceof Uint8Array)) {
+      throw new Error("Invalid digest");
+    }
+    switch (version) {
+      case 0: {
+        if (code5 !== DAG_PB_CODE2) {
+          throw new Error(
+            `Version 0 CID must use dag-pb (code: ${DAG_PB_CODE2}) block encoding`
+          );
+        } else {
+          return new _CID(version, code5, digest2, digest2.bytes);
+        }
+      }
+      case 1: {
+        const bytes = encodeCID2(version, code5, digest2.bytes);
+        return new _CID(version, code5, digest2, bytes);
+      }
+      default: {
+        throw new Error("Invalid version");
+      }
+    }
+  }
+  /**
+   * Simplified version of `create` for CIDv0.
+   *
+   * @template {unknown} [T=unknown]
+   * @param {API.MultihashDigest} digest - Multihash.
+   * @returns {CID}
+   */
+  static createV0(digest2) {
+    return _CID.create(0, DAG_PB_CODE2, digest2);
+  }
+  /**
+   * Simplified version of `create` for CIDv1.
+   *
+   * @template {unknown} Data
+   * @template {number} Code
+   * @template {number} Alg
+   * @param {Code} code - Content encoding format code.
+   * @param {API.MultihashDigest} digest - Miltihash of the content.
+   * @returns {CID}
+   */
+  static createV1(code5, digest2) {
+    return _CID.create(1, code5, digest2);
+  }
+  /**
+   * Decoded a CID from its binary representation. The byte array must contain
+   * only the CID with no additional bytes.
+   *
+   * An error will be thrown if the bytes provided do not contain a valid
+   * binary representation of a CID.
+   *
+   * @template {unknown} Data
+   * @template {number} Code
+   * @template {number} Alg
+   * @template {API.Version} Ver
+   * @param {API.ByteView>} bytes
+   * @returns {CID}
+   */
+  static decode(bytes) {
+    const [cid, remainder] = _CID.decodeFirst(bytes);
+    if (remainder.length) {
+      throw new Error("Incorrect length");
+    }
+    return cid;
+  }
+  /**
+   * Decoded a CID from its binary representation at the beginning of a byte
+   * array.
+   *
+   * Returns an array with the first element containing the CID and the second
+   * element containing the remainder of the original byte array. The remainder
+   * will be a zero-length byte array if the provided bytes only contained a
+   * binary CID representation.
+   *
+   * @template {unknown} T
+   * @template {number} C
+   * @template {number} A
+   * @template {API.Version} V
+   * @param {API.ByteView>} bytes
+   * @returns {[CID, Uint8Array]}
+   */
+  static decodeFirst(bytes) {
+    const specs = _CID.inspectBytes(bytes);
+    const prefixSize = specs.size - specs.multihashSize;
+    const multihashBytes = coerce2(
+      bytes.subarray(prefixSize, prefixSize + specs.multihashSize)
+    );
+    if (multihashBytes.byteLength !== specs.multihashSize) {
+      throw new Error("Incorrect length");
+    }
+    const digestBytes = multihashBytes.subarray(
+      specs.multihashSize - specs.digestSize
+    );
+    const digest2 = new Digest2(
+      specs.multihashCode,
+      specs.digestSize,
+      digestBytes,
+      multihashBytes
+    );
+    const cid = specs.version === 0 ? _CID.createV0(
+      /** @type {API.MultihashDigest} */
+      digest2
+    ) : _CID.createV1(specs.codec, digest2);
+    return [
+      /** @type {CID} */
+      cid,
+      bytes.subarray(specs.size)
+    ];
+  }
+  /**
+   * Inspect the initial bytes of a CID to determine its properties.
+   *
+   * Involves decoding up to 4 varints. Typically this will require only 4 to 6
+   * bytes but for larger multicodec code values and larger multihash digest
+   * lengths these varints can be quite large. It is recommended that at least
+   * 10 bytes be made available in the `initialBytes` argument for a complete
+   * inspection.
+   *
+   * @template {unknown} T
+   * @template {number} C
+   * @template {number} A
+   * @template {API.Version} V
+   * @param {API.ByteView>} initialBytes
+   * @returns {{ version:V, codec:C, multihashCode:A, digestSize:number, multihashSize:number, size:number }}
+   */
+  static inspectBytes(initialBytes) {
+    let offset = 0;
+    const next = () => {
+      const [i, length4] = decode9(initialBytes.subarray(offset));
+      offset += length4;
+      return i;
+    };
+    let version = (
+      /** @type {V} */
+      next()
+    );
+    let codec = (
+      /** @type {C} */
+      DAG_PB_CODE2
+    );
+    if (
+      /** @type {number} */
+      version === 18
+    ) {
+      version = /** @type {V} */
+      0;
+      offset = 0;
+    } else {
+      codec = /** @type {C} */
+      next();
+    }
+    if (version !== 0 && version !== 1) {
+      throw new RangeError(`Invalid CID version ${version}`);
+    }
+    const prefixSize = offset;
+    const multihashCode = (
+      /** @type {A} */
+      next()
+    );
+    const digestSize = next();
+    const size = offset + digestSize;
+    const multihashSize = size - prefixSize;
+    return { version, codec, multihashCode, digestSize, multihashSize, size };
+  }
+  /**
+   * Takes cid in a string representation and creates an instance. If `base`
+   * decoder is not provided will use a default from the configuration. It will
+   * throw an error if encoding of the CID is not compatible with supplied (or
+   * a default decoder).
+   *
+   * @template {string} Prefix
+   * @template {unknown} Data
+   * @template {number} Code
+   * @template {number} Alg
+   * @template {API.Version} Ver
+   * @param {API.ToString, Prefix>} source
+   * @param {API.MultibaseDecoder} [base]
+   * @returns {CID}
+   */
+  static parse(source, base3) {
+    const [prefix, bytes] = parseCIDtoBytes2(source, base3);
+    const cid = _CID.decode(bytes);
+    if (cid.version === 0 && source[0] !== "Q") {
+      throw Error("Version 0 CID string must not include multibase prefix");
+    }
+    baseCache2(cid).set(prefix, source);
+    return cid;
+  }
+};
+var parseCIDtoBytes2 = (source, base3) => {
+  switch (source[0]) {
+    case "Q": {
+      const decoder = base3 || base58btc2;
+      return [
+        /** @type {Prefix} */
+        base58btc2.prefix,
+        decoder.decode(`${base58btc2.prefix}${source}`)
+      ];
+    }
+    case base58btc2.prefix: {
+      const decoder = base3 || base58btc2;
+      return [
+        /** @type {Prefix} */
+        base58btc2.prefix,
+        decoder.decode(source)
+      ];
+    }
+    case base322.prefix: {
+      const decoder = base3 || base322;
+      return [
+        /** @type {Prefix} */
+        base322.prefix,
+        decoder.decode(source)
+      ];
+    }
+    default: {
+      if (base3 == null) {
+        throw Error(
+          "To parse non base32 or base58btc encoded CID multibase decoder must be provided"
+        );
+      }
+      return [
+        /** @type {Prefix} */
+        source[0],
+        base3.decode(source)
+      ];
+    }
+  }
+};
+var toStringV02 = (bytes, cache3, base3) => {
+  const { prefix } = base3;
+  if (prefix !== base58btc2.prefix) {
+    throw Error(`Cannot string encode V0 in ${base3.name} encoding`);
+  }
+  const cid = cache3.get(prefix);
+  if (cid == null) {
+    const cid2 = base3.encode(bytes).slice(1);
+    cache3.set(prefix, cid2);
+    return cid2;
+  } else {
+    return cid;
+  }
+};
+var toStringV12 = (bytes, cache3, base3) => {
+  const { prefix } = base3;
+  const cid = cache3.get(prefix);
+  if (cid == null) {
+    const cid2 = base3.encode(bytes);
+    cache3.set(prefix, cid2);
+    return cid2;
+  } else {
+    return cid;
+  }
+};
+var DAG_PB_CODE2 = 112;
+var SHA_256_CODE2 = 18;
+var encodeCID2 = (version, code5, multihash) => {
+  const codeOffset = encodingLength2(version);
+  const hashOffset = codeOffset + encodingLength2(code5);
+  const bytes = new Uint8Array(hashOffset + multihash.byteLength);
+  encodeTo2(version, bytes, 0);
+  encodeTo2(code5, bytes, codeOffset);
+  bytes.set(multihash, hashOffset);
+  return bytes;
+};
+var cidSymbol2 = Symbol.for("@ipld/js-cid/CID");
+
+// node_modules/@ipld/car/src/decoder-common.js
+var import_varint3 = __toESM(require_varint(), 1);
+var V2_HEADER_LENGTH = (
+  /* characteristics */
+  16 + 8 + 8 + 8
+);
+function decodeVarint(bytes, seeker) {
+  if (!bytes.length) {
+    throw new Error("Unexpected end of data");
+  }
+  const i = import_varint3.default.decode(bytes);
+  seeker.seek(
+    /** @type {number} */
+    import_varint3.default.decode.bytes
+  );
+  return i;
+}
+function decodeV2Header(bytes) {
+  const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
+  let offset = 0;
+  const header = {
+    version: 2,
+    /** @type {[bigint, bigint]} */
+    characteristics: [
+      dv.getBigUint64(offset, true),
+      dv.getBigUint64(offset += 8, true)
+    ],
+    dataOffset: Number(dv.getBigUint64(offset += 8, true)),
+    dataSize: Number(dv.getBigUint64(offset += 8, true)),
+    indexOffset: Number(dv.getBigUint64(offset += 8, true))
+  };
+  return header;
+}
+
+// node_modules/@ipld/car/src/header-validator.js
+var Kinds = {
+  Null: (
+    /** @returns {undefined|null} */
+    (obj) => obj === null ? obj : void 0
+  ),
+  Int: (
+    /** @returns {undefined|number} */
+    (obj) => Number.isInteger(obj) ? obj : void 0
+  ),
+  Float: (
+    /** @returns {undefined|number} */
+    (obj) => typeof obj === "number" && Number.isFinite(obj) ? obj : void 0
+  ),
+  String: (
+    /** @returns {undefined|string} */
+    (obj) => typeof obj === "string" ? obj : void 0
+  ),
+  Bool: (
+    /** @returns {undefined|boolean} */
+    (obj) => typeof obj === "boolean" ? obj : void 0
+  ),
+  Bytes: (
+    /** @returns {undefined|Uint8Array} */
+    (obj) => obj instanceof Uint8Array ? obj : void 0
+  ),
+  Link: (
+    /** @returns {undefined|object} */
+    (obj) => obj !== null && typeof obj === "object" && obj.asCID === obj ? obj : void 0
+  ),
+  List: (
+    /** @returns {undefined|Array} */
+    (obj) => Array.isArray(obj) ? obj : void 0
+  ),
+  Map: (
+    /** @returns {undefined|object} */
+    (obj) => obj !== null && typeof obj === "object" && obj.asCID !== obj && !Array.isArray(obj) && !(obj instanceof Uint8Array) ? obj : void 0
+  )
+};
// Machine-generated IPLD schema validators for the CAR header (typed form).
// Each validator returns a normalized value on success or `undefined` on
// failure. `ret` aliases the input until a nested validator substitutes a
// value, at which point a copy is made (copy-on-write).
var Types = {
  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
  "CarV1HeaderOrV2Pragma > roots (anon)": (
    /** @returns {undefined|any} */
    (obj) => {
      if (Kinds.List(obj) === void 0) {
        return void 0;
      }
      for (let i = 0; i < obj.length; i++) {
        let v = obj[i];
        v = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
        if (v === void 0) {
          return void 0;
        }
        if (v !== obj[i]) {
          // An element was replaced: copy the validated prefix and re-validate
          // the remaining elements into the fresh array.
          const ret = obj.slice(0, i);
          for (let j = i; j < obj.length; j++) {
            let v2 = obj[j];
            v2 = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
            if (v2 === void 0) {
              return void 0;
            }
            ret.push(v2);
          }
          return ret;
        }
      }
      return obj;
    }
  ),
  Int: Kinds.Int,
  CarV1HeaderOrV2Pragma: (
    /** @returns {undefined|any} */
    (obj) => {
      if (Kinds.Map(obj) === void 0) {
        return void 0;
      }
      const entries = Object.entries(obj);
      let ret = obj;
      // `version` is the only required field.
      let requiredCount = 1;
      for (let i = 0; i < entries.length; i++) {
        const [key, value] = entries[i];
        switch (key) {
          case "roots":
            {
              const v = Types["CarV1HeaderOrV2Pragma > roots (anon)"](obj[key]);
              if (v === void 0) {
                return void 0;
              }
              if (v !== value || ret !== obj) {
                if (ret === obj) {
                  // First substitution: copy the already-validated entries.
                  ret = {};
                  for (let j = 0; j < i; j++) {
                    ret[entries[j][0]] = entries[j][1];
                  }
                }
                ret.roots = v;
              }
            }
            break;
          case "version":
            {
              requiredCount--;
              const v = Types.Int(obj[key]);
              if (v === void 0) {
                return void 0;
              }
              if (v !== value || ret !== obj) {
                if (ret === obj) {
                  // First substitution: copy the already-validated entries.
                  ret = {};
                  for (let j = 0; j < i; j++) {
                    ret[entries[j][0]] = entries[j][1];
                  }
                }
                ret.version = v;
              }
            }
            break;
          default:
            // Unknown keys make the whole header invalid.
            return void 0;
        }
      }
      if (requiredCount > 0) {
        return void 0;
      }
      return ret;
    }
  )
};
// Representation-form counterparts of `Types` above: same copy-on-write
// validation strategy, but nested validators receive the raw entry `value`
// directly rather than re-reading `obj[key]`.
var Reprs = {
  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
  "CarV1HeaderOrV2Pragma > roots (anon)": (
    /** @returns {undefined|any} */
    (obj) => {
      if (Kinds.List(obj) === void 0) {
        return void 0;
      }
      for (let i = 0; i < obj.length; i++) {
        let v = obj[i];
        v = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
        if (v === void 0) {
          return void 0;
        }
        if (v !== obj[i]) {
          // An element was replaced: copy the validated prefix and re-validate
          // the remaining elements into the fresh array.
          const ret = obj.slice(0, i);
          for (let j = i; j < obj.length; j++) {
            let v2 = obj[j];
            v2 = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
            if (v2 === void 0) {
              return void 0;
            }
            ret.push(v2);
          }
          return ret;
        }
      }
      return obj;
    }
  ),
  Int: Kinds.Int,
  CarV1HeaderOrV2Pragma: (
    /** @returns {undefined|any} */
    (obj) => {
      if (Kinds.Map(obj) === void 0) {
        return void 0;
      }
      const entries = Object.entries(obj);
      let ret = obj;
      // `version` is the only required field.
      let requiredCount = 1;
      for (let i = 0; i < entries.length; i++) {
        const [key, value] = entries[i];
        switch (key) {
          case "roots":
            {
              const v = Reprs["CarV1HeaderOrV2Pragma > roots (anon)"](value);
              if (v === void 0) {
                return void 0;
              }
              if (v !== value || ret !== obj) {
                if (ret === obj) {
                  // First substitution: copy the already-validated entries.
                  ret = {};
                  for (let j = 0; j < i; j++) {
                    ret[entries[j][0]] = entries[j][1];
                  }
                }
                ret.roots = v;
              }
            }
            break;
          case "version":
            {
              requiredCount--;
              const v = Reprs.Int(value);
              if (v === void 0) {
                return void 0;
              }
              if (v !== value || ret !== obj) {
                if (ret === obj) {
                  // First substitution: copy the already-validated entries.
                  ret = {};
                  for (let j = 0; j < i; j++) {
                    ret[entries[j][0]] = entries[j][1];
                  }
                }
                ret.version = v;
              }
            }
            break;
          default:
            // Unknown keys make the whole header invalid.
            return void 0;
        }
      }
      if (requiredCount > 0) {
        return void 0;
      }
      return ret;
    }
  )
};
// Public entry points of the header validator: `toTyped` validates/normalizes
// a decoded CARv1 header or CARv2 pragma, `toRepresentation` does the same
// for its representation form.
var CarV1HeaderOrV2Pragma = {
  toTyped: Types.CarV1HeaderOrV2Pragma,
  toRepresentation: Reprs.CarV1HeaderOrV2Pragma
};
+
// node_modules/@ipld/car/src/buffer-reader.js
var fsread = fs.readSync; // synchronous fd read used by the buffer-based CAR reader

// node_modules/cborg/lib/length.js
var cborEncoders2 = makeCborEncoders(); // CBOR token encoders used for length pre-calculation
+
// node_modules/@ipld/car/src/buffer-writer.js
var import_varint4 = __toESM(require_varint(), 1);
// Pre-built CBOR tokens for the fixed part of a CARv1 header: a 2-entry map
// with "version" (always 1 here) followed by the "roots" key.
var headerPreludeTokens = [
  new Token(Type.map, 2),
  new Token(Type.string, "version"),
  new Token(Type.uint, 1),
  new Token(Type.string, "roots")
];
// CIDs are encoded in CBOR using tag 42.
var CID_TAG = new Token(Type.tag, 42);
+
+// node_modules/@ipld/car/src/decoder.js
/**
 * Read and validate the header at the start of a CAR archive.
 *
 * Reads a varint length prefix followed by a DAG-CBOR block and validates it
 * against the CarV1HeaderOrV2Pragma schema. For version 2, seeks past the
 * fixed v2 header to the inner CARv1 payload, reads its header recursively
 * and merges the two.
 *
 * @param {*} reader - a bytesReader/chunkReader-style seekable reader
 * @param {number} [strictVersion] - when set, the header version must match exactly
 * @returns {Promise<object>} the decoded (and, for v2, merged) header
 * @throws {Error} on a zero-length, malformed or wrong-version header
 */
async function readHeader(reader, strictVersion) {
  // Varint prefix gives the byte length of the CBOR-encoded header block.
  const length4 = decodeVarint(await reader.upTo(8), reader);
  if (length4 === 0) {
    throw new Error("Invalid CAR header (zero length)");
  }
  const header = await reader.exactly(length4, true);
  const block = decode6(header);
  if (CarV1HeaderOrV2Pragma.toTyped(block) === void 0) {
    throw new Error("Invalid CAR header format");
  }
  if (block.version !== 1 && block.version !== 2 || strictVersion !== void 0 && block.version !== strictVersion) {
    throw new Error(`Invalid CAR version: ${block.version}${strictVersion !== void 0 ? ` (expected ${strictVersion})` : ""}`);
  }
  if (block.version === 1) {
    if (!Array.isArray(block.roots)) {
      throw new Error("Invalid CAR header format");
    }
    return block;
  }
  // Version 2: the pragma must NOT carry roots; the fixed-size v2 header
  // follows, then the inner CARv1 section at dataOffset.
  if (block.roots !== void 0) {
    throw new Error("Invalid CAR header format");
  }
  const v2Header = decodeV2Header(await reader.exactly(V2_HEADER_LENGTH, true));
  reader.seek(v2Header.dataOffset - reader.pos);
  const v1Header = await readHeader(reader, 1);
  return Object.assign(v1Header, v2Header);
}
/**
 * Create a seekable reader over an in-memory Uint8Array.
 *
 * All reads return subarray views into `bytes` (no copying). `upTo` never
 * consumes; `exactly` consumes only when `seek` is true.
 *
 * @param {Uint8Array} bytes
 */
function bytesReader(bytes) {
  let cursor = 0;
  return {
    async upTo(want) {
      // Clamp to the remaining bytes; may return fewer than requested.
      return bytes.subarray(cursor, cursor + Math.min(want, bytes.length - cursor));
    },
    async exactly(want, seek = false) {
      if (want > bytes.length - cursor) {
        throw new Error("Unexpected end of data");
      }
      const view = bytes.subarray(cursor, cursor + want);
      if (seek) {
        cursor += want;
      }
      return view;
    },
    seek(distance) {
      cursor += distance;
    },
    get pos() {
      return cursor;
    }
  };
}
/**
 * Create a seekable reader over an on-demand chunk source.
 *
 * `readChunk` is called repeatedly to pull the next Uint8Array chunk and
 * returns null/undefined at end of input. Seeking past the buffered data is
 * supported: the deficit is tracked as a negative `have` and the skipped
 * bytes are dropped as new chunks arrive.
 *
 * @param {() => Promise<Uint8Array|null|undefined>} readChunk
 */
function chunkReader(readChunk) {
  let pos = 0; // absolute position consumed/seeked so far
  let have = 0; // buffered bytes past `offset` (negative after an over-seek)
  let offset = 0; // read offset within `currentChunk`
  let currentChunk = new Uint8Array(0);
  // Pull chunks until at least `length4` bytes are buffered (or EOF), then
  // compact everything into a single contiguous `currentChunk`.
  const read4 = async (length4) => {
    have = currentChunk.length - offset;
    const bufa = [currentChunk.subarray(offset)];
    while (have < length4) {
      const chunk = await readChunk();
      if (chunk == null) {
        break;
      }
      if (have < 0) {
        // Still discarding bytes from a previous over-seek; keep only the
        // portion of this chunk beyond the deficit.
        if (chunk.length > have) {
          bufa.push(chunk.subarray(-have));
        }
      } else {
        bufa.push(chunk);
      }
      have += chunk.length;
    }
    currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0));
    let off = 0;
    for (const b of bufa) {
      currentChunk.set(b, off);
      off += b.length;
    }
    offset = 0;
  };
  return {
    // Return up to `length4` bytes without consuming them.
    async upTo(length4) {
      if (currentChunk.length - offset < length4) {
        await read4(length4);
      }
      return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length4));
    },
    // Return exactly `length4` bytes, optionally consuming them; throws at EOF.
    async exactly(length4, seek = false) {
      if (currentChunk.length - offset < length4) {
        await read4(length4);
      }
      if (currentChunk.length - offset < length4) {
        throw new Error("Unexpected end of data");
      }
      const out = currentChunk.subarray(offset, offset + length4);
      if (seek) {
        pos += length4;
        offset += length4;
      }
      return out;
    },
    // Advance without reading; may move past the buffered data.
    seek(length4) {
      pos += length4;
      offset += length4;
    },
    get pos() {
      return pos;
    }
  };
}
+
+// node_modules/@ipld/car/src/reader.js
import fs2 from "fs";
import { promisify } from "util";
var fsread2 = promisify(fs2.read); // promisified fd read for the Node-specific CAR reader
+
+// node_modules/@ipld/car/src/writer.js
+import fs3 from "fs";
+import { promisify as promisify2 } from "util";
+
+// node_modules/@ipld/car/src/encoder.js
+var import_varint5 = __toESM(require_varint(), 1);
/**
 * Build the bytes of a CARv1 header: a varint length prefix followed by the
 * DAG-CBOR encoding of `{ version: 1, roots }`.
 *
 * @param {CID[]} roots
 * @returns {Uint8Array}
 */
function createHeader(roots) {
  const body = encode4({ version: 1, roots });
  const prefix = import_varint5.default.encode(body.length);
  const out = new Uint8Array(prefix.length + body.length);
  out.set(prefix, 0);
  out.set(body, prefix.length);
  return out;
}
/**
 * Wrap a byte-sink `writer` with the CAR encoding operations.
 *
 * @param {{write(bytes: Uint8Array): Promise, end(): Promise}} writer
 */
function createEncoder(writer) {
  /**
   * Write the CARv1 header for `roots`.
   * @param {CID[]} roots
   * @returns {Promise}
   */
  const setRoots = async (roots) => {
    await writer.write(createHeader(roots));
  };
  /**
   * Write one block section: varint(cid length + data length) | cid | data.
   * @param {Block} block
   * @returns {Promise}
   */
  const writeBlock = async ({ cid, bytes }) => {
    await writer.write(new Uint8Array(import_varint5.default.encode(cid.bytes.length + bytes.length)));
    await writer.write(cid.bytes);
    if (bytes.length) {
      await writer.write(bytes);
    }
  };
  /**
   * Finish the stream.
   * @returns {Promise}
   */
  const close = async () => {
    await writer.end();
  };
  return { setRoots, writeBlock, close };
}
+
+// node_modules/@ipld/car/src/iterator-channel.js
// Shared do-nothing placeholder used for resolver callbacks in the channel.
function noop() {
  // intentionally empty
}
/**
 * Create a push/pull channel pairing a `writer` with an async `iterator`.
 *
 * `writer.write()` queues a chunk and returns a backpressure promise (the
 * "drainer") that resolves once the consumer has emptied the queue;
 * `writer.end()` marks the channel finished. The iterator pulls queued
 * chunks and, when the queue is empty, parks on `outWait` until the writer
 * wakes it via `outWaitResolver`.
 */
function create3() {
  const chunkQueue = [];
  let drainer = null; // pending backpressure promise shared by writes
  let drainerResolver = noop;
  let ended = false;
  let outWait = null; // pending consumer promise while the queue is empty
  let outWaitResolver = noop;
  const makeDrainer = () => {
    // Lazily create one shared drainer; it resets itself when resolved.
    if (!drainer) {
      drainer = new Promise((resolve6) => {
        drainerResolver = () => {
          drainer = null;
          drainerResolver = noop;
          resolve6();
        };
      });
    }
    return drainer;
  };
  const writer = {
    /**
     * @param {T} chunk
     * @returns {Promise}
     */
    write(chunk) {
      chunkQueue.push(chunk);
      const drainer2 = makeDrainer();
      // Wake a consumer parked in next(), if any.
      outWaitResolver();
      return drainer2;
    },
    async end() {
      ended = true;
      const drainer2 = makeDrainer();
      outWaitResolver();
      // Wait until the consumer has drained everything already queued.
      await drainer2;
    }
  };
  const iterator = {
    /** @returns {Promise>} */
    async next() {
      const chunk = chunkQueue.shift();
      if (chunk) {
        if (chunkQueue.length === 0) {
          // Queue emptied: release any writer awaiting backpressure.
          drainerResolver();
        }
        return { done: false, value: chunk };
      }
      if (ended) {
        drainerResolver();
        return { done: true, value: void 0 };
      }
      if (!outWait) {
        // Park until the writer produces a chunk or ends, then retry next().
        outWait = new Promise((resolve6) => {
          outWaitResolver = () => {
            outWait = null;
            outWaitResolver = noop;
            return resolve6(iterator.next());
          };
        });
      }
      return outWait;
    }
  };
  return { writer, iterator };
}
+
+// node_modules/@ipld/car/src/writer-browser.js
// Browser-safe CAR writer. Writes are serialized through a promise chain
// (`_mutex`) so the header and blocks are encoded strictly in call order;
// the encoded bytes surface through the paired `out` async iterable.
var CarWriter = class _CarWriter {
  /**
   * @param {CID[]} roots
   * @param {CarEncoder} encoder
   */
  constructor(roots, encoder) {
    this._encoder = encoder;
    // The header write is queued first; every put() chains onto this mutex.
    this._mutex = encoder.setRoots(roots);
    this._ended = false;
  }
  /**
   * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
   *
   * @function
   * @memberof CarWriter
   * @instance
   * @async
   * @param {Block} block - A `{ cid:CID, bytes:Uint8Array }` pair.
   * @returns {Promise} The returned promise will only resolve once the
   * bytes this block generates are written to the `out` iterable.
   */
  async put(block) {
    if (!(block.bytes instanceof Uint8Array) || !block.cid) {
      throw new TypeError("Can only write {cid, bytes} objects");
    }
    if (this._ended) {
      throw new Error("Already closed");
    }
    const cid = CID2.asCID(block.cid);
    if (!cid) {
      throw new TypeError("Can only write {cid, bytes} objects");
    }
    // Chain onto the mutex so blocks are encoded in put() order.
    this._mutex = this._mutex.then(() => this._encoder.writeBlock({ cid, bytes: block.bytes }));
    return this._mutex;
  }
  /**
   * Finalise the CAR archive and signal that the `out` iterable should end once
   * any remaining bytes are written.
   *
   * @function
   * @memberof CarWriter
   * @instance
   * @async
   * @returns {Promise}
   */
  async close() {
    if (this._ended) {
      throw new Error("Already closed");
    }
    await this._mutex;
    this._ended = true;
    return this._encoder.close();
  }
  /**
   * Create a new CAR writer "channel" which consists of a
   * `{ writer:CarWriter, out:AsyncIterable }` pair.
   *
   * @async
   * @static
   * @memberof CarWriter
   * @param {CID[] | CID | void} roots
   * @returns {WriterChannel} The channel takes the form of
   * `{ writer:CarWriter, out:AsyncIterable }`.
   */
  static create(roots) {
    roots = toRoots(roots);
    const { encoder, iterator } = encodeWriter();
    const writer = new _CarWriter(roots, encoder);
    const out = new CarWriterOut(iterator);
    return { writer, out };
  }
  /**
   * Create a new CAR appender "channel" which consists of a
   * `{ writer:CarWriter, out:AsyncIterable }` pair.
   * This appender does not consider roots and does not produce a CAR header.
   * It is designed to append blocks to an _existing_ CAR archive. It is
   * expected that `out` will be concatenated onto the end of an existing
   * archive that already has a properly formatted header.
   *
   * @async
   * @static
   * @memberof CarWriter
   * @returns {WriterChannel} The channel takes the form of
   * `{ writer:CarWriter, out:AsyncIterable }`.
   */
  static createAppender() {
    const { encoder, iterator } = encodeWriter();
    // Suppress the header write so only block sections are emitted.
    encoder.setRoots = () => Promise.resolve();
    const writer = new _CarWriter([], encoder);
    const out = new CarWriterOut(iterator);
    return { writer, out };
  }
  /**
   * Update the list of roots in the header of an existing CAR as represented
   * in a Uint8Array.
   *
   * This operation is an _overwrite_, the total length of the CAR will not be
   * modified. A rejection will occur if the new header will not be the same
   * length as the existing header, in which case the CAR will not be modified.
   * It is the responsibility of the user to ensure that the roots being
   * replaced encode as the same length as the new roots.
   *
   * The byte array passed in an argument will be modified and also returned
   * upon successful modification.
   *
   * @async
   * @static
   * @memberof CarWriter
   * @param {Uint8Array} bytes
   * @param {CID[]} roots - A new list of roots to replace the existing list in
   * the CAR header. The new header must take up the same number of bytes as the
   * existing header, so the roots should collectively be the same byte length
   * as the existing roots.
   * @returns {Promise}
   */
  static async updateRootsInBytes(bytes, roots) {
    const reader = bytesReader(bytes);
    // After readHeader, reader.pos equals the byte length of the old header.
    await readHeader(reader);
    const newHeader = createHeader(roots);
    if (Number(reader.pos) !== newHeader.length) {
      throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${reader.pos} bytes, new header is ${newHeader.length} bytes)`);
    }
    bytes.set(newHeader, 0);
    return bytes;
  }
};
// Single-use async-iterable wrapper around the channel iterator produced by
// a CarWriter; a second attempt to iterate throws.
var CarWriterOut = class {
  /**
   * @param {AsyncIterator} iterator
   */
  constructor(iterator) {
    this._iterator = iterator;
  }
  [Symbol.asyncIterator]() {
    if (!this._iterating) {
      this._iterating = true;
      return this._iterator;
    }
    throw new Error("Multiple iterator not supported");
  }
};
// Pair a CAR encoder with the async iterator that yields its output bytes.
function encodeWriter() {
  const { writer, iterator } = create3();
  return { encoder: createEncoder(writer), iterator };
}
/**
 * Normalize a roots argument (undefined, single CID, or CID array) to an
 * array of CIDs.
 *
 * @param {CID[] | CID | void} roots
 * @returns {CID[]}
 * @throws {TypeError} when any entry is not CID-like
 */
function toRoots(roots) {
  if (roots === void 0) {
    return [];
  }
  if (Array.isArray(roots)) {
    return roots.map((root) => {
      const cid = CID2.asCID(root);
      if (!cid) {
        throw new TypeError("roots must be a single CID or an array of CIDs");
      }
      return cid;
    });
  }
  const single = CID2.asCID(roots);
  if (!single) {
    throw new TypeError("roots must be a single CID or an array of CIDs");
  }
  return [single];
}
+
+// node_modules/@ipld/car/src/writer.js
// Promisified fd read/write used by the Node-only root-updating helper below.
var fsread3 = promisify2(fs3.read);
var fswrite = promisify2(fs3.write);
// Node-specific CAR writer: extends the browser writer with an fd-based
// in-place header update.
var CarWriter2 = class extends CarWriter {
  /**
   * Update the list of roots in the header of an existing CAR file. The first
   * argument must be a file descriptor for CAR file that is open in read and
   * write mode (not append), e.g. `fs.open` or `fs.promises.open` with `'r+'`
   * mode.
   *
   * This operation is an _overwrite_, the total length of the CAR will not be
   * modified. A rejection will occur if the new header will not be the same
   * length as the existing header, in which case the CAR will not be modified.
   * It is the responsibility of the user to ensure that the roots being
   * replaced encode as the same length as the new roots.
   *
   * This function is **only available in Node.js** and not a browser
   * environment.
   *
   * @async
   * @static
   * @memberof CarWriter
   * @param {fs.promises.FileHandle | number} fd - A file descriptor from the
   * Node.js `fs` module. Either an integer, from `fs.open()` or a `FileHandle`
   * from `fs.promises.open()`.
   * @param {CID[]} roots - A new list of roots to replace the existing list in
   * the CAR header. The new header must take up the same number of bytes as the
   * existing header, so the roots should collectively be the same byte length
   * as the existing roots.
   * @returns {Promise}
   */
  static async updateRootsInFile(fd, roots) {
    const chunkSize = 256;
    let bytes;
    let offset = 0;
    let readChunk;
    // Support both raw integer fds and fs.promises FileHandle objects.
    if (typeof fd === "number") {
      readChunk = async () => (await fsread3(fd, bytes, 0, chunkSize, offset)).bytesRead;
    } else if (typeof fd === "object" && typeof fd.read === "function") {
      readChunk = async () => (await fd.read(bytes, 0, chunkSize, offset)).bytesRead;
    } else {
      throw new TypeError("Bad fd");
    }
    // Stream the file in fixed-size chunks just far enough to parse the header.
    const fdReader = chunkReader(async () => {
      bytes = new Uint8Array(chunkSize);
      const read4 = await readChunk();
      offset += read4;
      return read4 < chunkSize ? bytes.subarray(0, read4) : bytes;
    });
    await readHeader(fdReader);
    const newHeader = createHeader(roots);
    // fdReader.pos is now the byte length of the existing header.
    if (fdReader.pos !== newHeader.length) {
      throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${fdReader.pos} bytes, new header is ${newHeader.length} bytes)`);
    }
    if (typeof fd === "number") {
      await fswrite(fd, newHeader, 0, newHeader.length, 0);
    } else if (typeof fd === "object" && typeof fd.read === "function") {
      await fd.write(newHeader, 0, newHeader.length, 0);
    }
  }
};
+
+// node_modules/it-drain/dist/src/index.js
// True when `thing` exposes the async-iteration protocol
// (`!= null` intentionally matches both null and undefined).
function isAsyncIterable(thing) {
  const iter = thing[Symbol.asyncIterator];
  return iter != null;
}
/**
 * Consume a (sync or async) iterable entirely, discarding every value.
 * Returns a promise only for async sources, mirroring it-drain's contract.
 */
function drain(source) {
  if (!isAsyncIterable(source)) {
    for (const _ of source) {
      // discard
    }
    return;
  }
  return (async () => {
    for await (const _ of source) {
      // discard
    }
  })();
}
var src_default = drain;
+
+// node_modules/it-peekable/dist/src/index.js
/**
 * Wrap a (sync or async) iterable so values can be pushed back after being
 * read, enabling one-item lookahead. Note `peek` advances the underlying
 * iterator; callers `push` the value back if they still want to consume it.
 */
function peekable(iterable) {
  const isAsync = iterable[Symbol.asyncIterator] != null;
  const iterator = isAsync ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
  const protocol = isAsync ? Symbol.asyncIterator : Symbol.iterator;
  const pushedBack = [];
  return {
    peek: () => iterator.next(),
    push: (value) => {
      pushedBack.push(value);
    },
    next: () => {
      // Serve pushed-back values (FIFO) before pulling from the source.
      if (pushedBack.length > 0) {
        return {
          done: false,
          value: pushedBack.shift()
        };
      }
      return iterator.next();
    },
    [protocol]() {
      return this;
    }
  };
}
var src_default2 = peekable;
+
+// node_modules/it-map/dist/src/index.js
// True when `thing` exposes the async-iteration protocol.
function isAsyncIterable2(thing) {
  return thing[Symbol.asyncIterator] != null;
}
/**
 * Lazily map `func` over a sync or async iterable. A sync source whose
 * mapper returns promises is transparently promoted to an async iterable
 * (detected by probing the first mapped value for a `.then`).
 */
function map(source, func) {
  if (isAsyncIterable2(source)) {
    return (async function* () {
      for await (const item of source) {
        yield func(item);
      }
    })();
  }
  // Sync source: pull one item to find out whether `func` is async.
  const ahead = src_default2(source);
  const { value, done } = ahead.next();
  if (done === true) {
    return (function* () {
    })();
  }
  const firstResult = func(value);
  if (typeof firstResult.then === "function") {
    return (async function* () {
      yield await firstResult;
      for await (const item of ahead) {
        yield func(item);
      }
    })();
  }
  return (function* () {
    yield firstResult;
    for (const item of ahead) {
      yield func(item);
    }
  })();
}
var src_default3 = map;
+
+// node_modules/p-defer/index.js
/**
 * Create a deferred: a promise along with its externally exposed
 * resolve/reject functions.
 *
 * @returns {{promise: Promise, resolve: Function, reject: Function}}
 */
function pDefer() {
  let resolveFn;
  let rejectFn;
  const promise = new Promise((res, rej) => {
    resolveFn = res;
    rejectFn = rej;
  });
  return { promise, resolve: resolveFn, reject: rejectFn };
}
+
// node_modules/eventemitter3/index.mjs
var import_index = __toESM(require_eventemitter3(), 1); // bundled EventEmitter3, used as PQueue's base class
+
+// node_modules/p-timeout/index.js
// Error used by pTimeout when the deadline elapses.
var TimeoutError = class extends Error {
  constructor(message2) {
    super(message2);
    this.name = "TimeoutError";
  }
};
// Fallback abort error for platforms without DOMException. The message is
// assigned as an own property (not passed to super) to mimic DOMException.
var AbortError = class extends Error {
  constructor(message2) {
    super();
    this.name = "AbortError";
    this.message = message2;
  }
};
// Prefer a real DOMException where the platform provides one.
var getDOMException = (errorMessage) => {
  if (globalThis.DOMException === void 0) {
    return new AbortError(errorMessage);
  }
  return new DOMException(errorMessage);
};
// Derive the rejection reason for an aborted signal, wrapping non-Error
// reasons in a DOMException/AbortError.
var getAbortedReason = (signal) => {
  const reason = signal.reason === void 0 ? getDOMException("This operation was aborted.") : signal.reason;
  if (reason instanceof Error) {
    return reason;
  }
  return getDOMException(reason);
};
/**
 * Race `promise` against a timeout of `milliseconds`.
 *
 * On timeout: calls `fallback` when it is a function; otherwise rejects with
 * a TimeoutError (using `fallback` as the message when it is a string, or
 * `fallback` itself when it is an Error). Calls `promise.cancel()` on timeout
 * when available, and honors `options.signal` for abort. The returned promise
 * carries a `.clear()` method that disarms the timer.
 *
 * @param {Promise} promise
 * @param {number} milliseconds - must be a positive number; Infinity disables the timeout
 * @param {Function|string|Error} [fallback]
 * @param {{customTimers?: object, signal?: AbortSignal}} [options]
 */
function pTimeout(promise, milliseconds, fallback, options) {
  let timer;
  const cancelablePromise = new Promise((resolve6, reject) => {
    if (typeof milliseconds !== "number" || Math.sign(milliseconds) !== 1) {
      throw new TypeError(`Expected \`milliseconds\` to be a positive number, got \`${milliseconds}\``);
    }
    if (milliseconds === Number.POSITIVE_INFINITY) {
      // No timeout: just mirror the input promise.
      resolve6(promise);
      return;
    }
    options = {
      customTimers: { setTimeout, clearTimeout },
      ...options
    };
    if (options.signal) {
      const { signal } = options;
      if (signal.aborted) {
        reject(getAbortedReason(signal));
      }
      signal.addEventListener("abort", () => {
        reject(getAbortedReason(signal));
      });
    }
    // Arm the timeout.
    timer = options.customTimers.setTimeout.call(void 0, () => {
      if (typeof fallback === "function") {
        try {
          resolve6(fallback());
        } catch (error) {
          reject(error);
        }
        return;
      }
      const message2 = typeof fallback === "string" ? fallback : `Promise timed out after ${milliseconds} milliseconds`;
      const timeoutError = fallback instanceof Error ? fallback : new TimeoutError(message2);
      if (typeof promise.cancel === "function") {
        promise.cancel();
      }
      reject(timeoutError);
    }, milliseconds);
    (async () => {
      try {
        resolve6(await promise);
      } catch (error) {
        reject(error);
      } finally {
        // Always disarm the timer once the input promise settles.
        options.customTimers.clearTimeout.call(void 0, timer);
      }
    })();
  });
  // Allow callers to disarm the timeout manually.
  cancelablePromise.clear = () => {
    clearTimeout(timer);
    timer = void 0;
  };
  return cancelablePromise;
}
+
+// node_modules/p-queue/dist/lower-bound.js
/**
 * Binary search for an insertion point: the index of the first element in
 * `array` for which `comparator(element, value)` is positive (i.e. inserting
 * at the returned index keeps the array ordered, after any equal elements).
 *
 * @param {Array} array - ordered with respect to `comparator`
 * @param {*} value
 * @param {(a, b) => number} comparator
 * @returns {number}
 */
function lowerBound(array, value, comparator) {
  let lo = 0;
  let span = array.length;
  while (span > 0) {
    const half = Math.trunc(span / 2);
    const probe = lo + half;
    if (comparator(array[probe], value) <= 0) {
      lo = probe + 1;
      span -= half + 1;
    } else {
      span = half;
    }
  }
  return lo;
}
+
+// node_modules/p-queue/dist/priority-queue.js
// A priority queue backed by a sorted array. Higher `priority` dequeues
// first; equal priorities preserve insertion order (FIFO). Rewritten with a
// native #private field in place of the downleveled WeakMap machinery.
var PriorityQueue = class {
  #queue = [];
  enqueue(run, options) {
    const settings = { priority: 0, ...options };
    const element = { priority: settings.priority, run };
    // Fast path: a priority no higher than the current tail goes to the back.
    if (this.size && this.#queue[this.size - 1].priority >= settings.priority) {
      this.#queue.push(element);
      return;
    }
    const index = lowerBound(this.#queue, element, (a, b) => b.priority - a.priority);
    this.#queue.splice(index, 0, element);
  }
  dequeue() {
    return this.#queue.shift()?.run;
  }
  filter(options) {
    return this.#queue.filter((element) => element.priority === options.priority).map((element) => element.run);
  }
  get size() {
    return this.#queue.length;
  }
};
var priority_queue_default = PriorityQueue;
+
+// node_modules/p-queue/dist/index.js
// Downleveled TypeScript helper: write a WeakMap-backed private member,
// enforcing the "brand" check that the receiver declared the member.
var __classPrivateFieldSet = function(receiver, state, value, kind, f) {
  if (kind === "m")
    throw new TypeError("Private method is not writable");
  if (kind === "a" && !f)
    throw new TypeError("Private accessor was defined without a setter");
  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
    throw new TypeError("Cannot write private member to an object whose class did not declare it");
  return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value;
};
// Downleveled TypeScript helper: read a WeakMap-backed private member
// (field, accessor or method), with the same brand check.
var __classPrivateFieldGet2 = function(receiver, state, kind, f) {
  if (kind === "a" && !f)
    throw new TypeError("Private accessor was defined without a getter");
  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
    throw new TypeError("Cannot read private member from an object whose class did not declare it");
  return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
// WeakMap/function slots for PQueue's downleveled private fields and methods;
// they are bound alongside the class definition (outside this excerpt).
var _PQueue_instances;
var _PQueue_carryoverConcurrencyCount;
var _PQueue_isIntervalIgnored;
var _PQueue_intervalCount;
var _PQueue_intervalCap;
var _PQueue_interval;
var _PQueue_intervalEnd;
var _PQueue_intervalId;
var _PQueue_timeoutId;
var _PQueue_queue;
var _PQueue_queueClass;
var _PQueue_pending;
var _PQueue_concurrency;
var _PQueue_isPaused;
var _PQueue_throwOnTimeout;
var _PQueue_doesIntervalAllowAnother_get;
var _PQueue_doesConcurrentAllowAnother_get;
var _PQueue_next;
var _PQueue_onResumeInterval;
var _PQueue_isIntervalPaused_get;
var _PQueue_tryToStartAnother;
var _PQueue_initializeIntervalIfNeeded;
var _PQueue_onInterval;
var _PQueue_processQueue;
var _PQueue_throwOnAbort;
var _PQueue_onEvent;
// Abort error type thrown for aborted PQueue tasks; intentionally bare.
var AbortError2 = class extends Error {
};
+var PQueue = class extends import_index.default {
+  // TODO: The `throwOnTimeout` option should affect the return types of `add()` and `addAll()`
+  constructor(options) {
+    var _a, _b, _c, _d;
+    super();
+    _PQueue_instances.add(this);
+    _PQueue_carryoverConcurrencyCount.set(this, void 0);
+    _PQueue_isIntervalIgnored.set(this, void 0);
+    _PQueue_intervalCount.set(this, 0);
+    _PQueue_intervalCap.set(this, void 0);
+    _PQueue_interval.set(this, void 0);
+    _PQueue_intervalEnd.set(this, 0);
+    _PQueue_intervalId.set(this, void 0);
+    _PQueue_timeoutId.set(this, void 0);
+    _PQueue_queue.set(this, void 0);
+    _PQueue_queueClass.set(this, void 0);
+    _PQueue_pending.set(this, 0);
+    _PQueue_concurrency.set(this, void 0);
+    _PQueue_isPaused.set(this, void 0);
+    _PQueue_throwOnTimeout.set(this, void 0);
+    Object.defineProperty(this, "timeout", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    options = {
+      carryoverConcurrencyCount: false,
+      intervalCap: Number.POSITIVE_INFINITY,
+      interval: 0,
+      concurrency: Number.POSITIVE_INFINITY,
+      autoStart: true,
+      queueClass: priority_queue_default,
+      ...options
+    };
+    if (!(typeof options.intervalCap === "number" && options.intervalCap >= 1)) {
+      throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${(_b = (_a = options.intervalCap) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : ""}\` (${typeof options.intervalCap})`);
+    }
+    if (options.interval === void 0 || !(Number.isFinite(options.interval) && options.interval >= 0)) {
+      throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${(_d = (_c = options.interval) === null || _c === void 0 ? void 0 : _c.toString()) !== null && _d !== void 0 ? _d : ""}\` (${typeof options.interval})`);
+    }
+    __classPrivateFieldSet(this, _PQueue_carryoverConcurrencyCount, options.carryoverConcurrencyCount, "f");
+    __classPrivateFieldSet(this, _PQueue_isIntervalIgnored, options.intervalCap === Number.POSITIVE_INFINITY || options.interval === 0, "f");
+    __classPrivateFieldSet(this, _PQueue_intervalCap, options.intervalCap, "f");
+    __classPrivateFieldSet(this, _PQueue_interval, options.interval, "f");
+    __classPrivateFieldSet(this, _PQueue_queue, new options.queueClass(), "f");
+    __classPrivateFieldSet(this, _PQueue_queueClass, options.queueClass, "f");
+    this.concurrency = options.concurrency;
+    this.timeout = options.timeout;
+    __classPrivateFieldSet(this, _PQueue_throwOnTimeout, options.throwOnTimeout === true, "f");
+    __classPrivateFieldSet(this, _PQueue_isPaused, options.autoStart === false, "f");
+  }
+  get concurrency() {
+    return __classPrivateFieldGet2(this, _PQueue_concurrency, "f");
+  }
+  set concurrency(newConcurrency) {
+    if (!(typeof newConcurrency === "number" && newConcurrency >= 1)) {
+      throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`);
+    }
+    __classPrivateFieldSet(this, _PQueue_concurrency, newConcurrency, "f");
+    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
+  }
+  async add(function_, options = {}) {
+    options = {
+      timeout: this.timeout,
+      throwOnTimeout: __classPrivateFieldGet2(this, _PQueue_throwOnTimeout, "f"),
+      ...options
+    };
+    return new Promise((resolve6, reject) => {
+      __classPrivateFieldGet2(this, _PQueue_queue, "f").enqueue(async () => {
+        var _a;
+        var _b, _c;
+        __classPrivateFieldSet(this, _PQueue_pending, (_b = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _b++, _b), "f");
+        __classPrivateFieldSet(this, _PQueue_intervalCount, (_c = __classPrivateFieldGet2(this, _PQueue_intervalCount, "f"), _c++, _c), "f");
+        try {
+          if ((_a = options.signal) === null || _a === void 0 ? void 0 : _a.aborted) {
+            throw new AbortError2("The task was aborted.");
+          }
+          let operation = function_({ signal: options.signal });
+          if (options.timeout) {
+            operation = pTimeout(Promise.resolve(operation), options.timeout);
+          }
+          if (options.signal) {
+            operation = Promise.race([operation, __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_throwOnAbort).call(this, options.signal)]);
+          }
+          const result = await operation;
+          resolve6(result);
+          this.emit("completed", result);
+        } catch (error) {
+          if (error instanceof TimeoutError && !options.throwOnTimeout) {
+            resolve6();
+            return;
+          }
+          reject(error);
+          this.emit("error", error);
+        } finally {
+          __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_next).call(this);
+        }
+      }, options);
+      this.emit("add");
+      __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this);
+    });
+  }
+  async addAll(functions, options) {
+    return Promise.all(functions.map(async (function_) => this.add(function_, options)));
+  }
+  /**
+  Start (or resume) executing enqueued tasks within concurrency limit. No need to call this if queue is not paused (via `options.autoStart = false` or by `.pause()` method.)
+  */
+  start() {
+    if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) {
+      return this;
+    }
+    __classPrivateFieldSet(this, _PQueue_isPaused, false, "f");
+    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
+    return this;
+  }
+  /**
+  Put queue execution on hold.
+  */
+  pause() {
+    __classPrivateFieldSet(this, _PQueue_isPaused, true, "f");
+  }
+  /**
+  Clear the queue.
+  */
+  clear() {
+    __classPrivateFieldSet(this, _PQueue_queue, new (__classPrivateFieldGet2(this, _PQueue_queueClass, "f"))(), "f");
+  }
+  /**
+      Can be called multiple times. Useful if you for example add additional items at a later time.
+  
+      @returns A promise that settles when the queue becomes empty.
+      */
+  async onEmpty() {
+    if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
+      return;
+    }
+    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "empty");
+  }
+  /**
+      @returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`.
+  
+      If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item.
+  
+      Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation.
+      */
+  async onSizeLessThan(limit) {
+    if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit) {
+      return;
+    }
+    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "next", () => __classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit);
+  }
+  /**
+      The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
+  
+      @returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`.
+      */
+  async onIdle() {
+    if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
+      return;
+    }
+    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "idle");
+  }
+  /**
+  Size of the queue, the number of queued items waiting to run.
+  */
+  get size() {
+    return __classPrivateFieldGet2(this, _PQueue_queue, "f").size;
+  }
+  /**
+      Size of the queue, filtered by the given options.
+  
+      For example, this can be used to find the number of items remaining in the queue with a specific priority level.
+      */
+  sizeBy(options) {
+    return __classPrivateFieldGet2(this, _PQueue_queue, "f").filter(options).length;
+  }
+  /**
+  Number of running items (no longer in the queue).
+  */
+  get pending() {
+    return __classPrivateFieldGet2(this, _PQueue_pending, "f");
+  }
+  /**
+  Whether the queue is currently paused.
+  */
+  get isPaused() {
+    return __classPrivateFieldGet2(this, _PQueue_isPaused, "f");
+  }
+};
+_PQueue_carryoverConcurrencyCount = /* @__PURE__ */ new WeakMap(), _PQueue_isIntervalIgnored = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCount = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCap = /* @__PURE__ */ new WeakMap(), _PQueue_interval = /* @__PURE__ */ new WeakMap(), _PQueue_intervalEnd = /* @__PURE__ */ new WeakMap(), _PQueue_intervalId = /* @__PURE__ */ new WeakMap(), _PQueue_timeoutId = /* @__PURE__ */ new WeakMap(), _PQueue_queue = /* @__PURE__ */ new WeakMap(), _PQueue_queueClass = /* @__PURE__ */ new WeakMap(), _PQueue_pending = /* @__PURE__ */ new WeakMap(), _PQueue_concurrency = /* @__PURE__ */ new WeakMap(), _PQueue_isPaused = /* @__PURE__ */ new WeakMap(), _PQueue_throwOnTimeout = /* @__PURE__ */ new WeakMap(), _PQueue_instances = /* @__PURE__ */ new WeakSet(), _PQueue_doesIntervalAllowAnother_get = function _PQueue_doesIntervalAllowAnother_get2() {
+  return __classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalCount, "f") < __classPrivateFieldGet2(this, _PQueue_intervalCap, "f");
+}, _PQueue_doesConcurrentAllowAnother_get = function _PQueue_doesConcurrentAllowAnother_get2() {
+  return __classPrivateFieldGet2(this, _PQueue_pending, "f") < __classPrivateFieldGet2(this, _PQueue_concurrency, "f");
+}, _PQueue_next = function _PQueue_next2() {
+  var _a;
+  __classPrivateFieldSet(this, _PQueue_pending, (_a = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _a--, _a), "f");
+  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this);
+  this.emit("next");
+}, _PQueue_onResumeInterval = function _PQueue_onResumeInterval2() {
+  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this);
+  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this);
+  __classPrivateFieldSet(this, _PQueue_timeoutId, void 0, "f");
+}, _PQueue_isIntervalPaused_get = function _PQueue_isIntervalPaused_get2() {
+  const now = Date.now();
+  if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f") === void 0) {
+    const delay = __classPrivateFieldGet2(this, _PQueue_intervalEnd, "f") - now;
+    if (delay < 0) {
+      __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? __classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f");
+    } else {
+      if (__classPrivateFieldGet2(this, _PQueue_timeoutId, "f") === void 0) {
+        __classPrivateFieldSet(this, _PQueue_timeoutId, setTimeout(() => {
+          __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onResumeInterval).call(this);
+        }, delay), "f");
+      }
+      return true;
+    }
+  }
+  return false;
+}, _PQueue_tryToStartAnother = function _PQueue_tryToStartAnother2() {
+  if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
+    if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f")) {
+      clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f"));
+    }
+    __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f");
+    this.emit("empty");
+    if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0) {
+      this.emit("idle");
+    }
+    return false;
+  }
+  if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) {
+    const canInitializeInterval = !__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_isIntervalPaused_get);
+    if (__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesIntervalAllowAnother_get) && __classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesConcurrentAllowAnother_get)) {
+      const job = __classPrivateFieldGet2(this, _PQueue_queue, "f").dequeue();
+      if (!job) {
+        return false;
+      }
+      this.emit("active");
+      job();
+      if (canInitializeInterval) {
+        __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this);
+      }
+      return true;
+    }
+  }
+  return false;
+}, _PQueue_initializeIntervalIfNeeded = function _PQueue_initializeIntervalIfNeeded2() {
+  if (__classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalId, "f") !== void 0) {
+    return;
+  }
+  __classPrivateFieldSet(this, _PQueue_intervalId, setInterval(() => {
+    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this);
+  }, __classPrivateFieldGet2(this, _PQueue_interval, "f")), "f");
+  __classPrivateFieldSet(this, _PQueue_intervalEnd, Date.now() + __classPrivateFieldGet2(this, _PQueue_interval, "f"), "f");
+}, _PQueue_onInterval = function _PQueue_onInterval2() {
+  if (__classPrivateFieldGet2(this, _PQueue_intervalCount, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_intervalId, "f")) {
+    clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f"));
+    __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f");
+  }
+  __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? __classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f");
+  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
+}, _PQueue_processQueue = function _PQueue_processQueue2() {
+  while (__classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this)) {
+  }
+}, _PQueue_throwOnAbort = async function _PQueue_throwOnAbort2(signal) {
+  return new Promise((_resolve, reject) => {
+    signal.addEventListener("abort", () => {
+      reject(new AbortError2("The task was aborted."));
+    }, { once: true });
+  });
+}, _PQueue_onEvent = async function _PQueue_onEvent2(event, filter3) {
+  return new Promise((resolve6) => {
+    const listener = () => {
+      if (filter3 && !filter3()) {
+        return;
+      }
+      this.off(event, listener);
+      resolve6();
+    };
+    this.on(event, listener);
+  });
+};
+var dist_default = PQueue;
+
+// node_modules/@ipld/dag-pb/src/index.js
+var src_exports2 = {};
+__export(src_exports2, {
+  code: () => code2,
+  createLink: () => createLink,
+  createNode: () => createNode,
+  decode: () => decode11,
+  encode: () => encode7,
+  name: () => name,
+  prepare: () => prepare,
+  validate: () => validate
+});
+
+// node_modules/@ipld/dag-pb/src/pb-decode.js
+var textDecoder2 = new TextDecoder();
+function decodeVarint2(bytes, offset) {
+  let v = 0;
+  for (let shift = 0; ; shift += 7) {
+    if (shift >= 64) {
+      throw new Error("protobuf: varint overflow");
+    }
+    if (offset >= bytes.length) {
+      throw new Error("protobuf: unexpected end of data");
+    }
+    const b = bytes[offset++];
+    v += shift < 28 ? (b & 127) << shift : (b & 127) * 2 ** shift;
+    if (b < 128) {
+      break;
+    }
+  }
+  return [v, offset];
+}
+function decodeBytes(bytes, offset) {
+  let byteLen;
+  [byteLen, offset] = decodeVarint2(bytes, offset);
+  const postOffset = offset + byteLen;
+  if (byteLen < 0 || postOffset < 0) {
+    throw new Error("protobuf: invalid length");
+  }
+  if (postOffset > bytes.length) {
+    throw new Error("protobuf: unexpected end of data");
+  }
+  return [bytes.subarray(offset, postOffset), postOffset];
+}
+function decodeKey(bytes, index) {
+  let wire;
+  [wire, index] = decodeVarint2(bytes, index);
+  return [wire & 7, wire >> 3, index];
+}
+function decodeLink(bytes) {
+  const link = {};
+  const l = bytes.length;
+  let index = 0;
+  while (index < l) {
+    let wireType, fieldNum;
+    [wireType, fieldNum, index] = decodeKey(bytes, index);
+    if (fieldNum === 1) {
+      if (link.Hash) {
+        throw new Error("protobuf: (PBLink) duplicate Hash section");
+      }
+      if (wireType !== 2) {
+        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Hash`);
+      }
+      if (link.Name !== void 0) {
+        throw new Error("protobuf: (PBLink) invalid order, found Name before Hash");
+      }
+      if (link.Tsize !== void 0) {
+        throw new Error("protobuf: (PBLink) invalid order, found Tsize before Hash");
+      }
+      [link.Hash, index] = decodeBytes(bytes, index);
+    } else if (fieldNum === 2) {
+      if (link.Name !== void 0) {
+        throw new Error("protobuf: (PBLink) duplicate Name section");
+      }
+      if (wireType !== 2) {
+        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Name`);
+      }
+      if (link.Tsize !== void 0) {
+        throw new Error("protobuf: (PBLink) invalid order, found Tsize before Name");
+      }
+      let byts;
+      [byts, index] = decodeBytes(bytes, index);
+      link.Name = textDecoder2.decode(byts);
+    } else if (fieldNum === 3) {
+      if (link.Tsize !== void 0) {
+        throw new Error("protobuf: (PBLink) duplicate Tsize section");
+      }
+      if (wireType !== 0) {
+        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Tsize`);
+      }
+      [link.Tsize, index] = decodeVarint2(bytes, index);
+    } else {
+      throw new Error(`protobuf: (PBLink) invalid fieldNumber, expected 1, 2 or 3, got ${fieldNum}`);
+    }
+  }
+  if (index > l) {
+    throw new Error("protobuf: (PBLink) unexpected end of data");
+  }
+  return link;
+}
+function decodeNode(bytes) {
+  const l = bytes.length;
+  let index = 0;
+  let links = void 0;
+  let linksBeforeData = false;
+  let data = void 0;
+  while (index < l) {
+    let wireType, fieldNum;
+    [wireType, fieldNum, index] = decodeKey(bytes, index);
+    if (wireType !== 2) {
+      throw new Error(`protobuf: (PBNode) invalid wireType, expected 2, got ${wireType}`);
+    }
+    if (fieldNum === 1) {
+      if (data) {
+        throw new Error("protobuf: (PBNode) duplicate Data section");
+      }
+      [data, index] = decodeBytes(bytes, index);
+      if (links) {
+        linksBeforeData = true;
+      }
+    } else if (fieldNum === 2) {
+      if (linksBeforeData) {
+        throw new Error("protobuf: (PBNode) duplicate Links section");
+      } else if (!links) {
+        links = [];
+      }
+      let byts;
+      [byts, index] = decodeBytes(bytes, index);
+      links.push(decodeLink(byts));
+    } else {
+      throw new Error(`protobuf: (PBNode) invalid fieldNumber, expected 1 or 2, got ${fieldNum}`);
+    }
+  }
+  if (index > l) {
+    throw new Error("protobuf: (PBNode) unexpected end of data");
+  }
+  const node = {};
+  if (data) {
+    node.Data = data;
+  }
+  node.Links = links || [];
+  return node;
+}
+
+// node_modules/@ipld/dag-pb/src/pb-encode.js
+var textEncoder2 = new TextEncoder();
+var maxInt32 = 2 ** 32;
+var maxUInt32 = 2 ** 31;
+function encodeLink(link, bytes) {
+  let i = bytes.length;
+  if (typeof link.Tsize === "number") {
+    if (link.Tsize < 0) {
+      throw new Error("Tsize cannot be negative");
+    }
+    if (!Number.isSafeInteger(link.Tsize)) {
+      throw new Error("Tsize too large for encoding");
+    }
+    i = encodeVarint(bytes, i, link.Tsize) - 1;
+    bytes[i] = 24;
+  }
+  if (typeof link.Name === "string") {
+    const nameBytes = textEncoder2.encode(link.Name);
+    i -= nameBytes.length;
+    bytes.set(nameBytes, i);
+    i = encodeVarint(bytes, i, nameBytes.length) - 1;
+    bytes[i] = 18;
+  }
+  if (link.Hash) {
+    i -= link.Hash.length;
+    bytes.set(link.Hash, i);
+    i = encodeVarint(bytes, i, link.Hash.length) - 1;
+    bytes[i] = 10;
+  }
+  return bytes.length - i;
+}
+function encodeNode(node) {
+  const size = sizeNode(node);
+  const bytes = new Uint8Array(size);
+  let i = size;
+  if (node.Data) {
+    i -= node.Data.length;
+    bytes.set(node.Data, i);
+    i = encodeVarint(bytes, i, node.Data.length) - 1;
+    bytes[i] = 10;
+  }
+  if (node.Links) {
+    for (let index = node.Links.length - 1; index >= 0; index--) {
+      const size2 = encodeLink(node.Links[index], bytes.subarray(0, i));
+      i -= size2;
+      i = encodeVarint(bytes, i, size2) - 1;
+      bytes[i] = 18;
+    }
+  }
+  return bytes;
+}
+function sizeLink(link) {
+  let n = 0;
+  if (link.Hash) {
+    const l = link.Hash.length;
+    n += 1 + l + sov(l);
+  }
+  if (typeof link.Name === "string") {
+    const l = textEncoder2.encode(link.Name).length;
+    n += 1 + l + sov(l);
+  }
+  if (typeof link.Tsize === "number") {
+    n += 1 + sov(link.Tsize);
+  }
+  return n;
+}
+function sizeNode(node) {
+  let n = 0;
+  if (node.Data) {
+    const l = node.Data.length;
+    n += 1 + l + sov(l);
+  }
+  if (node.Links) {
+    for (const link of node.Links) {
+      const l = sizeLink(link);
+      n += 1 + l + sov(l);
+    }
+  }
+  return n;
+}
+function encodeVarint(bytes, offset, v) {
+  offset -= sov(v);
+  const base3 = offset;
+  while (v >= maxUInt32) {
+    bytes[offset++] = v & 127 | 128;
+    v /= 128;
+  }
+  while (v >= 128) {
+    bytes[offset++] = v & 127 | 128;
+    v >>>= 7;
+  }
+  bytes[offset] = v;
+  return base3;
+}
+function sov(x) {
+  if (x % 2 === 0) {
+    x++;
+  }
+  return Math.floor((len64(x) + 6) / 7);
+}
+function len64(x) {
+  let n = 0;
+  if (x >= maxInt32) {
+    x = Math.floor(x / maxInt32);
+    n = 32;
+  }
+  if (x >= 1 << 16) {
+    x >>>= 16;
+    n += 16;
+  }
+  if (x >= 1 << 8) {
+    x >>>= 8;
+    n += 8;
+  }
+  return n + len8tab[x];
+}
+var len8tab = [
+  0,
+  1,
+  2,
+  2,
+  3,
+  3,
+  3,
+  3,
+  4,
+  4,
+  4,
+  4,
+  4,
+  4,
+  4,
+  4,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  5,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  6,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  7,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8,
+  8
+];
+
+// node_modules/@ipld/dag-pb/src/util.js
+var pbNodeProperties = ["Data", "Links"];
+var pbLinkProperties = ["Hash", "Name", "Tsize"];
+var textEncoder3 = new TextEncoder();
+function linkComparator(a, b) {
+  if (a === b) {
+    return 0;
+  }
+  const abuf = a.Name ? textEncoder3.encode(a.Name) : [];
+  const bbuf = b.Name ? textEncoder3.encode(b.Name) : [];
+  let x = abuf.length;
+  let y = bbuf.length;
+  for (let i = 0, len = Math.min(x, y); i < len; ++i) {
+    if (abuf[i] !== bbuf[i]) {
+      x = abuf[i];
+      y = bbuf[i];
+      break;
+    }
+  }
+  return x < y ? -1 : y < x ? 1 : 0;
+}
+function hasOnlyProperties(node, properties) {
+  return !Object.keys(node).some((p) => !properties.includes(p));
+}
+function asLink(link) {
+  if (typeof link.asCID === "object") {
+    const Hash = CID2.asCID(link);
+    if (!Hash) {
+      throw new TypeError("Invalid DAG-PB form");
+    }
+    return { Hash };
+  }
+  if (typeof link !== "object" || Array.isArray(link)) {
+    throw new TypeError("Invalid DAG-PB form");
+  }
+  const pbl = {};
+  if (link.Hash) {
+    let cid = CID2.asCID(link.Hash);
+    try {
+      if (!cid) {
+        if (typeof link.Hash === "string") {
+          cid = CID2.parse(link.Hash);
+        } else if (link.Hash instanceof Uint8Array) {
+          cid = CID2.decode(link.Hash);
+        }
+      }
+    } catch (e) {
+      throw new TypeError(`Invalid DAG-PB form: ${e.message}`);
+    }
+    if (cid) {
+      pbl.Hash = cid;
+    }
+  }
+  if (!pbl.Hash) {
+    throw new TypeError("Invalid DAG-PB form");
+  }
+  if (typeof link.Name === "string") {
+    pbl.Name = link.Name;
+  }
+  if (typeof link.Tsize === "number") {
+    pbl.Tsize = link.Tsize;
+  }
+  return pbl;
+}
+function prepare(node) {
+  if (node instanceof Uint8Array || typeof node === "string") {
+    node = { Data: node };
+  }
+  if (typeof node !== "object" || Array.isArray(node)) {
+    throw new TypeError("Invalid DAG-PB form");
+  }
+  const pbn = {};
+  if (node.Data !== void 0) {
+    if (typeof node.Data === "string") {
+      pbn.Data = textEncoder3.encode(node.Data);
+    } else if (node.Data instanceof Uint8Array) {
+      pbn.Data = node.Data;
+    } else {
+      throw new TypeError("Invalid DAG-PB form");
+    }
+  }
+  if (node.Links !== void 0) {
+    if (Array.isArray(node.Links)) {
+      pbn.Links = node.Links.map(asLink);
+      pbn.Links.sort(linkComparator);
+    } else {
+      throw new TypeError("Invalid DAG-PB form");
+    }
+  } else {
+    pbn.Links = [];
+  }
+  return pbn;
+}
+function validate(node) {
+  if (!node || typeof node !== "object" || Array.isArray(node) || node instanceof Uint8Array || node["/"] && node["/"] === node.bytes) {
+    throw new TypeError("Invalid DAG-PB form");
+  }
+  if (!hasOnlyProperties(node, pbNodeProperties)) {
+    throw new TypeError("Invalid DAG-PB form (extraneous properties)");
+  }
+  if (node.Data !== void 0 && !(node.Data instanceof Uint8Array)) {
+    throw new TypeError("Invalid DAG-PB form (Data must be bytes)");
+  }
+  if (!Array.isArray(node.Links)) {
+    throw new TypeError("Invalid DAG-PB form (Links must be a list)");
+  }
+  for (let i = 0; i < node.Links.length; i++) {
+    const link = node.Links[i];
+    if (!link || typeof link !== "object" || Array.isArray(link) || link instanceof Uint8Array || link["/"] && link["/"] === link.bytes) {
+      throw new TypeError("Invalid DAG-PB form (bad link)");
+    }
+    if (!hasOnlyProperties(link, pbLinkProperties)) {
+      throw new TypeError("Invalid DAG-PB form (extraneous properties on link)");
+    }
+    if (link.Hash === void 0) {
+      throw new TypeError("Invalid DAG-PB form (link must have a Hash)");
+    }
+    if (link.Hash == null || !link.Hash["/"] || link.Hash["/"] !== link.Hash.bytes) {
+      throw new TypeError("Invalid DAG-PB form (link Hash must be a CID)");
+    }
+    if (link.Name !== void 0 && typeof link.Name !== "string") {
+      throw new TypeError("Invalid DAG-PB form (link Name must be a string)");
+    }
+    if (link.Tsize !== void 0) {
+      if (typeof link.Tsize !== "number" || link.Tsize % 1 !== 0) {
+        throw new TypeError("Invalid DAG-PB form (link Tsize must be an integer)");
+      }
+      if (link.Tsize < 0) {
+        throw new TypeError("Invalid DAG-PB form (link Tsize cannot be negative)");
+      }
+    }
+    if (i > 0 && linkComparator(link, node.Links[i - 1]) === -1) {
+      throw new TypeError("Invalid DAG-PB form (links must be sorted by Name bytes)");
+    }
+  }
+}
+function createNode(data, links = []) {
+  return prepare({ Data: data, Links: links });
+}
+function createLink(name4, size, cid) {
+  return asLink({ Hash: cid, Name: name4, Tsize: size });
+}
+
+// node_modules/@ipld/dag-pb/src/index.js
+var name = "dag-pb";
+var code2 = 112;
+function encode7(node) {
+  validate(node);
+  const pbn = {};
+  if (node.Links) {
+    pbn.Links = node.Links.map((l) => {
+      const link = {};
+      if (l.Hash) {
+        link.Hash = l.Hash.bytes;
+      }
+      if (l.Name !== void 0) {
+        link.Name = l.Name;
+      }
+      if (l.Tsize !== void 0) {
+        link.Tsize = l.Tsize;
+      }
+      return link;
+    });
+  }
+  if (node.Data) {
+    pbn.Data = node.Data;
+  }
+  return encodeNode(pbn);
+}
+function decode11(bytes) {
+  const pbn = decodeNode(bytes);
+  const node = {};
+  if (pbn.Data) {
+    node.Data = pbn.Data;
+  }
+  if (pbn.Links) {
+    node.Links = pbn.Links.map((l) => {
+      const link = {};
+      try {
+        link.Hash = CID2.decode(l.Hash);
+      } catch (e) {
+      }
+      if (!link.Hash) {
+        throw new Error("Invalid Hash field found in link, expected CID");
+      }
+      if (l.Name !== void 0) {
+        link.Name = l.Name;
+      }
+      if (l.Tsize !== void 0) {
+        link.Tsize = l.Tsize;
+      }
+      return link;
+    });
+  }
+  return node;
+}
+
+// node_modules/cborg/lib/json/encode.js
+var JSONEncoder = class extends Array {
+  constructor() {
+    super();
+    this.inRecursive = [];
+  }
+  /**
+   * @param {Bl} buf
+   */
+  prefix(buf2) {
+    const recurs = this.inRecursive[this.inRecursive.length - 1];
+    if (recurs) {
+      if (recurs.type === Type.array) {
+        recurs.elements++;
+        if (recurs.elements !== 1) {
+          buf2.push([44]);
+        }
+      }
+      if (recurs.type === Type.map) {
+        recurs.elements++;
+        if (recurs.elements !== 1) {
+          if (recurs.elements % 2 === 1) {
+            buf2.push([44]);
+          } else {
+            buf2.push([58]);
+          }
+        }
+      }
+    }
+  }
+  /**
+   * @param {Bl} buf
+   * @param {Token} token
+   */
+  [Type.uint.major](buf2, token) {
+    this.prefix(buf2);
+    const is2 = String(token.value);
+    const isa = [];
+    for (let i = 0; i < is2.length; i++) {
+      isa[i] = is2.charCodeAt(i);
+    }
+    buf2.push(isa);
+  }
+  /**
+   * @param {Bl} buf
+   * @param {Token} token
+   */
+  [Type.negint.major](buf2, token) {
+    this[Type.uint.major](buf2, token);
+  }
+  /**
+   * @param {Bl} _buf
+   * @param {Token} _token
+   */
+  [Type.bytes.major](_buf, _token) {
+    throw new Error(`${encodeErrPrefix} unsupported type: Uint8Array`);
+  }
+  /**
+   * @param {Bl} buf
+   * @param {Token} token
+   */
+  [Type.string.major](buf2, token) {
+    this.prefix(buf2);
+    const byts = fromString(JSON.stringify(token.value));
+    buf2.push(byts.length > 32 ? asU8A(byts) : byts);
+  }
+  /**
+   * @param {Bl} buf
+   * @param {Token} _token
+   */
+  [Type.array.major](buf2, _token) {
+    this.prefix(buf2);
+    this.inRecursive.push({ type: Type.array, elements: 0 });
+    buf2.push([91]);
+  }
+  /**
+   * @param {Bl} buf
+   * @param {Token} _token
+   */
+  [Type.map.major](buf2, _token) {
+    this.prefix(buf2);
+    this.inRecursive.push({ type: Type.map, elements: 0 });
+    buf2.push([123]);
+  }
+  /**
+   * @param {Bl} _buf
+   * @param {Token} _token
+   */
+  [Type.tag.major](_buf, _token) {
+  }
+  /**
+   * @param {Bl} buf
+   * @param {Token} token
+   */
+  [Type.float.major](buf2, token) {
+    if (token.type.name === "break") {
+      const recurs = this.inRecursive.pop();
+      if (recurs) {
+        if (recurs.type === Type.array) {
+          buf2.push([93]);
+        } else if (recurs.type === Type.map) {
+          buf2.push([125]);
+        } else {
+          throw new Error("Unexpected recursive type; this should not happen!");
+        }
+        return;
+      }
+      throw new Error("Unexpected break; this should not happen!");
+    }
+    if (token.value === void 0) {
+      throw new Error(`${encodeErrPrefix} unsupported type: undefined`);
+    }
+    this.prefix(buf2);
+    if (token.type.name === "true") {
+      buf2.push([116, 114, 117, 101]);
+      return;
+    } else if (token.type.name === "false") {
+      buf2.push([102, 97, 108, 115, 101]);
+      return;
+    } else if (token.type.name === "null") {
+      buf2.push([110, 117, 108, 108]);
+      return;
+    }
+    const is2 = String(token.value);
+    const isa = [];
+    let dp = false;
+    for (let i = 0; i < is2.length; i++) {
+      isa[i] = is2.charCodeAt(i);
+      if (!dp && (isa[i] === 46 || isa[i] === 101 || isa[i] === 69)) {
+        dp = true;
+      }
+    }
+    if (!dp) {
+      isa.push(46);
+      isa.push(48);
+    }
+    buf2.push(isa);
+  }
+};
+
+// node_modules/cborg/lib/json/decode.js
+var Tokenizer = class {
+  /**
+   * @param {Uint8Array} data
+   * @param {DecodeOptions} options
+   */
+  constructor(data, options = {}) {
+    this._pos = 0;
+    this.data = data;
+    this.options = options;
+    this.modeStack = ["value"];
+    this.lastToken = "";
+  }
+  pos() {
+    return this._pos;
+  }
+  /**
+   * @returns {boolean}
+   */
+  done() {
+    return this._pos >= this.data.length;
+  }
+  /**
+   * @returns {number}
+   */
+  ch() {
+    return this.data[this._pos];
+  }
+  /**
+   * @returns {string}
+   */
+  currentMode() {
+    return this.modeStack[this.modeStack.length - 1];
+  }
+  skipWhitespace() {
+    let c = this.ch();
+    while (c === 32 || c === 9 || c === 13 || c === 10) {
+      c = this.data[++this._pos];
+    }
+  }
+  /**
+   * @param {number[]} str
+   */
+  expect(str) {
+    if (this.data.length - this._pos < str.length) {
+      throw new Error(`${decodeErrPrefix} unexpected end of input at position ${this._pos}`);
+    }
+    for (let i = 0; i < str.length; i++) {
+      if (this.data[this._pos++] !== str[i]) {
+        throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}, expected to find '${String.fromCharCode(...str)}'`);
+      }
+    }
+  }
  /**
   * Scan a JSON number starting at the current position and return it as a
   * uint/negint/float Token. Integers outside the safe range are returned as
   * BigInt when `options.allowBigInt` is true.
   *
   * @returns {Token}
   */
  parseNumber() {
    const startPos = this._pos;
    let negative = false;
    let float = false;
    // Consume a run of bytes drawn from `chars` (used for digit runs).
    const swallow = (chars) => {
      while (!this.done()) {
        const ch = this.ch();
        if (chars.includes(ch)) {
          this._pos++;
        } else {
          break;
        }
      }
    };
    if (this.ch() === 45) { // '-'
      negative = true;
      this._pos++;
    }
    if (this.ch() === 48) { // leading '0': only "0" or "0.xxx" are legal JSON
      this._pos++;
      if (this.ch() === 46) { // '.'
        this._pos++;
        float = true;
      } else {
        return new Token(Type.uint, 0, this._pos - startPos);
      }
    }
    swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]); // integer digits
    // A bare "-" with no digits is invalid.
    if (negative && this._pos === startPos + 1) {
      throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`);
    }
    if (!this.done() && this.ch() === 46) { // fraction part
      if (float) {
        throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`);
      }
      float = true;
      this._pos++;
      swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
    }
    if (!this.done() && (this.ch() === 101 || this.ch() === 69)) { // 'e' / 'E'
      float = true;
      this._pos++;
      if (!this.done() && (this.ch() === 43 || this.ch() === 45)) { // '+' / '-'
        this._pos++;
      }
      swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
    }
    // Reparse the consumed slice as text; parseFloat handles every accepted form.
    const numStr = String.fromCharCode.apply(null, this.data.subarray(startPos, this._pos));
    const num = parseFloat(numStr);
    if (float) {
      return new Token(Type.float, num, this._pos - startPos);
    }
    if (this.options.allowBigInt !== true || Number.isSafeInteger(num)) {
      return new Token(num >= 0 ? Type.uint : Type.negint, num, this._pos - startPos);
    }
    // Unsafe integer and BigInt allowed: keep full precision.
    return new Token(num >= 0 ? Type.uint : Type.negint, BigInt(numStr), this._pos - startPos);
  }
  /**
   * Scan a JSON string (the cursor must sit on the opening '"') and return a
   * string Token. A fast path handles short, plain-ASCII strings; otherwise
   * the string is decoded char-by-char, handling escapes and UTF-8 sequences.
   *
   * @returns {Token}
   */
  parseString() {
    if (this.ch() !== 34) { // '"'
      throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}; this shouldn't happen`);
    }
    this._pos++;
    // Fast path: scan up to 64KiB looking for a plain-ASCII string with no
    // escapes; bail to the slow path on '\', control chars or non-ASCII.
    for (let i = this._pos, l = 0; i < this.data.length && l < 65536; i++, l++) {
      const ch = this.data[i];
      if (ch === 92 || ch < 32 || ch >= 128) {
        break;
      }
      if (ch === 34) { // closing '"'
        const str = String.fromCharCode.apply(null, this.data.subarray(this._pos, i));
        this._pos = i + 1;
        return new Token(Type.string, str, l);
      }
    }
    const startPos = this._pos;
    const chars = []; // accumulated UTF-16 code units
    // Read four hex digits of a \uXXXX escape and return the code unit.
    const readu4 = () => {
      if (this._pos + 4 >= this.data.length) {
        throw new Error(`${decodeErrPrefix} unexpected end of unicode escape sequence at position ${this._pos}`);
      }
      let u4 = 0;
      for (let i = 0; i < 4; i++) {
        let ch = this.ch();
        if (ch >= 48 && ch <= 57) { // 0-9
          ch -= 48;
        } else if (ch >= 97 && ch <= 102) { // a-f
          ch = ch - 97 + 10;
        } else if (ch >= 65 && ch <= 70) { // A-F
          ch = ch - 65 + 10;
        } else {
          throw new Error(`${decodeErrPrefix} unexpected unicode escape character at position ${this._pos}`);
        }
        u4 = u4 * 16 + ch;
        this._pos++;
      }
      return u4;
    };
    // Decode one UTF-8 sequence (1-4 bytes) into `chars`; invalid sequences
    // become U+FFFD and consume a single byte.
    const readUtf8Char = () => {
      const firstByte = this.ch();
      let codePoint = null;
      let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
      if (this._pos + bytesPerSequence > this.data.length) {
        throw new Error(`${decodeErrPrefix} unexpected unicode sequence at position ${this._pos}`);
      }
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = this.data[this._pos + 1];
          if ((secondByte & 192) === 128) { // continuation byte 10xxxxxx
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            if (tempCodePoint > 127) { // reject overlong encoding
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = this.data[this._pos + 1];
          thirdByte = this.data[this._pos + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong encodings and UTF-16 surrogate code points.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = this.data[this._pos + 1];
          thirdByte = this.data[this._pos + 2];
          fourthByte = this.data[this._pos + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
      }
      if (codePoint === null) {
        // Invalid sequence: emit the replacement character, consume one byte.
        codePoint = 65533;
        bytesPerSequence = 1;
      } else if (codePoint > 65535) {
        // Astral code point: emit a UTF-16 surrogate pair.
        codePoint -= 65536;
        chars.push(codePoint >>> 10 & 1023 | 55296);
        codePoint = 56320 | codePoint & 1023;
      }
      chars.push(codePoint);
      this._pos += bytesPerSequence;
    };
    // Slow path: walk until the closing quote, expanding escapes as we go.
    while (!this.done()) {
      const ch = this.ch();
      let ch1;
      switch (ch) {
        case 92: // '\' escape
          this._pos++;
          if (this.done()) {
            throw new Error(`${decodeErrPrefix} unexpected string termination at position ${this._pos}`);
          }
          ch1 = this.ch();
          this._pos++;
          switch (ch1) {
            case 34: // '"'
            case 39: // "'"
            case 92: // '\'
            case 47: // '/'
              chars.push(ch1);
              break;
            case 98: // \b
              chars.push(8);
              break;
            case 116: // \t
              chars.push(9);
              break;
            case 110: // \n
              chars.push(10);
              break;
            case 102: // \f
              chars.push(12);
              break;
            case 114: // \r
              chars.push(13);
              break;
            case 117: // \uXXXX
              chars.push(readu4());
              break;
            default:
              throw new Error(`${decodeErrPrefix} unexpected string escape character at position ${this._pos}`);
          }
          break;
        case 34: // closing '"'
          this._pos++;
          return new Token(Type.string, decodeCodePointsArray(chars), this._pos - startPos);
        default:
          if (ch < 32) {
            throw new Error(`${decodeErrPrefix} invalid control character at position ${this._pos}`);
          } else if (ch < 128) {
            chars.push(ch);
            this._pos++;
          } else {
            readUtf8Char();
          }
      }
    }
    throw new Error(`${decodeErrPrefix} unexpected end of string at position ${this._pos}`);
  }
+  /**
+   * @returns {Token}
+   */
+  parseValue() {
+    switch (this.ch()) {
+      case 123:
+        this.modeStack.push("obj-start");
+        this._pos++;
+        return new Token(Type.map, Infinity, 1);
+      case 91:
+        this.modeStack.push("array-start");
+        this._pos++;
+        return new Token(Type.array, Infinity, 1);
+      case 34: {
+        return this.parseString();
+      }
+      case 110:
+        this.expect([110, 117, 108, 108]);
+        return new Token(Type.null, null, 4);
+      case 102:
+        this.expect([102, 97, 108, 115, 101]);
+        return new Token(Type.false, false, 5);
+      case 116:
+        this.expect([116, 114, 117, 101]);
+        return new Token(Type.true, true, 4);
+      case 45:
+      case 48:
+      case 49:
+      case 50:
+      case 51:
+      case 52:
+      case 53:
+      case 54:
+      case 55:
+      case 56:
+      case 57:
+        return this.parseNumber();
+      default:
+        throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}`);
+    }
+  }
  /**
   * Produce the next Token, driven by the mode stack: "value" for a bare
   * value, "array-start"/"array-value" inside arrays, and
   * "obj-start"/"obj-key"/"obj-value" inside objects. Container ends are
   * reported as break Tokens.
   *
   * @returns {Token}
   */
  next() {
    this.skipWhitespace();
    switch (this.currentMode()) {
      case "value":
        this.modeStack.pop();
        return this.parseValue();
      case "array-value": {
        this.modeStack.pop();
        if (this.ch() === 93) { // ']' closes the array
          this._pos++;
          this.skipWhitespace();
          return new Token(Type.break, void 0, 1);
        }
        if (this.ch() !== 44) { // ',' must separate elements
          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting array delimiter but found '${String.fromCharCode(this.ch())}'`);
        }
        this._pos++;
        this.modeStack.push("array-value");
        this.skipWhitespace();
        return this.parseValue();
      }
      case "array-start": {
        this.modeStack.pop();
        if (this.ch() === 93) { // empty array
          this._pos++;
          this.skipWhitespace();
          return new Token(Type.break, void 0, 1);
        }
        this.modeStack.push("array-value");
        this.skipWhitespace();
        return this.parseValue();
      }
      case "obj-key":
        if (this.ch() === 125) { // '}' closes the object
          this.modeStack.pop();
          this._pos++;
          this.skipWhitespace();
          return new Token(Type.break, void 0, 1);
        }
        if (this.ch() !== 44) { // ',' must separate entries
          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting object delimiter but found '${String.fromCharCode(this.ch())}'`);
        }
        this._pos++;
        this.skipWhitespace();
      // intentional fall-through: after the separator, parse the next key
      // exactly like the first key of the object
      case "obj-start": {
        this.modeStack.pop();
        if (this.ch() === 125) { // empty object
          this._pos++;
          this.skipWhitespace();
          return new Token(Type.break, void 0, 1);
        }
        const token = this.parseString();
        this.skipWhitespace();
        if (this.ch() !== 58) { // ':' between key and value
          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting key/value delimiter ':' but found '${String.fromCharCode(this.ch())}'`);
        }
        this._pos++;
        this.modeStack.push("obj-value");
        return token;
      }
      case "obj-value": {
        this.modeStack.pop();
        this.modeStack.push("obj-key");
        this.skipWhitespace();
        return this.parseValue();
      }
      default:
        throw new Error(`${decodeErrPrefix} unexpected parse state at position ${this._pos}; this shouldn't happen`);
    }
  }
+};
/**
 * Decode DAG-JSON bytes. A JSON Tokenizer is supplied by default but a
 * caller-provided `options.tokenizer` takes precedence.
 */
function decode12(data, options) {
  const opts = { tokenizer: new Tokenizer(data, options), ...options };
  return decode(data, opts);
}
+
// node_modules/multiformats/src/bases/base64.js
var base64_exports = {};
__export(base64_exports, {
  base64: () => base64,
  base64pad: () => base64pad,
  base64url: () => base64url,
  base64urlpad: () => base64urlpad
});
// All four variants are RFC 4648 codecs at 6 bits per character; they differ
// only in multibase prefix and alphabet (padded / URL-safe forms).
var makeBase64 = (prefix, name, alphabet) => rfc46482({ prefix, name, alphabet, bitsPerChar: 6 });
var base64 = makeBase64("m", "base64", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/");
var base64pad = makeBase64("M", "base64pad", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=");
var base64url = makeBase64("u", "base64url", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_");
var base64urlpad = makeBase64("U", "base64urlpad", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=");
+
// node_modules/multiformats/src/codecs/raw.js
var raw_exports = {};
__export(raw_exports, {
  code: () => code3,
  decode: () => decode13,
  encode: () => encode9,
  name: () => name2
});
// The "raw" codec (multicodec 0x55) performs no transformation: both encode
// and decode simply coerce the input to a Uint8Array.
var name2 = "raw";
var code3 = 85;
var encode9 = (node) => {
  return coerce2(node);
};
var decode13 = (data) => {
  return coerce2(data);
};
+
// node_modules/@helia/car/dist/src/utils/dag-walkers.js
// Walkers yield the child CIDs of a block so a DAG can be traversed per codec.
// dag-pb blocks link to children via their Links list.
var dagPbWalker = {
  codec: code2,
  async *walk(block) {
    const node = decode11(block);
    yield* node.Links.map((l) => l.Hash);
  }
};
// raw blocks are leaves: they never link to other blocks.
var rawWalker = {
  codec: code3,
  async *walk() {
  }
};
var CID_TAG2 = 42;
// dag-cbor: decode the block with a handler for CBOR tag 42 (CID) that
// collects every CID encountered, then yield the collected list.
var cborWalker = {
  codec: 113,
  async *walk(block) {
    const cids = [];
    const tags = [];
    tags[CID_TAG2] = (bytes) => {
      // Tag 42 payloads carry a multibase identity prefix byte (0x00).
      if (bytes[0] !== 0) {
        throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
      }
      const cid = CID2.decode(bytes.subarray(1));
      cids.push(cid);
      return cid;
    };
    decode(block, {
      tags
    });
    yield* cids;
  }
};
// Tokenizer that recognises the DAG-JSON special forms {"/": "<cid>"} and
// {"/": {"bytes": "<base64>"}} and rewrites them into CID-tag / bytes tokens.
var DagJsonTokenizer = class extends Tokenizer {
  // Stack of tokens read ahead of the consumer; _next() pops from the end,
  // so tokens pushed here are replayed in reverse push order.
  tokenBuffer;
  constructor(data, options) {
    super(data, options);
    this.tokenBuffer = [];
  }
  done() {
    return this.tokenBuffer.length === 0 && super.done();
  }
  // Pull from the look-ahead buffer before consuming fresh input.
  _next() {
    if (this.tokenBuffer.length > 0) {
      return this.tokenBuffer.pop();
    }
    return super.next();
  }
  /**
   * Implements rules outlined in https://github.com/ipld/specs/pull/356
   */
  next() {
    const token = this._next();
    if (token.type === Type.map) {
      const keyToken = this._next();
      if (keyToken.type === Type.string && keyToken.value === "/") {
        const valueToken = this._next();
        if (valueToken.type === Type.string) {
          // {"/": "<cid-string>"}: must close immediately; emit a tag-42
          // token and replay the CID string as its payload.
          const breakToken = this._next();
          if (breakToken.type !== Type.break) {
            throw new Error("Invalid encoded CID form");
          }
          this.tokenBuffer.push(valueToken);
          return new Token(Type.tag, 42, 0);
        }
        if (valueToken.type === Type.map) {
          const innerKeyToken = this._next();
          if (innerKeyToken.type === Type.string && innerKeyToken.value === "bytes") {
            const innerValueToken = this._next();
            if (innerValueToken.type === Type.string) {
              // {"/": {"bytes": "<base64>"}}: both maps must close; decode
              // the payload ("m" selects the unpadded base64 multibase).
              for (let i = 0; i < 2; i++) {
                const breakToken = this._next();
                if (breakToken.type !== Type.break) {
                  throw new Error("Invalid encoded Bytes form");
                }
              }
              const bytes = base64.decode(`m${innerValueToken.value}`);
              return new Token(Type.bytes, bytes, innerValueToken.value.length);
            }
            this.tokenBuffer.push(innerValueToken);
          }
          this.tokenBuffer.push(innerKeyToken);
        }
        this.tokenBuffer.push(valueToken);
      }
      // Not a special form: push look-ahead back (LIFO) and emit the map.
      this.tokenBuffer.push(keyToken);
    }
    return token;
  }
};
// dag-json: decode the block with a permissive DagJsonTokenizer and a tag-42
// handler that parses CID strings, collecting every CID for traversal.
var jsonWalker = {
  codec: 297,
  async *walk(block) {
    const cids = [];
    const tags = [];
    tags[CID_TAG2] = (string2) => {
      const cid = CID2.parse(string2);
      cids.push(cid);
      return cid;
    };
    decode12(block, {
      tags,
      tokenizer: new DagJsonTokenizer(block, {
        tags,
        allowIndefinite: true,
        allowUndefined: true,
        allowNaN: true,
        allowInfinity: true,
        allowBigInt: true,
        strict: false,
        rejectDuplicateMapKeys: false
      })
    });
    yield* cids;
  }
};
+
// node_modules/@helia/car/dist/src/index.js
// Built-in walkers covering raw, dag-pb, dag-cbor and dag-json blocks.
var DEFAULT_DAG_WALKERS = [
  rawWalker,
  dagPbWalker,
  cborWalker,
  jsonWalker
];
// Traversal queue concurrency used by DefaultCar.export.
var DAG_WALK_QUEUE_CONCURRENCY = 1;
var DefaultCar = class {
  components;
  // codec number -> dag walker, built from defaults plus any caller-supplied
  // walkers (caller entries override defaults for the same codec).
  dagWalkers;
  /**
   * @param {object} components - Helia components; `components.blockstore` is used
   * @param {object} init - may supply additional `dagWalkers`
   */
  constructor(components, init) {
    this.components = components;
    this.dagWalkers = {};
    [...DEFAULT_DAG_WALKERS, ...init.dagWalkers ?? []].forEach((dagWalker) => {
      this.dagWalkers[dagWalker.codec] = dagWalker;
    });
  }
  /**
   * Import every block from a CAR reader into the blockstore.
   */
  async import(reader, options) {
    await src_default(this.components.blockstore.putMany(src_default3(reader.blocks(), ({ cid, bytes }) => ({ cid, block: bytes })), options));
  }
  /**
   * Walk the DAG(s) under `root` and write every reachable block to the CAR
   * writer. The writer is always closed, even when traversal fails.
   */
  async export(root, writer, options) {
    const deferred = pDefer();
    const roots = Array.isArray(root) ? root : [root];
    const queue = new dist_default({
      concurrency: DAG_WALK_QUEUE_CONCURRENCY
    });
    queue.on("idle", () => {
      deferred.resolve();
    });
    queue.on("error", (err) => {
      // Reject (not resolve) so callers of export() observe traversal
      // failures; resolving with the error value silently swallowed it.
      deferred.reject(err);
    });
    for (const root2 of roots) {
      void queue.add(async () => {
        await this.#walkDag(root2, queue, async (cid, bytes) => {
          await writer.put({ cid, bytes });
        }, options);
      });
    }
    try {
      await deferred.promise;
    } finally {
      await writer.close();
    }
  }
  /**
   * Walk the DAG behind the passed CID, ensure all blocks are present in the blockstore
   * and update the pin count for them
   */
  async #walkDag(cid, queue, withBlock, options) {
    const dagWalker = this.dagWalkers[cid.code];
    if (dagWalker == null) {
      throw new Error(`No dag walker found for cid codec ${cid.code}`);
    }
    const block = await this.components.blockstore.get(cid, options);
    await withBlock(cid, block);
    for await (const cid2 of dagWalker.walk(block)) {
      void queue.add(async () => {
        await this.#walkDag(cid2, queue, withBlock, options);
      });
    }
  }
};
/**
 * Factory for the CAR import/export service backed by the given Helia node.
 */
function car(helia, init = {}) {
  const service = new DefaultCar(helia, init);
  return service;
}
+
+// node_modules/ipfs-unixfs-importer/dist/src/index.js
+var import_err_code4 = __toESM(require_err_code(), 1);
+
// node_modules/it-first/dist/src/index.js
function isAsyncIterable3(thing) {
  return thing[Symbol.asyncIterator] != null;
}
/**
 * Return the first item of an (async) iterable, or undefined when it is
 * empty. Async sources yield a Promise for that first item. The source
 * iterator is closed after the first item (early `break`/`return`).
 */
function first(source) {
  if (isAsyncIterable3(source)) {
    return (async () => {
      let head;
      for await (const item of source) {
        head = item;
        break;
      }
      return head;
    })();
  }
  let head;
  for (const item of source) {
    head = item;
    break;
  }
  return head;
}
var src_default4 = first;
+
// node_modules/it-batch/dist/src/index.js
function isAsyncIterable4(thing) {
  return thing[Symbol.asyncIterator] != null;
}
/**
 * Group items from an (async) iterable into arrays of at most `size` items.
 * Sizes below 1 are clamped to 1; non-integer sizes throw once iteration
 * begins (generator semantics defer validation until first consumption).
 */
function batch(source, size = 1) {
  size = Number(size);
  if (isAsyncIterable4(source)) {
    return async function* () {
      if (size < 1) {
        size = 1;
      }
      if (size !== Math.round(size)) {
        throw new Error("Batch size must be an integer");
      }
      const pending = [];
      for await (const item of source) {
        pending.push(item);
        while (pending.length >= size) {
          yield pending.splice(0, size);
        }
      }
      while (pending.length > 0) {
        yield pending.splice(0, size);
      }
    }();
  }
  return function* () {
    if (size < 1) {
      size = 1;
    }
    if (size !== Math.round(size)) {
      throw new Error("Batch size must be an integer");
    }
    const pending = [];
    for (const item of source) {
      pending.push(item);
      while (pending.length >= size) {
        yield pending.splice(0, size);
      }
    }
    while (pending.length > 0) {
      yield pending.splice(0, size);
    }
  }();
}
var src_default5 = batch;
+
// node_modules/it-parallel-batch/dist/src/index.js
/**
 * Run async task factories from `source` in batches of `size`, starting each
 * batch concurrently while yielding results in input order. The first
 * failure encountered (in input order) is rethrown.
 */
async function* parallelBatch(source, size = 1) {
  for await (const tasks of src_default5(source, size)) {
    const settled = tasks.map(async (task) => {
      try {
        return { ok: true, value: await task() };
      } catch (err) {
        return { ok: false, err };
      }
    });
    for (const promise of settled) {
      const outcome = await promise;
      if (!outcome.ok) {
        throw outcome.err;
      }
      yield outcome.value;
    }
  }
}
+
// node_modules/uint8arrays/dist/src/util/as-uint8array.js
/**
 * Reinterpret `buf` as a plain Uint8Array view over the same memory (strips
 * any Buffer subclass). In environments without a global Buffer the input is
 * returned unchanged.
 */
function asUint8Array(buf2) {
  if (globalThis.Buffer == null) {
    return buf2;
  }
  return new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength);
}
+
// node_modules/uint8arrays/dist/src/alloc.js
/**
 * Allocate `size` zero-filled bytes, preferring Buffer.alloc when available.
 */
function alloc2(size = 0) {
  if (globalThis.Buffer?.alloc != null) {
    return asUint8Array(globalThis.Buffer.alloc(size));
  }
  return new Uint8Array(size);
}
/**
 * Allocate `size` bytes without zero-filling (contents are arbitrary) via
 * Buffer.allocUnsafe when available; falls back to a zeroed Uint8Array.
 */
function allocUnsafe(size = 0) {
  if (globalThis.Buffer?.allocUnsafe != null) {
    return asUint8Array(globalThis.Buffer.allocUnsafe(size));
  }
  return new Uint8Array(size);
}
+
// node_modules/uint8arrays/dist/src/concat.js
/**
 * Concatenate `arrays` into a single Uint8Array. When `length4` is omitted it
 * is computed as the sum of the input lengths.
 */
function concat2(arrays, length4) {
  let total = length4;
  if (total == null) {
    total = 0;
    for (const arr of arrays) {
      total += arr.length;
    }
  }
  const output = allocUnsafe(total);
  let offset = 0;
  for (const arr of arrays) {
    output.set(arr, offset);
    offset += arr.length;
  }
  return asUint8Array(output);
}
+
// node_modules/uint8arrays/dist/src/equals.js
/**
 * Byte-wise equality of two Uint8Arrays; identical references short-circuit
 * to true, differing lengths to false.
 */
function equals5(a, b) {
  if (a === b) {
    return true;
  }
  if (a.byteLength !== b.byteLength) {
    return false;
  }
  for (let i = a.byteLength - 1; i >= 0; i--) {
    if (a[i] !== b[i]) {
      return false;
    }
  }
  return true;
}
+
// node_modules/uint8arraylist/dist/src/index.js
var symbol = Symbol.for("@achingbrain/uint8arraylist");
/**
 * Locate the buffer containing the global byte `index` and the local offset
 * inside that buffer.
 *
 * @throws {RangeError} when index is nullish, negative or past the total length
 */
function findBufAndOffset(bufs, index) {
  if (index == null || index < 0) {
    throw new RangeError("index is out of bounds");
  }
  let start = 0;
  for (const candidate of bufs) {
    const end = start + candidate.byteLength;
    if (index < end) {
      return {
        buf: candidate,
        index: index - start
      };
    }
    start = end;
  }
  throw new RangeError("index is out of bounds");
}
// Duck-type check via the shared well-known symbol rather than instanceof,
// so independent copies of the module interoperate.
function isUint8ArrayList(value) {
  return Boolean(value == null ? void 0 : value[symbol]);
}
// List of Uint8Arrays presented as one contiguous byte sequence; appends and
// prepends avoid copying by keeping the segment list.
var Uint8ArrayList = class _Uint8ArrayList {
  // Ordered backing segments.
  bufs;
  // Total byte length across all segments (kept in sync by mutators).
  length;
  // Brand for isUint8ArrayList duck-typing across module copies.
  [symbol] = true;
  constructor(...data) {
    this.bufs = [];
    this.length = 0;
    if (data.length > 0) {
      this.appendAll(data);
    }
  }
  // Iterates the underlying segments (not individual bytes).
  *[Symbol.iterator]() {
    yield* this.bufs;
  }
  get byteLength() {
    return this.length;
  }
  /**
   * Add one or more `bufs` to the end of this Uint8ArrayList
   *
   * @param {...(Uint8Array|Uint8ArrayList)} bufs
   */
  append(...bufs) {
    this.appendAll(bufs);
  }
+  /**
+   * Add all `bufs` to the end of this Uint8ArrayList
+   */
+  appendAll(bufs) {
+    let length4 = 0;
+    for (const buf2 of bufs) {
+      if (buf2 instanceof Uint8Array) {
+        length4 += buf2.byteLength;
+        this.bufs.push(buf2);
+      } else if (isUint8ArrayList(buf2)) {
+        length4 += buf2.byteLength;
+        this.bufs.push(...buf2.bufs);
+      } else {
+        throw new Error("Could not append value, must be an Uint8Array or a Uint8ArrayList");
+      }
+    }
+    this.length += length4;
+  }
  /**
   * Add one or more `bufs` to the start of this Uint8ArrayList
   *
   * @param {...(Uint8Array|Uint8ArrayList)} bufs
   */
  prepend(...bufs) {
    this.prependAll(bufs);
  }
+  /**
+   * Add all `bufs` to the start of this Uint8ArrayList
+   */
+  prependAll(bufs) {
+    let length4 = 0;
+    for (const buf2 of bufs.reverse()) {
+      if (buf2 instanceof Uint8Array) {
+        length4 += buf2.byteLength;
+        this.bufs.unshift(buf2);
+      } else if (isUint8ArrayList(buf2)) {
+        length4 += buf2.byteLength;
+        this.bufs.unshift(...buf2.bufs);
+      } else {
+        throw new Error("Could not prepend value, must be an Uint8Array or a Uint8ArrayList");
+      }
+    }
+    this.length += length4;
+  }
+  /**
+   * Read the value at `index`
+   */
+  get(index) {
+    const res = findBufAndOffset(this.bufs, index);
+    return res.buf[res.index];
+  }
+  /**
+   * Set the value at `index` to `value`
+   */
+  set(index, value) {
+    const res = findBufAndOffset(this.bufs, index);
+    res.buf[res.index] = value;
+  }
+  /**
+   * Copy bytes from `buf` to the index specified by `offset`
+   */
+  write(buf2, offset = 0) {
+    if (buf2 instanceof Uint8Array) {
+      for (let i = 0; i < buf2.length; i++) {
+        this.set(offset + i, buf2[i]);
+      }
+    } else if (isUint8ArrayList(buf2)) {
+      for (let i = 0; i < buf2.length; i++) {
+        this.set(offset + i, buf2.get(i));
+      }
+    } else {
+      throw new Error("Could not write value, must be an Uint8Array or a Uint8ArrayList");
+    }
+  }
  /**
   * Remove bytes from the front of the pool
   *
   * @param {number} bytes - count to drop; non-positive or NaN is a no-op
   */
  consume(bytes) {
    bytes = Math.trunc(bytes);
    if (Number.isNaN(bytes) || bytes <= 0) {
      return;
    }
    // Consuming everything: reset to the empty state in one step.
    if (bytes === this.byteLength) {
      this.bufs = [];
      this.length = 0;
      return;
    }
    while (this.bufs.length > 0) {
      if (bytes >= this.bufs[0].byteLength) {
        // Drop whole leading segments while they fit in the budget.
        bytes -= this.bufs[0].byteLength;
        this.length -= this.bufs[0].byteLength;
        this.bufs.shift();
      } else {
        // Partial segment: trim its front via a no-copy subarray view.
        this.bufs[0] = this.bufs[0].subarray(bytes);
        this.length -= bytes;
        break;
      }
    }
  }
  /**
   * Extracts a section of an array and returns a new array.
   *
   * This is a copy operation as it is with Uint8Arrays and Arrays
   * - note this is different to the behaviour of Node Buffers.
   *
   * @param {number} [beginInclusive]
   * @param {number} [endExclusive]
   * @returns {Uint8Array} a freshly-allocated copy of the range
   */
  slice(beginInclusive, endExclusive) {
    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
    return concat2(bufs, length4);
  }
  /**
   * Returns a alloc from the given start and end element index.
   *
   * In the best case where the data extracted comes from a single Uint8Array
   * internally this is a no-copy operation otherwise it is a copy operation.
   *
   * @param {number} [beginInclusive]
   * @param {number} [endExclusive]
   * @returns {Uint8Array}
   */
  subarray(beginInclusive, endExclusive) {
    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
    // Single-segment ranges can be returned as-is (no copy).
    if (bufs.length === 1) {
      return bufs[0];
    }
    return concat2(bufs, length4);
  }
  /**
   * Returns a allocList from the given start and end element index.
   *
   * This is a no-copy operation.
   *
   * @param {number} [beginInclusive]
   * @param {number} [endExclusive]
   * @returns {Uint8ArrayList} a new list sharing the underlying segments
   */
  sublist(beginInclusive, endExclusive) {
    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
    const list = new _Uint8ArrayList();
    list.length = length4;
    list.bufs = [...bufs];
    return list;
  }
  // Resolve a (begin, end) range — supporting negative/omitted bounds — to the
  // minimal list of segment (sub)views covering it, plus the range length.
  _subList(beginInclusive, endExclusive) {
    beginInclusive = beginInclusive ?? 0;
    endExclusive = endExclusive ?? this.length;
    // Negative indices count back from the end, as with Array#slice.
    if (beginInclusive < 0) {
      beginInclusive = this.length + beginInclusive;
    }
    if (endExclusive < 0) {
      endExclusive = this.length + endExclusive;
    }
    if (beginInclusive < 0 || endExclusive > this.length) {
      throw new RangeError("index is out of bounds");
    }
    if (beginInclusive === endExclusive) {
      return { bufs: [], length: 0 };
    }
    // Whole-list request: hand back the segment array itself (no copy).
    if (beginInclusive === 0 && endExclusive === this.length) {
      return { bufs: this.bufs, length: this.length };
    }
    const bufs = [];
    let offset = 0;
    for (let i = 0; i < this.bufs.length; i++) {
      const buf2 = this.bufs[i];
      const bufStart = offset;
      const bufEnd = bufStart + buf2.byteLength;
      offset = bufEnd;
      // Segment entirely before the range: keep scanning.
      if (beginInclusive >= bufEnd) {
        continue;
      }
      const sliceStartInBuf = beginInclusive >= bufStart && beginInclusive < bufEnd;
      const sliceEndsInBuf = endExclusive > bufStart && endExclusive <= bufEnd;
      if (sliceStartInBuf && sliceEndsInBuf) {
        // Range falls entirely within this one segment.
        if (beginInclusive === bufStart && endExclusive === bufEnd) {
          bufs.push(buf2);
          break;
        }
        const start = beginInclusive - bufStart;
        bufs.push(buf2.subarray(start, start + (endExclusive - beginInclusive)));
        break;
      }
      if (sliceStartInBuf) {
        // First segment of a multi-segment range: trim its front if needed.
        if (beginInclusive === 0) {
          bufs.push(buf2);
          continue;
        }
        bufs.push(buf2.subarray(beginInclusive - bufStart));
        continue;
      }
      if (sliceEndsInBuf) {
        // Last segment of the range: trim its tail if needed.
        if (endExclusive === bufEnd) {
          bufs.push(buf2);
          break;
        }
        bufs.push(buf2.subarray(0, endExclusive - bufStart));
        break;
      }
      // Segment fully inside the range: include it whole.
      bufs.push(buf2);
    }
    return { bufs, length: endExclusive - beginInclusive };
  }
  /**
   * Find the first occurrence of `search` at or after `offset` using a
   * bad-character skip table (Boyer-Moore-Horspool style); returns -1 when
   * absent. Negative offsets count back from the end.
   *
   * @param {Uint8Array|Uint8ArrayList} search
   * @param {number} [offset=0]
   * @returns {number}
   */
  indexOf(search, offset = 0) {
    if (!isUint8ArrayList(search) && !(search instanceof Uint8Array)) {
      throw new TypeError('The "value" argument must be a Uint8ArrayList or Uint8Array');
    }
    const needle = search instanceof Uint8Array ? search : search.subarray();
    offset = Number(offset ?? 0);
    if (isNaN(offset)) {
      offset = 0;
    }
    if (offset < 0) {
      offset = this.length + offset;
    }
    if (offset < 0) {
      offset = 0;
    }
    if (search.length === 0) {
      return offset > this.length ? this.length : offset;
    }
    const M = needle.byteLength;
    if (M === 0) {
      throw new TypeError("search must be at least 1 byte long");
    }
    const radix = 256;
    // right[b] = rightmost position of byte b in the needle, or -1.
    const rightmostPositions = new Int32Array(radix);
    for (let c = 0; c < radix; c++) {
      rightmostPositions[c] = -1;
    }
    for (let j = 0; j < M; j++) {
      rightmostPositions[needle[j]] = j;
    }
    const right = rightmostPositions;
    const lastIndex = this.byteLength - needle.byteLength;
    const lastPatIndex = needle.byteLength - 1;
    let skip;
    // Compare the needle right-to-left; on mismatch, advance by the
    // bad-character skip derived from the table above.
    for (let i = offset; i <= lastIndex; i += skip) {
      skip = 0;
      for (let j = lastPatIndex; j >= 0; j--) {
        const char = this.get(i + j);
        if (needle[j] !== char) {
          skip = Math.max(1, j - right[char]);
          break;
        }
      }
      if (skip === 0) {
        return i;
      }
    }
    return -1;
  }
  // DataView-style numeric accessors. Each getter takes a subarray over the
  // relevant bytes (copying only when the range spans segments) and reads via
  // DataView; each setter serialises into a scratch buffer and writes it back
  // byte-by-byte with write(). `littleEndian` follows DataView semantics
  // (big-endian when false/undefined).
  getInt8(byteOffset) {
    const buf2 = this.subarray(byteOffset, byteOffset + 1);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getInt8(0);
  }
  setInt8(byteOffset, value) {
    const buf2 = allocUnsafe(1);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setInt8(0, value);
    this.write(buf2, byteOffset);
  }
  getInt16(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 2);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getInt16(0, littleEndian);
  }
  setInt16(byteOffset, value, littleEndian) {
    const buf2 = alloc2(2);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setInt16(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
  getInt32(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 4);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getInt32(0, littleEndian);
  }
  setInt32(byteOffset, value, littleEndian) {
    const buf2 = alloc2(4);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setInt32(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
  getBigInt64(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 8);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getBigInt64(0, littleEndian);
  }
  setBigInt64(byteOffset, value, littleEndian) {
    const buf2 = alloc2(8);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setBigInt64(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
  getUint8(byteOffset) {
    const buf2 = this.subarray(byteOffset, byteOffset + 1);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getUint8(0);
  }
  setUint8(byteOffset, value) {
    const buf2 = allocUnsafe(1);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setUint8(0, value);
    this.write(buf2, byteOffset);
  }
  getUint16(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 2);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getUint16(0, littleEndian);
  }
  setUint16(byteOffset, value, littleEndian) {
    const buf2 = alloc2(2);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setUint16(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
  getUint32(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 4);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getUint32(0, littleEndian);
  }
  setUint32(byteOffset, value, littleEndian) {
    const buf2 = alloc2(4);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setUint32(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
  getBigUint64(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 8);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getBigUint64(0, littleEndian);
  }
  setBigUint64(byteOffset, value, littleEndian) {
    const buf2 = alloc2(8);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setBigUint64(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
  getFloat32(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 4);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getFloat32(0, littleEndian);
  }
  setFloat32(byteOffset, value, littleEndian) {
    const buf2 = alloc2(4);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setFloat32(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
  getFloat64(byteOffset, littleEndian) {
    const buf2 = this.subarray(byteOffset, byteOffset + 8);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    return view.getFloat64(0, littleEndian);
  }
  setFloat64(byteOffset, value, littleEndian) {
    const buf2 = alloc2(8);
    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
    view.setFloat64(0, value, littleEndian);
    this.write(buf2, byteOffset);
  }
+  equals(other) {
+    if (other == null) {
+      return false;
+    }
+    if (!(other instanceof _Uint8ArrayList)) {
+      return false;
+    }
+    if (other.bufs.length !== this.bufs.length) {
+      return false;
+    }
+    for (let i = 0; i < this.bufs.length; i++) {
+      if (!equals5(this.bufs[i], other.bufs[i])) {
+        return false;
+      }
+    }
+    return true;
+  }
+  /**
+   * Create a Uint8ArrayList from a pre-existing list of Uint8Arrays.  Use this
+   * method if you know the total size of all the Uint8Arrays ahead of time.
+   */
+  static fromUint8Arrays(bufs, length4) {
+    const list = new _Uint8ArrayList();
+    list.bufs = bufs;
+    if (length4 == null) {
+      length4 = bufs.reduce((acc, curr) => acc + curr.byteLength, 0);
+    }
+    list.length = length4;
+    return list;
+  }
+};
+
+// node_modules/ipfs-unixfs-importer/dist/src/chunker/fixed-size.js
+var DEFAULT_CHUNK_SIZE = 262144;
+var fixedSize = (options = {}) => {
+  const chunkSize = options.chunkSize ?? DEFAULT_CHUNK_SIZE;
+  return async function* fixedSizeChunker(source) {
+    let list = new Uint8ArrayList();
+    let currentLength = 0;
+    let emitted = false;
+    for await (const buffer2 of source) {
+      list.append(buffer2);
+      currentLength += buffer2.length;
+      while (currentLength >= chunkSize) {
+        yield list.slice(0, chunkSize);
+        emitted = true;
+        if (chunkSize === list.length) {
+          list = new Uint8ArrayList();
+          currentLength = 0;
+        } else {
+          const newBl = new Uint8ArrayList();
+          newBl.append(list.sublist(chunkSize));
+          list = newBl;
+          currentLength -= chunkSize;
+        }
+      }
+    }
+    if (!emitted || currentLength > 0) {
+      yield list.subarray(0, currentLength);
+    }
+  };
+};
+
+// node_modules/ipfs-unixfs/dist/src/index.js
+var import_err_code = __toESM(require_err_code(), 1);
+
+// node_modules/protons-runtime/dist/src/utils/float.js
+var f32 = new Float32Array([-0]);
+var f8b = new Uint8Array(f32.buffer);
+function writeFloatLE(val, buf2, pos) {
+  f32[0] = val;
+  buf2[pos] = f8b[0];
+  buf2[pos + 1] = f8b[1];
+  buf2[pos + 2] = f8b[2];
+  buf2[pos + 3] = f8b[3];
+}
+function readFloatLE(buf2, pos) {
+  f8b[0] = buf2[pos];
+  f8b[1] = buf2[pos + 1];
+  f8b[2] = buf2[pos + 2];
+  f8b[3] = buf2[pos + 3];
+  return f32[0];
+}
+var f64 = new Float64Array([-0]);
+var d8b = new Uint8Array(f64.buffer);
+function writeDoubleLE(val, buf2, pos) {
+  f64[0] = val;
+  buf2[pos] = d8b[0];
+  buf2[pos + 1] = d8b[1];
+  buf2[pos + 2] = d8b[2];
+  buf2[pos + 3] = d8b[3];
+  buf2[pos + 4] = d8b[4];
+  buf2[pos + 5] = d8b[5];
+  buf2[pos + 6] = d8b[6];
+  buf2[pos + 7] = d8b[7];
+}
+function readDoubleLE(buf2, pos) {
+  d8b[0] = buf2[pos];
+  d8b[1] = buf2[pos + 1];
+  d8b[2] = buf2[pos + 2];
+  d8b[3] = buf2[pos + 3];
+  d8b[4] = buf2[pos + 4];
+  d8b[5] = buf2[pos + 5];
+  d8b[6] = buf2[pos + 6];
+  d8b[7] = buf2[pos + 7];
+  return f64[0];
+}
+
+// node_modules/protons-runtime/dist/src/utils/longbits.js
+var MAX_SAFE_NUMBER_INTEGER = BigInt(Number.MAX_SAFE_INTEGER);
+var MIN_SAFE_NUMBER_INTEGER = BigInt(Number.MIN_SAFE_INTEGER);
+var LongBits = class _LongBits {
+  lo;
+  hi;
+  constructor(lo, hi) {
+    this.lo = lo | 0;
+    this.hi = hi | 0;
+  }
+  /**
+   * Converts this long bits to a possibly unsafe JavaScript number
+   */
+  toNumber(unsigned = false) {
+    if (!unsigned && this.hi >>> 31 > 0) {
+      const lo = ~this.lo + 1 >>> 0;
+      let hi = ~this.hi >>> 0;
+      if (lo === 0) {
+        hi = hi + 1 >>> 0;
+      }
+      return -(lo + hi * 4294967296);
+    }
+    return this.lo + this.hi * 4294967296;
+  }
+  /**
+   * Converts this long bits to a bigint
+   */
+  toBigInt(unsigned = false) {
+    if (unsigned) {
+      return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
+    }
+    if (this.hi >>> 31 !== 0) {
+      const lo = ~this.lo + 1 >>> 0;
+      let hi = ~this.hi >>> 0;
+      if (lo === 0) {
+        hi = hi + 1 >>> 0;
+      }
+      return -(BigInt(lo) + (BigInt(hi) << 32n));
+    }
+    return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
+  }
+  /**
+   * Converts this long bits to a string
+   */
+  toString(unsigned = false) {
+    return this.toBigInt(unsigned).toString();
+  }
+  /**
+   * Zig-zag encodes this long bits
+   */
+  zzEncode() {
+    const mask = this.hi >> 31;
+    this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;
+    this.lo = (this.lo << 1 ^ mask) >>> 0;
+    return this;
+  }
+  /**
+   * Zig-zag decodes this long bits
+   */
+  zzDecode() {
+    const mask = -(this.lo & 1);
+    this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;
+    this.hi = (this.hi >>> 1 ^ mask) >>> 0;
+    return this;
+  }
+  /**
+   * Calculates the length of this longbits when encoded as a varint.
+   */
+  length() {
+    const part0 = this.lo;
+    const part1 = (this.lo >>> 28 | this.hi << 4) >>> 0;
+    const part2 = this.hi >>> 24;
+    return part2 === 0 ? part1 === 0 ? part0 < 16384 ? part0 < 128 ? 1 : 2 : part0 < 2097152 ? 3 : 4 : part1 < 16384 ? part1 < 128 ? 5 : 6 : part1 < 2097152 ? 7 : 8 : part2 < 128 ? 9 : 10;
+  }
+  /**
+   * Constructs new long bits from the specified number
+   */
+  static fromBigInt(value) {
+    if (value === 0n) {
+      return zero;
+    }
+    if (value < MAX_SAFE_NUMBER_INTEGER && value > MIN_SAFE_NUMBER_INTEGER) {
+      return this.fromNumber(Number(value));
+    }
+    const negative = value < 0n;
+    if (negative) {
+      value = -value;
+    }
+    let hi = value >> 32n;
+    let lo = value - (hi << 32n);
+    if (negative) {
+      hi = ~hi | 0n;
+      lo = ~lo | 0n;
+      if (++lo > TWO_32) {
+        lo = 0n;
+        if (++hi > TWO_32) {
+          hi = 0n;
+        }
+      }
+    }
+    return new _LongBits(Number(lo), Number(hi));
+  }
+  /**
+   * Constructs new long bits from the specified number
+   */
+  static fromNumber(value) {
+    if (value === 0) {
+      return zero;
+    }
+    const sign = value < 0;
+    if (sign) {
+      value = -value;
+    }
+    let lo = value >>> 0;
+    let hi = (value - lo) / 4294967296 >>> 0;
+    if (sign) {
+      hi = ~hi >>> 0;
+      lo = ~lo >>> 0;
+      if (++lo > 4294967295) {
+        lo = 0;
+        if (++hi > 4294967295) {
+          hi = 0;
+        }
+      }
+    }
+    return new _LongBits(lo, hi);
+  }
+  /**
+   * Constructs new long bits from a number, long or string
+   */
+  static from(value) {
+    if (typeof value === "number") {
+      return _LongBits.fromNumber(value);
+    }
+    if (typeof value === "bigint") {
+      return _LongBits.fromBigInt(value);
+    }
+    if (typeof value === "string") {
+      return _LongBits.fromBigInt(BigInt(value));
+    }
+    return value.low != null || value.high != null ? new _LongBits(value.low >>> 0, value.high >>> 0) : zero;
+  }
+};
+var zero = new LongBits(0, 0);
+zero.toBigInt = function() {
+  return 0n;
+};
+zero.zzEncode = zero.zzDecode = function() {
+  return this;
+};
+zero.length = function() {
+  return 1;
+};
+var TWO_32 = 4294967296n;
+
+// node_modules/protons-runtime/dist/src/utils/utf8.js
+function length3(string2) {
+  let len = 0;
+  let c = 0;
+  for (let i = 0; i < string2.length; ++i) {
+    c = string2.charCodeAt(i);
+    if (c < 128) {
+      len += 1;
+    } else if (c < 2048) {
+      len += 2;
+    } else if ((c & 64512) === 55296 && (string2.charCodeAt(i + 1) & 64512) === 56320) {
+      ++i;
+      len += 4;
+    } else {
+      len += 3;
+    }
+  }
+  return len;
+}
+function read3(buffer2, start, end) {
+  const len = end - start;
+  if (len < 1) {
+    return "";
+  }
+  let parts;
+  const chunk = [];
+  let i = 0;
+  let t;
+  while (start < end) {
+    t = buffer2[start++];
+    if (t < 128) {
+      chunk[i++] = t;
+    } else if (t > 191 && t < 224) {
+      chunk[i++] = (t & 31) << 6 | buffer2[start++] & 63;
+    } else if (t > 239 && t < 365) {
+      t = ((t & 7) << 18 | (buffer2[start++] & 63) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63) - 65536;
+      chunk[i++] = 55296 + (t >> 10);
+      chunk[i++] = 56320 + (t & 1023);
+    } else {
+      chunk[i++] = (t & 15) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63;
+    }
+    if (i > 8191) {
+      (parts ?? (parts = [])).push(String.fromCharCode.apply(String, chunk));
+      i = 0;
+    }
+  }
+  if (parts != null) {
+    if (i > 0) {
+      parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));
+    }
+    return parts.join("");
+  }
+  return String.fromCharCode.apply(String, chunk.slice(0, i));
+}
+function write(string2, buffer2, offset) {
+  const start = offset;
+  let c1;
+  let c2;
+  for (let i = 0; i < string2.length; ++i) {
+    c1 = string2.charCodeAt(i);
+    if (c1 < 128) {
+      buffer2[offset++] = c1;
+    } else if (c1 < 2048) {
+      buffer2[offset++] = c1 >> 6 | 192;
+      buffer2[offset++] = c1 & 63 | 128;
+    } else if ((c1 & 64512) === 55296 && ((c2 = string2.charCodeAt(i + 1)) & 64512) === 56320) {
+      c1 = 65536 + ((c1 & 1023) << 10) + (c2 & 1023);
+      ++i;
+      buffer2[offset++] = c1 >> 18 | 240;
+      buffer2[offset++] = c1 >> 12 & 63 | 128;
+      buffer2[offset++] = c1 >> 6 & 63 | 128;
+      buffer2[offset++] = c1 & 63 | 128;
+    } else {
+      buffer2[offset++] = c1 >> 12 | 224;
+      buffer2[offset++] = c1 >> 6 & 63 | 128;
+      buffer2[offset++] = c1 & 63 | 128;
+    }
+  }
+  return offset - start;
+}
+
+// node_modules/protons-runtime/dist/src/utils/reader.js
+function indexOutOfRange(reader, writeLength) {
+  return RangeError(`index out of range: ${reader.pos} + ${writeLength ?? 1} > ${reader.len}`);
+}
+function readFixed32End(buf2, end) {
+  return (buf2[end - 4] | buf2[end - 3] << 8 | buf2[end - 2] << 16 | buf2[end - 1] << 24) >>> 0;
+}
+var Uint8ArrayReader = class {
+  buf;
+  pos;
+  len;
+  _slice = Uint8Array.prototype.subarray;
+  constructor(buffer2) {
+    this.buf = buffer2;
+    this.pos = 0;
+    this.len = buffer2.length;
+  }
+  /**
+   * Reads a varint as an unsigned 32 bit value
+   */
+  uint32() {
+    let value = 4294967295;
+    value = (this.buf[this.pos] & 127) >>> 0;
+    if (this.buf[this.pos++] < 128)
+      return value;
+    value = (value | (this.buf[this.pos] & 127) << 7) >>> 0;
+    if (this.buf[this.pos++] < 128)
+      return value;
+    value = (value | (this.buf[this.pos] & 127) << 14) >>> 0;
+    if (this.buf[this.pos++] < 128)
+      return value;
+    value = (value | (this.buf[this.pos] & 127) << 21) >>> 0;
+    if (this.buf[this.pos++] < 128)
+      return value;
+    value = (value | (this.buf[this.pos] & 15) << 28) >>> 0;
+    if (this.buf[this.pos++] < 128)
+      return value;
+    if ((this.pos += 5) > this.len) {
+      this.pos = this.len;
+      throw indexOutOfRange(this, 10);
+    }
+    return value;
+  }
+  /**
+   * Reads a varint as a signed 32 bit value
+   */
+  int32() {
+    return this.uint32() | 0;
+  }
+  /**
+   * Reads a zig-zag encoded varint as a signed 32 bit value
+   */
+  sint32() {
+    const value = this.uint32();
+    return value >>> 1 ^ -(value & 1) | 0;
+  }
+  /**
+   * Reads a varint as a boolean
+   */
+  bool() {
+    return this.uint32() !== 0;
+  }
+  /**
+   * Reads fixed 32 bits as an unsigned 32 bit integer
+   */
+  fixed32() {
+    if (this.pos + 4 > this.len) {
+      throw indexOutOfRange(this, 4);
+    }
+    const res = readFixed32End(this.buf, this.pos += 4);
+    return res;
+  }
+  /**
+   * Reads fixed 32 bits as a signed 32 bit integer
+   */
+  sfixed32() {
+    if (this.pos + 4 > this.len) {
+      throw indexOutOfRange(this, 4);
+    }
+    const res = readFixed32End(this.buf, this.pos += 4) | 0;
+    return res;
+  }
+  /**
+   * Reads a float (32 bit) as a number
+   */
+  float() {
+    if (this.pos + 4 > this.len) {
+      throw indexOutOfRange(this, 4);
+    }
+    const value = readFloatLE(this.buf, this.pos);
+    this.pos += 4;
+    return value;
+  }
+  /**
+   * Reads a double (64 bit float) as a number
+   */
+  double() {
+    if (this.pos + 8 > this.len) {
+      throw indexOutOfRange(this, 4);
+    }
+    const value = readDoubleLE(this.buf, this.pos);
+    this.pos += 8;
+    return value;
+  }
+  /**
+   * Reads a sequence of bytes preceded by its length as a varint
+   */
+  bytes() {
+    const length4 = this.uint32();
+    const start = this.pos;
+    const end = this.pos + length4;
+    if (end > this.len) {
+      throw indexOutOfRange(this, length4);
+    }
+    this.pos += length4;
+    return start === end ? new Uint8Array(0) : this.buf.subarray(start, end);
+  }
+  /**
+   * Reads a string preceded by its byte length as a varint
+   */
+  string() {
+    const bytes = this.bytes();
+    return read3(bytes, 0, bytes.length);
+  }
+  /**
+   * Skips the specified number of bytes if specified, otherwise skips a varint
+   */
+  skip(length4) {
+    if (typeof length4 === "number") {
+      if (this.pos + length4 > this.len) {
+        throw indexOutOfRange(this, length4);
+      }
+      this.pos += length4;
+    } else {
+      do {
+        if (this.pos >= this.len) {
+          throw indexOutOfRange(this);
+        }
+      } while ((this.buf[this.pos++] & 128) !== 0);
+    }
+    return this;
+  }
+  /**
+   * Skips the next element of the specified wire type
+   */
+  skipType(wireType) {
+    switch (wireType) {
+      case 0:
+        this.skip();
+        break;
+      case 1:
+        this.skip(8);
+        break;
+      case 2:
+        this.skip(this.uint32());
+        break;
+      case 3:
+        while ((wireType = this.uint32() & 7) !== 4) {
+          this.skipType(wireType);
+        }
+        break;
+      case 5:
+        this.skip(4);
+        break;
+      default:
+        throw Error(`invalid wire type ${wireType} at offset ${this.pos}`);
+    }
+    return this;
+  }
+  readLongVarint() {
+    const bits = new LongBits(0, 0);
+    let i = 0;
+    if (this.len - this.pos > 4) {
+      for (; i < 4; ++i) {
+        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
+        if (this.buf[this.pos++] < 128) {
+          return bits;
+        }
+      }
+      bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;
+      bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;
+      if (this.buf[this.pos++] < 128) {
+        return bits;
+      }
+      i = 0;
+    } else {
+      for (; i < 3; ++i) {
+        if (this.pos >= this.len) {
+          throw indexOutOfRange(this);
+        }
+        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
+        if (this.buf[this.pos++] < 128) {
+          return bits;
+        }
+      }
+      bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;
+      return bits;
+    }
+    if (this.len - this.pos > 4) {
+      for (; i < 5; ++i) {
+        bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
+        if (this.buf[this.pos++] < 128) {
+          return bits;
+        }
+      }
+    } else {
+      for (; i < 5; ++i) {
+        if (this.pos >= this.len) {
+          throw indexOutOfRange(this);
+        }
+        bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
+        if (this.buf[this.pos++] < 128) {
+          return bits;
+        }
+      }
+    }
+    throw Error("invalid varint encoding");
+  }
+  readFixed64() {
+    if (this.pos + 8 > this.len) {
+      throw indexOutOfRange(this, 8);
+    }
+    const lo = readFixed32End(this.buf, this.pos += 4);
+    const hi = readFixed32End(this.buf, this.pos += 4);
+    return new LongBits(lo, hi);
+  }
+  /**
+   * Reads a varint as a signed 64 bit value
+   */
+  int64() {
+    return this.readLongVarint().toBigInt();
+  }
+  /**
+   * Reads a varint as a signed 64 bit value returned as a possibly unsafe
+   * JavaScript number
+   */
+  int64Number() {
+    return this.readLongVarint().toNumber();
+  }
+  /**
+   * Reads a varint as a signed 64 bit value returned as a string
+   */
+  int64String() {
+    return this.readLongVarint().toString();
+  }
+  /**
+   * Reads a varint as an unsigned 64 bit value
+   */
+  uint64() {
+    return this.readLongVarint().toBigInt(true);
+  }
+  /**
+   * Reads a varint as an unsigned 64 bit value returned as a possibly unsafe
+   * JavaScript number
+   */
+  uint64Number() {
+    return this.readLongVarint().toNumber(true);
+  }
+  /**
+   * Reads a varint as an unsigned 64 bit value returned as a string
+   */
+  uint64String() {
+    return this.readLongVarint().toString(true);
+  }
+  /**
+   * Reads a zig-zag encoded varint as a signed 64 bit value
+   */
+  sint64() {
+    return this.readLongVarint().zzDecode().toBigInt();
+  }
+  /**
+   * Reads a zig-zag encoded varint as a signed 64 bit value returned as a
+   * possibly unsafe JavaScript number
+   */
+  sint64Number() {
+    return this.readLongVarint().zzDecode().toNumber();
+  }
+  /**
+   * Reads a zig-zag encoded varint as a signed 64 bit value returned as a
+   * string
+   */
+  sint64String() {
+    return this.readLongVarint().zzDecode().toString();
+  }
+  /**
+   * Reads fixed 64 bits
+   */
+  fixed64() {
+    return this.readFixed64().toBigInt();
+  }
+  /**
+   * Reads fixed 64 bits returned as a possibly unsafe JavaScript number
+   */
+  fixed64Number() {
+    return this.readFixed64().toNumber();
+  }
+  /**
+   * Reads fixed 64 bits returned as a string
+   */
+  fixed64String() {
+    return this.readFixed64().toString();
+  }
+  /**
+   * Reads zig-zag encoded fixed 64 bits
+   */
+  sfixed64() {
+    return this.readFixed64().toBigInt();
+  }
+  /**
+   * Reads zig-zag encoded fixed 64 bits returned as a possibly unsafe
+   * JavaScript number
+   */
+  sfixed64Number() {
+    return this.readFixed64().toNumber();
+  }
+  /**
+   * Reads zig-zag encoded fixed 64 bits returned as a string
+   */
+  sfixed64String() {
+    return this.readFixed64().toString();
+  }
+};
+function createReader(buf2) {
+  return new Uint8ArrayReader(buf2 instanceof Uint8Array ? buf2 : buf2.subarray());
+}
+
+// node_modules/protons-runtime/dist/src/decode.js
+function decodeMessage(buf2, codec) {
+  const reader = createReader(buf2);
+  return codec.decode(reader);
+}
+
+// node_modules/multiformats/src/bases/base10.js
+var base10_exports = {};
+__export(base10_exports, {
+  base10: () => base10
+});
+var base10 = baseX2({
+  prefix: "9",
+  name: "base10",
+  alphabet: "0123456789"
+});
+
+// node_modules/multiformats/src/bases/base16.js
+var base16_exports = {};
+__export(base16_exports, {
+  base16: () => base16,
+  base16upper: () => base16upper
+});
+var base16 = rfc46482({
+  prefix: "f",
+  name: "base16",
+  alphabet: "0123456789abcdef",
+  bitsPerChar: 4
+});
+var base16upper = rfc46482({
+  prefix: "F",
+  name: "base16upper",
+  alphabet: "0123456789ABCDEF",
+  bitsPerChar: 4
+});
+
+// node_modules/multiformats/src/bases/base2.js
+var base2_exports = {};
+__export(base2_exports, {
+  base2: () => base22
+});
+var base22 = rfc46482({
+  prefix: "0",
+  name: "base2",
+  alphabet: "01",
+  bitsPerChar: 1
+});
+
+// node_modules/multiformats/src/bases/base256emoji.js
+var base256emoji_exports = {};
+__export(base256emoji_exports, {
+  base256emoji: () => base256emoji
+});
+var alphabet = Array.from("\u{1F680}\u{1FA90}\u2604\u{1F6F0}\u{1F30C}\u{1F311}\u{1F312}\u{1F313}\u{1F314}\u{1F315}\u{1F316}\u{1F317}\u{1F318}\u{1F30D}\u{1F30F}\u{1F30E}\u{1F409}\u2600\u{1F4BB}\u{1F5A5}\u{1F4BE}\u{1F4BF}\u{1F602}\u2764\u{1F60D}\u{1F923}\u{1F60A}\u{1F64F}\u{1F495}\u{1F62D}\u{1F618}\u{1F44D}\u{1F605}\u{1F44F}\u{1F601}\u{1F525}\u{1F970}\u{1F494}\u{1F496}\u{1F499}\u{1F622}\u{1F914}\u{1F606}\u{1F644}\u{1F4AA}\u{1F609}\u263A\u{1F44C}\u{1F917}\u{1F49C}\u{1F614}\u{1F60E}\u{1F607}\u{1F339}\u{1F926}\u{1F389}\u{1F49E}\u270C\u2728\u{1F937}\u{1F631}\u{1F60C}\u{1F338}\u{1F64C}\u{1F60B}\u{1F497}\u{1F49A}\u{1F60F}\u{1F49B}\u{1F642}\u{1F493}\u{1F929}\u{1F604}\u{1F600}\u{1F5A4}\u{1F603}\u{1F4AF}\u{1F648}\u{1F447}\u{1F3B6}\u{1F612}\u{1F92D}\u2763\u{1F61C}\u{1F48B}\u{1F440}\u{1F62A}\u{1F611}\u{1F4A5}\u{1F64B}\u{1F61E}\u{1F629}\u{1F621}\u{1F92A}\u{1F44A}\u{1F973}\u{1F625}\u{1F924}\u{1F449}\u{1F483}\u{1F633}\u270B\u{1F61A}\u{1F61D}\u{1F634}\u{1F31F}\u{1F62C}\u{1F643}\u{1F340}\u{1F337}\u{1F63B}\u{1F613}\u2B50\u2705\u{1F97A}\u{1F308}\u{1F608}\u{1F918}\u{1F4A6}\u2714\u{1F623}\u{1F3C3}\u{1F490}\u2639\u{1F38A}\u{1F498}\u{1F620}\u261D\u{1F615}\u{1F33A}\u{1F382}\u{1F33B}\u{1F610}\u{1F595}\u{1F49D}\u{1F64A}\u{1F639}\u{1F5E3}\u{1F4AB}\u{1F480}\u{1F451}\u{1F3B5}\u{1F91E}\u{1F61B}\u{1F534}\u{1F624}\u{1F33C}\u{1F62B}\u26BD\u{1F919}\u2615\u{1F3C6}\u{1F92B}\u{1F448}\u{1F62E}\u{1F646}\u{1F37B}\u{1F343}\u{1F436}\u{1F481}\u{1F632}\u{1F33F}\u{1F9E1}\u{1F381}\u26A1\u{1F31E}\u{1F388}\u274C\u270A\u{1F44B}\u{1F630}\u{1F928}\u{1F636}\u{1F91D}\u{1F6B6}\u{1F4B0}\u{1F353}\u{1F4A2}\u{1F91F}\u{1F641}\u{1F6A8}\u{1F4A8}\u{1F92C}\u2708\u{1F380}\u{1F37A}\u{1F913}\u{1F619}\u{1F49F}\u{1F331}\u{1F616}\u{1F476}\u{1F974}\u25B6\u27A1\u2753\u{1F48E}\u{1F4B8}\u2B07\u{1F628}\u{1F31A}\u{1F98B}\u{1F637}\u{1F57A}\u26A0\u{1F645}\u{1F61F}\u{1F635}\u{1F44E}\u{1F932}\u{1F920}\u{1F927}\u{1F4CC}\u{1F535}\u{1F485}\u{1F9D0}\u{1F43E}\u{1F352}\u{1F617}\u{1F911}\u{1F30A}\u{1F92F}\u{1F437}\u260E\u{1F4A7}\u{1F62F}\u{1F486}\u{1
F446}\u{1F3A4}\u{1F647}\u{1F351}\u2744\u{1F334}\u{1F4A3}\u{1F438}\u{1F48C}\u{1F4CD}\u{1F940}\u{1F922}\u{1F445}\u{1F4A1}\u{1F4A9}\u{1F450}\u{1F4F8}\u{1F47B}\u{1F910}\u{1F92E}\u{1F3BC}\u{1F975}\u{1F6A9}\u{1F34E}\u{1F34A}\u{1F47C}\u{1F48D}\u{1F4E3}\u{1F942}");
+var alphabetBytesToChars = (
+  /** @type {string[]} */
+  alphabet.reduce(
+    (p, c, i) => {
+      p[i] = c;
+      return p;
+    },
+    /** @type {string[]} */
+    []
+  )
+);
+var alphabetCharsToBytes = (
+  /** @type {number[]} */
+  alphabet.reduce(
+    (p, c, i) => {
+      p[
+        /** @type {number} */
+        c.codePointAt(0)
+      ] = i;
+      return p;
+    },
+    /** @type {number[]} */
+    []
+  )
+);
+function encode10(data) {
+  return data.reduce((p, c) => {
+    p += alphabetBytesToChars[c];
+    return p;
+  }, "");
+}
+function decode14(str) {
+  const byts = [];
+  for (const char of str) {
+    const byt = alphabetCharsToBytes[
+      /** @type {number} */
+      char.codePointAt(0)
+    ];
+    if (byt === void 0) {
+      throw new Error(`Non-base256emoji character: ${char}`);
+    }
+    byts.push(byt);
+  }
+  return new Uint8Array(byts);
+}
+var base256emoji = from2({
+  prefix: "\u{1F680}",
+  name: "base256emoji",
+  encode: encode10,
+  decode: decode14
+});
+
+// node_modules/multiformats/src/bases/base36.js
+var base36_exports = {};
+__export(base36_exports, {
+  base36: () => base36,
+  base36upper: () => base36upper
+});
+var base36 = baseX2({
+  prefix: "k",
+  name: "base36",
+  alphabet: "0123456789abcdefghijklmnopqrstuvwxyz"
+});
+var base36upper = baseX2({
+  prefix: "K",
+  name: "base36upper",
+  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+});
+
+// node_modules/multiformats/src/bases/base8.js
+var base8_exports = {};
+__export(base8_exports, {
+  base8: () => base8
+});
+var base8 = rfc46482({
+  prefix: "7",
+  name: "base8",
+  alphabet: "01234567",
+  bitsPerChar: 3
+});
+
+// node_modules/multiformats/src/bases/identity.js
+var identity_exports = {};
+__export(identity_exports, {
+  identity: () => identity
+});
+var identity = from2({
+  prefix: "\0",
+  name: "identity",
+  encode: (buf2) => toString2(buf2),
+  decode: (str) => fromString2(str)
+});
+
+// node_modules/multiformats/src/codecs/json.js
+var textEncoder4 = new TextEncoder();
+var textDecoder3 = new TextDecoder();
+
+// node_modules/multiformats/src/hashes/identity.js
+var identity_exports2 = {};
+__export(identity_exports2, {
+  identity: () => identity2
+});
+var code4 = 0;
+var name3 = "identity";
+var encode11 = coerce2;
+var digest = (input) => create2(code4, encode11(input));
+var identity2 = { code: code4, name: name3, encode: encode11, digest };
+
+// node_modules/multiformats/src/hashes/sha2.js
+var sha2_exports = {};
+__export(sha2_exports, {
+  sha256: () => sha256,
+  sha512: () => sha512
+});
+import crypto from "crypto";
+
+// node_modules/multiformats/src/hashes/hasher.js
+var from3 = ({ name: name4, code: code5, encode: encode12 }) => new Hasher(name4, code5, encode12);
+var Hasher = class {
+  /**
+   *
+   * @param {Name} name
+   * @param {Code} code
+   * @param {(input: Uint8Array) => Await} encode
+   */
+  constructor(name4, code5, encode12) {
+    this.name = name4;
+    this.code = code5;
+    this.encode = encode12;
+  }
+  /**
+   * @param {Uint8Array} input
+   * @returns {Await>}
+   */
+  digest(input) {
+    if (input instanceof Uint8Array) {
+      const result = this.encode(input);
+      return result instanceof Uint8Array ? create2(this.code, result) : result.then((digest2) => create2(this.code, digest2));
+    } else {
+      throw Error("Unknown type, must be binary type");
+    }
+  }
+};
+
+// node_modules/multiformats/src/hashes/sha2.js
+var sha256 = from3({
+  name: "sha2-256",
+  code: 18,
+  encode: (input) => coerce2(crypto.createHash("sha256").update(input).digest())
+});
+var sha512 = from3({
+  name: "sha2-512",
+  code: 19,
+  encode: (input) => coerce2(crypto.createHash("sha512").update(input).digest())
+});
+
+// node_modules/multiformats/src/basics.js
+var bases = { ...identity_exports, ...base2_exports, ...base8_exports, ...base10_exports, ...base16_exports, ...base32_exports, ...base36_exports, ...base58_exports, ...base64_exports, ...base256emoji_exports };
+var hashes = { ...sha2_exports, ...identity_exports2 };
+
+// node_modules/uint8arrays/dist/src/util/bases.js
+function createCodec(name4, prefix, encode12, decode15) {
+  return {
+    name: name4,
+    prefix,
+    encoder: {
+      name: name4,
+      prefix,
+      encode: encode12
+    },
+    decoder: {
+      decode: decode15
+    }
+  };
+}
+var string = createCodec("utf8", "u", (buf2) => {
+  const decoder = new TextDecoder("utf8");
+  return "u" + decoder.decode(buf2);
+}, (str) => {
+  const encoder = new TextEncoder();
+  return encoder.encode(str.substring(1));
+});
+var ascii = createCodec("ascii", "a", (buf2) => {
+  let string2 = "a";
+  for (let i = 0; i < buf2.length; i++) {
+    string2 += String.fromCharCode(buf2[i]);
+  }
+  return string2;
+}, (str) => {
+  str = str.substring(1);
+  const buf2 = allocUnsafe(str.length);
+  for (let i = 0; i < str.length; i++) {
+    buf2[i] = str.charCodeAt(i);
+  }
+  return buf2;
+});
+var BASES = {
+  utf8: string,
+  "utf-8": string,
+  hex: bases.base16,
+  latin1: ascii,
+  ascii,
+  binary: ascii,
+  ...bases
+};
+var bases_default = BASES;
+
+// node_modules/uint8arrays/dist/src/from-string.js
+function fromString3(string2, encoding = "utf8") {
+  const base3 = bases_default[encoding];
+  if (base3 == null) {
+    throw new Error(`Unsupported encoding "${encoding}"`);
+  }
+  if ((encoding === "utf8" || encoding === "utf-8") && globalThis.Buffer != null && globalThis.Buffer.from != null) {
+    return asUint8Array(globalThis.Buffer.from(string2, "utf-8"));
+  }
+  return base3.decoder.decode(`${base3.prefix}${string2}`);
+}
+
+// node_modules/protons-runtime/dist/src/utils/pool.js
+function pool(size) {
+  const SIZE = size ?? 8192;
+  const MAX = SIZE >>> 1;
+  let slab;
+  let offset = SIZE;
+  return function poolAlloc(size2) {
+    if (size2 < 1 || size2 > MAX) {
+      return allocUnsafe(size2);
+    }
+    if (offset + size2 > SIZE) {
+      slab = allocUnsafe(SIZE);
+      offset = 0;
+    }
+    const buf2 = slab.subarray(offset, offset += size2);
+    if ((offset & 7) !== 0) {
+      offset = (offset | 7) + 1;
+    }
+    return buf2;
+  };
+}
+
+// node_modules/protons-runtime/dist/src/utils/writer.js
+var Op = class {
+  /**
+   * Function to call
+   */
+  fn;
+  /**
+   * Value byte length
+   */
+  len;
+  /**
+   * Next operation
+   */
+  next;
+  /**
+   * Value to write
+   */
+  val;
+  constructor(fn, len, val) {
+    this.fn = fn;
+    this.len = len;
+    this.next = void 0;
+    this.val = val;
+  }
+};
+function noop2() {
+}
+var State = class {
+  /**
+   * Current head
+   */
+  head;
+  /**
+   * Current tail
+   */
+  tail;
+  /**
+   * Current buffer length
+   */
+  len;
+  /**
+   * Next state
+   */
+  next;
+  constructor(writer) {
+    this.head = writer.head;
+    this.tail = writer.tail;
+    this.len = writer.len;
+    this.next = writer.states;
+  }
+};
+var bufferPool = pool();
+function alloc3(size) {
+  if (globalThis.Buffer != null) {
+    return allocUnsafe(size);
+  }
+  return bufferPool(size);
+}
+var Uint8ArrayWriter = class {
+  /**
+   * Current length
+   */
+  len;
+  /**
+   * Operations head
+   */
+  head;
+  /**
+   * Operations tail
+   */
+  tail;
+  /**
+   * Linked forked states
+   */
+  states;
+  constructor() {
+    this.len = 0;
+    this.head = new Op(noop2, 0, 0);
+    this.tail = this.head;
+    this.states = null;
+  }
+  /**
+   * Pushes a new operation to the queue
+   */
+  _push(fn, len, val) {
+    this.tail = this.tail.next = new Op(fn, len, val);
+    this.len += len;
+    return this;
+  }
+  /**
+   * Writes an unsigned 32 bit value as a varint
+   */
+  uint32(value) {
+    this.len += (this.tail = this.tail.next = new VarintOp((value = value >>> 0) < 128 ? 1 : value < 16384 ? 2 : value < 2097152 ? 3 : value < 268435456 ? 4 : 5, value)).len;
+    return this;
+  }
+  /**
+   * Writes a signed 32 bit value as a varint`
+   */
+  int32(value) {
+    return value < 0 ? this._push(writeVarint64, 10, LongBits.fromNumber(value)) : this.uint32(value);
+  }
+  /**
+   * Writes a 32 bit value as a varint, zig-zag encoded
+   */
+  sint32(value) {
+    return this.uint32((value << 1 ^ value >> 31) >>> 0);
+  }
+  /**
+   * Writes an unsigned 64 bit value as a varint
+   */
+  uint64(value) {
+    const bits = LongBits.fromBigInt(value);
+    return this._push(writeVarint64, bits.length(), bits);
+  }
+  /**
+   * Writes an unsigned 64 bit value as a varint
+   */
+  uint64Number(value) {
+    const bits = LongBits.fromNumber(value);
+    return this._push(writeVarint64, bits.length(), bits);
+  }
+  /**
+   * Writes an unsigned 64 bit value as a varint
+   */
+  uint64String(value) {
+    return this.uint64(BigInt(value));
+  }
+  /**
+   * Writes a signed 64 bit value as a varint
+   */
+  int64(value) {
+    return this.uint64(value);
+  }
+  /**
+   * Writes a signed 64 bit value as a varint
+   */
+  int64Number(value) {
+    return this.uint64Number(value);
+  }
+  /**
+   * Writes a signed 64 bit value given as a decimal string as a varint.
+   */
+  int64String(value) {
+    return this.uint64String(value);
+  }
+  /**
+   * Writes a signed 64 bit BigInt as a varint, zig-zag encoded.
+   */
+  sint64(value) {
+    const bits = LongBits.fromBigInt(value).zzEncode();
+    return this._push(writeVarint64, bits.length(), bits);
+  }
+  /**
+   * Writes a signed 64 bit JS number as a varint, zig-zag encoded.
+   */
+  sint64Number(value) {
+    const bits = LongBits.fromNumber(value).zzEncode();
+    return this._push(writeVarint64, bits.length(), bits);
+  }
+  /**
+   * Writes a signed 64 bit decimal string as a varint, zig-zag encoded.
+   */
+  sint64String(value) {
+    return this.sint64(BigInt(value));
+  }
+  /**
+   * Writes a boolish value as a single varint byte (1 for truthy, 0 otherwise).
+   */
+  bool(value) {
+    return this._push(writeByte, 1, value ? 1 : 0);
+  }
+  /**
+   * Writes an unsigned 32 bit value as fixed 32 bits (4 bytes, little-endian).
+   */
+  fixed32(value) {
+    return this._push(writeFixed32, 4, value >>> 0);
+  }
+  /**
+   * Writes a signed 32 bit value as fixed 32 bits
+   * (two's complement yields the same 4 bytes as fixed32).
+   */
+  sfixed32(value) {
+    return this.fixed32(value);
+  }
+  /**
+   * Writes an unsigned 64 bit BigInt as fixed 64 bits:
+   * low 32 bits then high 32 bits, each little-endian (8 bytes total).
+   */
+  fixed64(value) {
+    const bits = LongBits.fromBigInt(value);
+    return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
+  }
+  /**
+   * Writes an unsigned 64 bit JS number as fixed 64 bits (lo word then hi word).
+   */
+  fixed64Number(value) {
+    const bits = LongBits.fromNumber(value);
+    return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
+  }
+  /**
+   * Writes an unsigned 64 bit decimal string as fixed 64 bits.
+   */
+  fixed64String(value) {
+    return this.fixed64(BigInt(value));
+  }
+  /**
+   * Writes a signed 64 bit BigInt as fixed 64 bits (same bytes as fixed64).
+   */
+  sfixed64(value) {
+    return this.fixed64(value);
+  }
+  /**
+   * Writes a signed 64 bit JS number as fixed 64 bits.
+   */
+  sfixed64Number(value) {
+    return this.fixed64Number(value);
+  }
+  /**
+   * Writes a signed 64 bit decimal string as fixed 64 bits.
+   */
+  sfixed64String(value) {
+    return this.fixed64String(value);
+  }
+  /**
+   * Writes a float (IEEE-754 single precision, 4 bytes little-endian).
+   */
+  float(value) {
+    return this._push(writeFloatLE, 4, value);
+  }
+  /**
+   * Writes a double (IEEE-754 double precision, 8 bytes little-endian).
+   *
+   * @function
+   * @param {number} value - Value to write
+   * @returns {Writer} `this`
+   */
+  double(value) {
+    return this._push(writeDoubleLE, 8, value);
+  }
+  /**
+   * Writes a length-delimited sequence of bytes:
+   * a varint byte count, then the raw bytes (a single 0 byte when empty).
+   */
+  bytes(value) {
+    const len = value.length >>> 0;
+    if (len === 0) {
+      return this._push(writeByte, 1, 0);
+    }
+    return this.uint32(len)._push(writeBytes, len, value);
+  }
+  /**
+   * Writes a string as length-delimited UTF-8
+   * (varint byte count, then the encoded bytes; a single 0 byte when empty).
+   */
+  string(value) {
+    const len = length3(value);
+    return len !== 0 ? this.uint32(len)._push(write, len, value) : this._push(writeByte, 1, 0);
+  }
+  /**
+   * Forks this writer's state by pushing it to a stack.
+   * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.
+   * The current op list is saved in a State node and a fresh empty list begins.
+   */
+  fork() {
+    this.states = new State(this);
+    this.head = this.tail = new Op(noop2, 0, 0);
+    this.len = 0;
+    return this;
+  }
+  /**
+   * Resets this instance to the last forked state, discarding the ops
+   * written since fork(); if never forked, resets to a fresh empty writer.
+   */
+  reset() {
+    if (this.states != null) {
+      this.head = this.states.head;
+      this.tail = this.states.tail;
+      this.len = this.states.len;
+      this.states = this.states.next;
+    } else {
+      this.head = this.tail = new Op(noop2, 0, 0);
+      this.len = 0;
+    }
+    return this;
+  }
+  /**
+   * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.
+   * This is how embedded (length-delimited) messages get their size prefix.
+   */
+  ldelim() {
+    const head = this.head;
+    const tail = this.tail;
+    const len = this.len;
+    this.reset().uint32(len);
+    if (len !== 0) {
+      // splice the forked op list back in after the length varint
+      this.tail.next = head.next;
+      this.tail = tail;
+      this.len += len;
+    }
+    return this;
+  }
+  /**
+   * Finishes the write operation: allocates a buffer of the accumulated
+   * length and replays every queued op into it, returning the bytes.
+   */
+  finish() {
+    let head = this.head.next;
+    const buf2 = alloc3(this.len);
+    let pos = 0;
+    while (head != null) {
+      head.fn(head.val, buf2, pos);
+      pos += head.len;
+      head = head.next;
+    }
+    return buf2;
+  }
+};
+// Writes the low 8 bits of val into buf2 at pos.
+function writeByte(val, buf2, pos) {
+  buf2[pos] = val & 255;
+}
+// Writes val (assumed < 2^32) as a base-128 varint: 7 payload bits per
+// byte, high bit set on all but the final byte.
+function writeVarint32(val, buf2, pos) {
+  while (val > 127) {
+    buf2[pos++] = val & 127 | 128;
+    val >>>= 7;
+  }
+  buf2[pos] = val;
+}
+// Specialized op node for 32 bit varints; its precomputed len is added to
+// the writer total by uint32().
+var VarintOp = class extends Op {
+  next;
+  constructor(len, val) {
+    super(writeVarint32, len, val);
+    this.next = void 0;
+  }
+};
+// Writes a 64 bit value held as {lo, hi} 32 bit halves as a varint,
+// shifting bits from hi into lo 7 at a time. Mutates val while writing.
+function writeVarint64(val, buf2, pos) {
+  while (val.hi !== 0) {
+    buf2[pos++] = val.lo & 127 | 128;
+    val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;
+    val.hi >>>= 7;
+  }
+  while (val.lo > 127) {
+    buf2[pos++] = val.lo & 127 | 128;
+    val.lo = val.lo >>> 7;
+  }
+  buf2[pos++] = val.lo;
+}
+// Writes val as 4 little-endian bytes at pos.
+function writeFixed32(val, buf2, pos) {
+  buf2[pos] = val & 255;
+  buf2[pos + 1] = val >>> 8 & 255;
+  buf2[pos + 2] = val >>> 16 & 255;
+  buf2[pos + 3] = val >>> 24;
+}
+// Copies the byte array val into buf2 starting at pos.
+function writeBytes(val, buf2, pos) {
+  buf2.set(val, pos);
+}
+// When a Node.js Buffer implementation is present, replace bytes()/string()
+// on the writer prototype with Buffer-accelerated variants.
+if (globalThis.Buffer != null) {
+  Uint8ArrayWriter.prototype.bytes = function(value) {
+    const len = value.length >>> 0;
+    this.uint32(len);
+    if (len > 0) {
+      this._push(writeBytesBuffer, len, value);
+    }
+    return this;
+  };
+  Uint8ArrayWriter.prototype.string = function(value) {
+    const len = globalThis.Buffer.byteLength(value);
+    this.uint32(len);
+    if (len > 0) {
+      this._push(writeStringBuffer, len, value);
+    }
+    return this;
+  };
+}
+// Buffer-path byte copy (same behavior as writeBytes).
+function writeBytesBuffer(val, buf2, pos) {
+  buf2.set(val, pos);
+}
+// Encodes string val as UTF-8 into buf2 at pos; short strings use the JS
+// encoder, longer ones use Buffer's native utf8Write when available.
+function writeStringBuffer(val, buf2, pos) {
+  if (val.length < 40) {
+    write(val, buf2, pos);
+  } else if (buf2.utf8Write != null) {
+    buf2.utf8Write(val, pos);
+  } else {
+    buf2.set(fromString3(val), pos);
+  }
+}
+// Factory for a fresh protobuf writer.
+function createWriter() {
+  return new Uint8ArrayWriter();
+}
+
+// node_modules/protons-runtime/dist/src/encode.js
+// Encodes message2 with the given protons codec and returns the bytes.
+// lengthDelimited: false means no outer size prefix (top-level message).
+function encodeMessage(message2, codec) {
+  const w = createWriter();
+  codec.encode(message2, w, {
+    lengthDelimited: false
+  });
+  return w.finish();
+}
+
+// node_modules/protons-runtime/dist/src/codec.js
+// Protobuf wire types (bidirectional name<->number enum object).
+var CODEC_TYPES;
+(function(CODEC_TYPES2) {
+  CODEC_TYPES2[CODEC_TYPES2["VARINT"] = 0] = "VARINT";
+  CODEC_TYPES2[CODEC_TYPES2["BIT64"] = 1] = "BIT64";
+  CODEC_TYPES2[CODEC_TYPES2["LENGTH_DELIMITED"] = 2] = "LENGTH_DELIMITED";
+  CODEC_TYPES2[CODEC_TYPES2["START_GROUP"] = 3] = "START_GROUP";
+  CODEC_TYPES2[CODEC_TYPES2["END_GROUP"] = 4] = "END_GROUP";
+  CODEC_TYPES2[CODEC_TYPES2["BIT32"] = 5] = "BIT32";
+})(CODEC_TYPES || (CODEC_TYPES = {}));
+// Bundles a name, wire type and encode/decode pair into a codec object.
+function createCodec2(name4, type, encode12, decode15) {
+  return {
+    name: name4,
+    type,
+    encode: encode12,
+    decode: decode15
+  };
+}
+
+// node_modules/protons-runtime/dist/src/codecs/enum.js
+// Builds a varint codec for an enum object v that maps both names->numbers
+// and numbers->names; findValue resolves either direction and rejects
+// unknown values.
+function enumeration(v) {
+  function findValue(val) {
+    if (v[val.toString()] == null) {
+      throw new Error("Invalid enum value");
+    }
+    return v[val];
+  }
+  const encode12 = function enumEncode(val, writer) {
+    const enumValue = findValue(val);
+    writer.int32(enumValue);
+  };
+  const decode15 = function enumDecode(reader) {
+    const val = reader.int32();
+    return findValue(val);
+  };
+  return createCodec2("enum", CODEC_TYPES.VARINT, encode12, decode15);
+}
+
+// node_modules/protons-runtime/dist/src/codecs/message.js
+// Builds a length-delimited codec for an embedded protobuf message.
+function message(encode12, decode15) {
+  return createCodec2("message", CODEC_TYPES.LENGTH_DELIMITED, encode12, decode15);
+}
+
+// node_modules/ipfs-unixfs/dist/src/unixfs.js
+// Generated protons codec for the UnixFS `Data` protobuf message
+// (fields: Type=1, Data=2, filesize=3, blocksizes=4, hashType=5,
+//  fanout=6, mode=7, mtime=8). Do not hand-edit; regenerate instead.
+var Data;
+(function(Data2) {
+  let DataType;
+  (function(DataType2) {
+    DataType2["Raw"] = "Raw";
+    DataType2["Directory"] = "Directory";
+    DataType2["File"] = "File";
+    DataType2["Metadata"] = "Metadata";
+    DataType2["Symlink"] = "Symlink";
+    DataType2["HAMTShard"] = "HAMTShard";
+  })(DataType = Data2.DataType || (Data2.DataType = {}));
+  let __DataTypeValues;
+  (function(__DataTypeValues2) {
+    __DataTypeValues2[__DataTypeValues2["Raw"] = 0] = "Raw";
+    __DataTypeValues2[__DataTypeValues2["Directory"] = 1] = "Directory";
+    __DataTypeValues2[__DataTypeValues2["File"] = 2] = "File";
+    __DataTypeValues2[__DataTypeValues2["Metadata"] = 3] = "Metadata";
+    __DataTypeValues2[__DataTypeValues2["Symlink"] = 4] = "Symlink";
+    __DataTypeValues2[__DataTypeValues2["HAMTShard"] = 5] = "HAMTShard";
+  })(__DataTypeValues || (__DataTypeValues = {}));
+  (function(DataType2) {
+    DataType2.codec = () => {
+      return enumeration(__DataTypeValues);
+    };
+  })(DataType = Data2.DataType || (Data2.DataType = {}));
+  let _codec;
+  Data2.codec = () => {
+    if (_codec == null) {
+      // encode: each present field is written as tag (fieldNo << 3 | wireType)
+      // followed by its value; decode: dispatch on field number until `end`.
+      _codec = message((obj, w, opts = {}) => {
+        if (opts.lengthDelimited !== false) {
+          w.fork();
+        }
+        if (obj.Type != null) {
+          w.uint32(8);
+          Data2.DataType.codec().encode(obj.Type, w);
+        }
+        if (obj.Data != null) {
+          w.uint32(18);
+          w.bytes(obj.Data);
+        }
+        if (obj.filesize != null) {
+          w.uint32(24);
+          w.uint64(obj.filesize);
+        }
+        if (obj.blocksizes != null) {
+          for (const value of obj.blocksizes) {
+            w.uint32(32);
+            w.uint64(value);
+          }
+        }
+        if (obj.hashType != null) {
+          w.uint32(40);
+          w.uint64(obj.hashType);
+        }
+        if (obj.fanout != null) {
+          w.uint32(48);
+          w.uint64(obj.fanout);
+        }
+        if (obj.mode != null) {
+          w.uint32(56);
+          w.uint32(obj.mode);
+        }
+        if (obj.mtime != null) {
+          w.uint32(66);
+          UnixTime.codec().encode(obj.mtime, w);
+        }
+        if (opts.lengthDelimited !== false) {
+          w.ldelim();
+        }
+      }, (reader, length4) => {
+        const obj = {
+          blocksizes: []
+        };
+        const end = length4 == null ? reader.len : reader.pos + length4;
+        while (reader.pos < end) {
+          const tag = reader.uint32();
+          switch (tag >>> 3) {
+            case 1:
+              obj.Type = Data2.DataType.codec().decode(reader);
+              break;
+            case 2:
+              obj.Data = reader.bytes();
+              break;
+            case 3:
+              obj.filesize = reader.uint64();
+              break;
+            case 4:
+              obj.blocksizes.push(reader.uint64());
+              break;
+            case 5:
+              obj.hashType = reader.uint64();
+              break;
+            case 6:
+              obj.fanout = reader.uint64();
+              break;
+            case 7:
+              obj.mode = reader.uint32();
+              break;
+            case 8:
+              obj.mtime = UnixTime.codec().decode(reader, reader.uint32());
+              break;
+            default:
+              reader.skipType(tag & 7);
+              break;
+          }
+        }
+        return obj;
+      });
+    }
+    return _codec;
+  };
+  Data2.encode = (obj) => {
+    return encodeMessage(obj, Data2.codec());
+  };
+  Data2.decode = (buf2) => {
+    return decodeMessage(buf2, Data2.codec());
+  };
+})(Data || (Data = {}));
+// Generated protons codec for the UnixFS `UnixTime` message
+// (Seconds=1 as varint int64, FractionalNanoseconds=2 as fixed32).
+var UnixTime;
+(function(UnixTime2) {
+  let _codec;
+  UnixTime2.codec = () => {
+    if (_codec == null) {
+      _codec = message((obj, w, opts = {}) => {
+        if (opts.lengthDelimited !== false) {
+          w.fork();
+        }
+        if (obj.Seconds != null) {
+          w.uint32(8);
+          w.int64(obj.Seconds);
+        }
+        if (obj.FractionalNanoseconds != null) {
+          w.uint32(21);
+          w.fixed32(obj.FractionalNanoseconds);
+        }
+        if (opts.lengthDelimited !== false) {
+          w.ldelim();
+        }
+      }, (reader, length4) => {
+        const obj = {};
+        const end = length4 == null ? reader.len : reader.pos + length4;
+        while (reader.pos < end) {
+          const tag = reader.uint32();
+          switch (tag >>> 3) {
+            case 1:
+              obj.Seconds = reader.int64();
+              break;
+            case 2:
+              obj.FractionalNanoseconds = reader.fixed32();
+              break;
+            default:
+              reader.skipType(tag & 7);
+              break;
+          }
+        }
+        return obj;
+      });
+    }
+    return _codec;
+  };
+  UnixTime2.encode = (obj) => {
+    return encodeMessage(obj, UnixTime2.codec());
+  };
+  UnixTime2.decode = (buf2) => {
+    return decodeMessage(buf2, UnixTime2.codec());
+  };
+})(UnixTime || (UnixTime = {}));
+// Generated protons codec for the UnixFS `Metadata` message
+// (single optional string field MimeType=1).
+var Metadata;
+(function(Metadata2) {
+  let _codec;
+  Metadata2.codec = () => {
+    if (_codec == null) {
+      _codec = message((obj, w, opts = {}) => {
+        if (opts.lengthDelimited !== false) {
+          w.fork();
+        }
+        if (obj.MimeType != null) {
+          w.uint32(10);
+          w.string(obj.MimeType);
+        }
+        if (opts.lengthDelimited !== false) {
+          w.ldelim();
+        }
+      }, (reader, length4) => {
+        const obj = {};
+        const end = length4 == null ? reader.len : reader.pos + length4;
+        while (reader.pos < end) {
+          const tag = reader.uint32();
+          switch (tag >>> 3) {
+            case 1:
+              obj.MimeType = reader.string();
+              break;
+            default:
+              reader.skipType(tag & 7);
+              break;
+          }
+        }
+        return obj;
+      });
+    }
+    return _codec;
+  };
+  Metadata2.encode = (obj) => {
+    return encodeMessage(obj, Metadata2.codec());
+  };
+  Metadata2.decode = (buf2) => {
+    return decodeMessage(buf2, Metadata2.codec());
+  };
+})(Metadata || (Metadata = {}));
+
+// node_modules/ipfs-unixfs/dist/src/index.js
+// Maps protobuf DataType names to the lowercase type strings used by UnixFS.
+var types = {
+  Raw: "raw",
+  Directory: "directory",
+  File: "file",
+  Metadata: "metadata",
+  Symlink: "symlink",
+  HAMTShard: "hamt-sharded-directory"
+};
+// Type strings that count as directories for isDirectory().
+var dirTypes = [
+  "directory",
+  "hamt-sharded-directory"
+];
+// Default POSIX permission bits applied when no mode is supplied.
+var DEFAULT_FILE_MODE = parseInt("0644", 8);
+var DEFAULT_DIRECTORY_MODE = parseInt("0755", 8);
+// In-memory representation of a UnixFS node with protobuf (un)marshalling.
+var UnixFS = class _UnixFS {
+  /**
+   * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md
+   */
+  static unmarshal(marshaled) {
+    const message2 = Data.decode(marshaled);
+    const data = new _UnixFS({
+      type: types[message2.Type != null ? message2.Type.toString() : "File"],
+      data: message2.Data,
+      blockSizes: message2.blocksizes,
+      mode: message2.mode,
+      mtime: message2.mtime != null ? {
+        secs: message2.mtime.Seconds ?? 0n,
+        nsecs: message2.mtime.FractionalNanoseconds
+      } : void 0,
+      fanout: message2.fanout
+    });
+    // keep the full on-wire mode so marshal() can round-trip bits above 0o7777
+    data._originalMode = message2.mode ?? 0;
+    return data;
+  }
+  type;
+  data;
+  blockSizes;
+  hashType;
+  fanout;
+  mtime;
+  _mode;
+  _originalMode;
+  /**
+   * @param {object} [options] - type defaults to "file"; throws
+   *   ERR_INVALID_TYPE when type is not one of the known UnixFS types.
+   */
+  constructor(options = {
+    type: "file"
+  }) {
+    const { type, data, blockSizes, hashType, fanout, mtime, mode } = options;
+    if (type != null && !Object.values(types).includes(type)) {
+      throw (0, import_err_code.default)(new Error("Type: " + type + " is not valid"), "ERR_INVALID_TYPE");
+    }
+    this.type = type ?? "file";
+    this.data = data;
+    this.hashType = hashType;
+    this.fanout = fanout;
+    this.blockSizes = blockSizes ?? [];
+    this._originalMode = 0;
+    this.mode = mode;
+    this.mtime = mtime;
+  }
+  // Setting mode masks to the permission bits (0o7777); null/undefined
+  // selects the directory or file default.
+  set mode(mode) {
+    if (mode == null) {
+      this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE;
+    } else {
+      this._mode = mode & 4095;
+    }
+  }
+  get mode() {
+    return this._mode;
+  }
+  isDirectory() {
+    return dirTypes.includes(this.type);
+  }
+  addBlockSize(size) {
+    this.blockSizes.push(size);
+  }
+  removeBlockSize(index) {
+    this.blockSizes.splice(index, 1);
+  }
+  /**
+   * Returns `0n` for directories or `data.length + sum(blockSizes)` for everything else
+   */
+  fileSize() {
+    if (this.isDirectory()) {
+      return 0n;
+    }
+    let sum = 0n;
+    this.blockSizes.forEach((size) => {
+      sum += size;
+    });
+    if (this.data != null) {
+      sum += BigInt(this.data.length);
+    }
+    return sum;
+  }
+  /**
+   * encode to protobuf Uint8Array
+   */
+  marshal() {
+    let type;
+    switch (this.type) {
+      case "raw":
+        type = Data.DataType.Raw;
+        break;
+      case "directory":
+        type = Data.DataType.Directory;
+        break;
+      case "file":
+        type = Data.DataType.File;
+        break;
+      case "metadata":
+        type = Data.DataType.Metadata;
+        break;
+      case "symlink":
+        type = Data.DataType.Symlink;
+        break;
+      case "hamt-sharded-directory":
+        type = Data.DataType.HAMTShard;
+        break;
+      default:
+        throw (0, import_err_code.default)(new Error(`Type: ${type} is not valid`), "ERR_INVALID_TYPE");
+    }
+    let data = this.data;
+    if (this.data == null || this.data.length === 0) {
+      data = void 0;
+    }
+    let mode;
+    if (this.mode != null) {
+      // re-merge preserved high bits; omit the mode entirely when it equals
+      // the default for this node kind
+      mode = this._originalMode & 4294963200 | (this.mode ?? 0);
+      if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {
+        mode = void 0;
+      }
+      if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {
+        mode = void 0;
+      }
+    }
+    let mtime;
+    if (this.mtime != null) {
+      mtime = {
+        Seconds: this.mtime.secs,
+        FractionalNanoseconds: this.mtime.nsecs
+      };
+    }
+    return Data.encode({
+      Type: type,
+      Data: data,
+      filesize: this.isDirectory() ? void 0 : this.fileSize(),
+      blocksizes: this.blockSizes,
+      hashType: this.hashType,
+      fanout: this.fanout,
+      mode,
+      mtime
+    });
+  }
+};
+
+// node_modules/progress-events/dist/src/index.js
+// Event subclass carrying an arbitrary `detail` payload, used for
+// importer progress notifications.
+var CustomProgressEvent = class extends Event {
+  constructor(type, detail) {
+    super(type);
+    this.detail = detail;
+  }
+};
+
+// node_modules/ipfs-unixfs-importer/dist/src/utils/persist.js
+// Hashes buffer2 with sha2-256, builds a CID from options.cidVersion and
+// the codec (dag-pb by default), stores the block and returns the CID.
+var persist = async (buffer2, blockstore, options) => {
+  if (options.codec == null) {
+    options.codec = src_exports2;
+  }
+  const multihash = await sha256.digest(buffer2);
+  const cid = CID2.create(options.cidVersion, options.codec.code, multihash);
+  await blockstore.put(cid, buffer2, options);
+  return cid;
+};
+
+// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/buffer-importer.js
+// Yields, for each content chunk of `file`, a thunk that persists the chunk
+// as a leaf block (raw when options.rawLeaves, otherwise wrapped in a
+// UnixFS leaf inside dag-pb) and reports write progress.
+function defaultBufferImporter(options) {
+  return async function* bufferImporter(file, blockstore) {
+    let bytesWritten = 0n;
+    for await (let block of file.content) {
+      yield async () => {
+        var _a;
+        let unixfs2;
+        const opts = {
+          codec: src_exports2,
+          cidVersion: options.cidVersion,
+          onProgress: options.onProgress
+        };
+        if (options.rawLeaves) {
+          opts.codec = raw_exports;
+          opts.cidVersion = 1;
+        } else {
+          unixfs2 = new UnixFS({
+            type: options.leafType,
+            data: block
+          });
+          block = encode7({
+            Data: unixfs2.marshal(),
+            Links: []
+          });
+        }
+        const cid = await persist(block, blockstore, opts);
+        bytesWritten += BigInt(block.byteLength);
+        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:write", {
+          bytesWritten,
+          cid,
+          path: file.path
+        }));
+        return {
+          cid,
+          unixfs: unixfs2,
+          size: BigInt(block.length),
+          block
+        };
+      };
+    }
+  };
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js
+var import_err_code2 = __toESM(require_err_code(), 1);
+
+// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/dir.js
+// Persists an (empty) UnixFS directory node for `dir` and returns its
+// CID, path and size metadata.
+var dirBuilder = async (dir, blockstore, options) => {
+  const unixfs2 = new UnixFS({
+    type: "directory",
+    mtime: dir.mtime,
+    mode: dir.mode
+  });
+  const block = encode7(prepare({ Data: unixfs2.marshal() }));
+  const cid = await persist(block, blockstore, options);
+  const path6 = dir.path;
+  return {
+    cid,
+    path: path6,
+    unixfs: unixfs2,
+    size: BigInt(block.length),
+    originalPath: dir.originalPath,
+    block
+  };
+};
+
+// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/file.js
+// Streams persisted leaf entries for a file, writing blocks concurrently
+// up to options.blockWriteConcurrency. The first entry is held back and
+// marked `single: true`; only if a second entry arrives is it re-yielded
+// without that flag, so reduce() can detect single-block files.
+async function* buildFileBatch(file, blockstore, options) {
+  let count = -1;
+  let previous;
+  for await (const entry of parallelBatch(options.bufferImporter(file, blockstore), options.blockWriteConcurrency)) {
+    count++;
+    if (count === 0) {
+      previous = {
+        ...entry,
+        single: true
+      };
+      continue;
+    } else if (count === 1 && previous != null) {
+      yield {
+        ...previous,
+        block: void 0,
+        single: void 0
+      };
+      previous = void 0;
+    }
+    yield {
+      ...entry,
+      block: void 0
+    };
+  }
+  if (previous != null) {
+    yield previous;
+  }
+}
+// True only when the entry was flagged by buildFileBatch as the sole block.
+function isSingleBlockImport(result) {
+  return result.single === true;
+}
+// Returns a reducer that combines leaf entries into a parent UnixFS file
+// node. A lone leaf may be re-persisted as the file itself
+// (reduceSingleLeafToSelf); otherwise leaves become dag-pb links with
+// their sizes accumulated into the parent's blockSizes.
+var reduce = (file, blockstore, options) => {
+  const reducer = async function(leaves) {
+    var _a, _b;
+    if (leaves.length === 1 && isSingleBlockImport(leaves[0]) && options.reduceSingleLeafToSelf) {
+      const leaf = leaves[0];
+      let node2 = leaf.block;
+      // metadata present: re-wrap the raw leaf so mtime/mode can be stored
+      if (isSingleBlockImport(leaf) && (file.mtime !== void 0 || file.mode !== void 0)) {
+        leaf.unixfs = new UnixFS({
+          type: "file",
+          mtime: file.mtime,
+          mode: file.mode,
+          data: leaf.block
+        });
+        node2 = { Data: leaf.unixfs.marshal(), Links: [] };
+        leaf.block = encode7(prepare(node2));
+        leaf.cid = await persist(leaf.block, blockstore, {
+          ...options,
+          cidVersion: options.cidVersion
+        });
+        leaf.size = BigInt(leaf.block.length);
+      }
+      (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", {
+        cid: leaf.cid,
+        path: leaf.originalPath
+      }));
+      return {
+        cid: leaf.cid,
+        path: file.path,
+        unixfs: leaf.unixfs,
+        size: leaf.size,
+        originalPath: leaf.originalPath
+      };
+    }
+    const f = new UnixFS({
+      type: "file",
+      mtime: file.mtime,
+      mode: file.mode
+    });
+    // drop empty leaves; keep raw blocks, non-empty files and inline data
+    const links = leaves.filter((leaf) => {
+      var _a2, _b2;
+      if (leaf.cid.code === code3 && leaf.size > 0) {
+        return true;
+      }
+      if (leaf.unixfs != null && leaf.unixfs.data == null && leaf.unixfs.fileSize() > 0n) {
+        return true;
+      }
+      return Boolean((_b2 = (_a2 = leaf.unixfs) == null ? void 0 : _a2.data) == null ? void 0 : _b2.length);
+    }).map((leaf) => {
+      var _a2;
+      if (leaf.cid.code === code3) {
+        f.addBlockSize(leaf.size);
+        return {
+          Name: "",
+          Tsize: Number(leaf.size),
+          Hash: leaf.cid
+        };
+      }
+      if (leaf.unixfs == null || leaf.unixfs.data == null) {
+        f.addBlockSize(((_a2 = leaf.unixfs) == null ? void 0 : _a2.fileSize()) ?? 0n);
+      } else {
+        f.addBlockSize(BigInt(leaf.unixfs.data.length));
+      }
+      return {
+        Name: "",
+        Tsize: Number(leaf.size),
+        Hash: leaf.cid
+      };
+    });
+    const node = {
+      Data: f.marshal(),
+      Links: links
+    };
+    const block = encode7(prepare(node));
+    const cid = await persist(block, blockstore, options);
+    (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", {
+      cid,
+      path: file.originalPath
+    }));
+    return {
+      cid,
+      path: file.path,
+      unixfs: f,
+      size: BigInt(block.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)),
+      originalPath: file.originalPath,
+      block
+    };
+  };
+  return reducer;
+};
+// Runs the configured layout (e.g. balanced) over the file's leaf batch.
+var fileBuilder = async (file, block, options) => {
+  return options.layout(buildFileBatch(file, block, options), reduce(file, block, options));
+};
+
+// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js
+// True when thing exposes a synchronous iterator.
+function isIterable(thing) {
+  return Symbol.iterator in thing;
+}
+// True when thing exposes an async iterator.
+function isAsyncIterable5(thing) {
+  return Symbol.asyncIterator in thing;
+}
+// Normalizes Uint8Array / Iterable / AsyncIterable content to an async
+// iterable; anything else (including primitives that make `in` throw)
+// raises ERR_INVALID_CONTENT.
+function contentAsAsyncIterable(content) {
+  try {
+    if (content instanceof Uint8Array) {
+      return async function* () {
+        yield content;
+      }();
+    } else if (isIterable(content)) {
+      return async function* () {
+        yield* content;
+      }();
+    } else if (isAsyncIterable5(content)) {
+      return content;
+    }
+  } catch {
+    throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
+  }
+  throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
+}
+// Turns import candidates into thunks that build file or directory DAG
+// nodes. Paths are normalized (empty and "." segments removed); entries
+// with content become files, path-only entries become directories.
+function defaultDagBuilder(options) {
+  return async function* dagBuilder(source, blockstore) {
+    for await (const entry of source) {
+      let originalPath;
+      if (entry.path != null) {
+        originalPath = entry.path;
+        entry.path = entry.path.split("/").filter((path6) => path6 != null && path6 !== ".").join("/");
+      }
+      if (isFileCandidate(entry)) {
+        const file = {
+          path: entry.path,
+          mtime: entry.mtime,
+          mode: entry.mode,
+          content: async function* () {
+            var _a;
+            let bytesRead = 0n;
+            for await (const chunk of options.chunker(options.chunkValidator(contentAsAsyncIterable(entry.content)))) {
+              const currentChunkSize = BigInt(chunk.byteLength);
+              bytesRead += currentChunkSize;
+              (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:read", {
+                bytesRead,
+                chunkSize: currentChunkSize,
+                path: entry.path
+              }));
+              yield chunk;
+            }
+          }(),
+          originalPath
+        };
+        yield async () => fileBuilder(file, blockstore, options);
+      } else if (entry.path != null) {
+        const dir = {
+          path: entry.path,
+          mtime: entry.mtime,
+          mode: entry.mode,
+          originalPath
+        };
+        yield async () => dirBuilder(dir, blockstore, options);
+      } else {
+        throw new Error("Import candidate must have content or path or both");
+      }
+    }
+  };
+}
+// An entry with content is treated as a file; otherwise as a directory.
+function isFileCandidate(entry) {
+  return entry.content != null;
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.js
+var import_err_code3 = __toESM(require_err_code(), 1);
+// Coerces each incoming chunk to a Uint8Array (strings are UTF-8 encoded,
+// arrays converted); anything else raises ERR_INVALID_CONTENT.
+var defaultChunkValidator = () => {
+  return async function* validateChunks(source) {
+    for await (const content of source) {
+      if (content.length === void 0) {
+        throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
+      }
+      if (typeof content === "string" || content instanceof String) {
+        yield fromString3(content.toString());
+      } else if (Array.isArray(content)) {
+        yield Uint8Array.from(content);
+      } else if (content instanceof Uint8Array) {
+        yield content;
+      } else {
+        throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
+      }
+    }
+  };
+};
+
+// node_modules/ipfs-unixfs-importer/dist/src/layout/balanced.js
+var DEFAULT_MAX_CHILDREN_PER_NODE = 174;
+// Balanced DAG layout: reduces leaves in batches of maxChildrenPerNode
+// and recurses on the resulting parents until a single root remains.
+function balanced(options) {
+  const maxChildrenPerNode = (options == null ? void 0 : options.maxChildrenPerNode) ?? DEFAULT_MAX_CHILDREN_PER_NODE;
+  return async function balancedLayout(source, reduce2) {
+    const roots = [];
+    for await (const chunked of src_default5(source, maxChildrenPerNode)) {
+      roots.push(await reduce2(chunked));
+    }
+    if (roots.length > 1) {
+      return balancedLayout(roots, reduce2);
+    }
+    return roots[0];
+  };
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/dir.js
+// Base class for in-progress importer directories (flat or sharded);
+// holds tree position, UnixFS metadata and cached cid/size.
+var Dir = class {
+  options;
+  root;
+  dir;
+  path;
+  dirty;
+  flat;
+  parent;
+  parentKey;
+  unixfs;
+  mode;
+  mtime;
+  cid;
+  size;
+  nodeSize;
+  constructor(props, options) {
+    this.options = options ?? {};
+    this.root = props.root;
+    this.dir = props.dir;
+    this.path = props.path;
+    this.dirty = props.dirty;
+    this.flat = props.flat;
+    this.parent = props.parent;
+    this.parentKey = props.parentKey;
+    this.unixfs = props.unixfs;
+    this.mode = props.mode;
+    this.mtime = props.mtime;
+  }
+};
+// Representative v0/v1 CIDs used only to estimate serialized link sizes
+// in DirFlat.estimateNodeSize().
+var CID_V0 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn");
+var CID_V1 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi");
+
+// node_modules/ipfs-unixfs-importer/dist/src/dir-flat.js
+// Non-sharded importer directory backed by a Map of child name -> entry.
+var DirFlat = class extends Dir {
+  _children;
+  constructor(props, options) {
+    super(props, options);
+    this._children = /* @__PURE__ */ new Map();
+  }
+  // Adding a child invalidates any cached cid/size.
+  async put(name4, value) {
+    this.cid = void 0;
+    this.size = void 0;
+    this.nodeSize = void 0;
+    this._children.set(name4, value);
+  }
+  async get(name4) {
+    return Promise.resolve(this._children.get(name4));
+  }
+  childCount() {
+    return this._children.size;
+  }
+  directChildrenCount() {
+    return this.childCount();
+  }
+  onlyChild() {
+    return this._children.values().next().value;
+  }
+  async *eachChildSeries() {
+    for (const [key, child] of this._children.entries()) {
+      yield {
+        key,
+        child
+      };
+    }
+  }
+  // Rough serialized size: name bytes plus a representative CID length
+  // per flushed child; cached until the next put().
+  estimateNodeSize() {
+    if (this.nodeSize !== void 0) {
+      return this.nodeSize;
+    }
+    this.nodeSize = 0;
+    for (const [name4, child] of this._children.entries()) {
+      if (child.size != null && child.cid != null) {
+        this.nodeSize += name4.length + (this.options.cidVersion === 1 ? CID_V1.bytes.byteLength : CID_V0.bytes.byteLength);
+      }
+    }
+    return this.nodeSize;
+  }
+  // Flushes children depth-first, then persists this directory node and
+  // yields every written entry (children first, self last).
+  async *flush(block) {
+    const links = [];
+    for (const [name4, child] of this._children.entries()) {
+      let result = child;
+      if (child instanceof Dir) {
+        for await (const entry of child.flush(block)) {
+          result = entry;
+          yield entry;
+        }
+      }
+      if (result.size != null && result.cid != null) {
+        links.push({
+          Name: name4,
+          Tsize: Number(result.size),
+          Hash: result.cid
+        });
+      }
+    }
+    const unixfs2 = new UnixFS({
+      type: "directory",
+      mtime: this.mtime,
+      mode: this.mode
+    });
+    const node = { Data: unixfs2.marshal(), Links: links };
+    const buffer2 = encode7(prepare(node));
+    const cid = await persist(buffer2, block, this.options);
+    const size = buffer2.length + node.Links.reduce(
+      /**
+       * @param {number} acc
+       * @param {PBLink} curr
+       */
+      (acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize),
+      0
+    );
+    this.cid = cid;
+    this.size = size;
+    yield {
+      cid,
+      unixfs: unixfs2,
+      path: this.path,
+      size: BigInt(size)
+    };
+  }
+};
+
+// node_modules/@multiformats/murmur3/src/index.js
+var import_murmurhash3js_revisited = __toESM(require_murmurhash3js_revisited(), 1);
+// Serializes a 32 bit hash value to 4 little-endian bytes.
+function fromNumberTo32BitBuf(number) {
+  const bytes = new Array(4);
+  for (let i = 0; i < 4; i++) {
+    bytes[i] = number & 255;
+    number = number >> 8;
+  }
+  return new Uint8Array(bytes);
+}
+// Multihash hasher registrations for murmur3 variants.
+// NOTE(review): murmur3-128 and murmur3-x64-64 both use code 34 (0x22),
+// matching the multiformats multicodec table where x64-64 reuses 0x22 —
+// confirm against the current table if codes are ever bumped.
+var murmur332 = from3({
+  name: "murmur3-32",
+  code: 35,
+  encode: (input) => fromNumberTo32BitBuf(import_murmurhash3js_revisited.default.x86.hash32(input))
+});
+var murmur3128 = from3({
+  name: "murmur3-128",
+  code: 34,
+  encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input))
+});
+var murmur364 = from3({
+  name: "murmur3-x64-64",
+  code: 34,
+  encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input)).subarray(0, 8)
+});
+
+// node_modules/hamt-sharding/dist/src/bucket.js
+var import_sparse_array = __toESM(require_sparse_array(), 1);
+var Bucket = class _Bucket {
+  constructor(options, parent, posAtParent = 0) {
+    this._options = options;
+    this._popCount = 0;
+    this._parent = parent;
+    this._posAtParent = posAtParent;
+    this._children = new import_sparse_array.default();
+    this.key = null;
+  }
+  async put(key, value) {
+    const place = await this._findNewBucketAndPos(key);
+    await place.bucket._putAt(place, key, value);
+  }
+  async get(key) {
+    const child = await this._findChild(key);
+    if (child != null) {
+      return child.value;
+    }
+  }
+  async del(key) {
+    const place = await this._findPlace(key);
+    const child = place.bucket._at(place.pos);
+    if (child != null && child.key === key) {
+      place.bucket._delAt(place.pos);
+    }
+  }
+  leafCount() {
+    const children = this._children.compactArray();
+    return children.reduce((acc, child) => {
+      if (child instanceof _Bucket) {
+        return acc + child.leafCount();
+      }
+      return acc + 1;
+    }, 0);
+  }
+  childrenCount() {
+    return this._children.length;
+  }
+  onlyChild() {
+    return this._children.get(0);
+  }
+  *eachLeafSeries() {
+    const children = this._children.compactArray();
+    for (const child of children) {
+      if (child instanceof _Bucket) {
+        yield* child.eachLeafSeries();
+      } else {
+        yield child;
+      }
+    }
+  }
+  serialize(map2, reduce2) {
+    const acc = [];
+    return reduce2(this._children.reduce((acc2, child, index) => {
+      if (child != null) {
+        if (child instanceof _Bucket) {
+          acc2.push(child.serialize(map2, reduce2));
+        } else {
+          acc2.push(map2(child, index));
+        }
+      }
+      return acc2;
+    }, acc));
+  }
+  async asyncTransform(asyncMap, asyncReduce) {
+    return await asyncTransformBucket(this, asyncMap, asyncReduce);
+  }
+  toJSON() {
+    return this.serialize(mapNode, reduceNodes);
+  }
+  prettyPrint() {
+    return JSON.stringify(this.toJSON(), null, "  ");
+  }
+  tableSize() {
+    return Math.pow(2, this._options.bits);
+  }
+  async _findChild(key) {
+    const result = await this._findPlace(key);
+    const child = result.bucket._at(result.pos);
+    if (child instanceof _Bucket) {
+      return void 0;
+    }
+    if (child != null && child.key === key) {
+      return child;
+    }
+  }
+  async _findPlace(key) {
+    const hashValue = this._options.hash(typeof key === "string" ? fromString3(key) : key);
+    const index = await hashValue.take(this._options.bits);
+    const child = this._children.get(index);
+    if (child instanceof _Bucket) {
+      return await child._findPlace(hashValue);
+    }
+    return {
+      bucket: this,
+      pos: index,
+      hash: hashValue,
+      existingChild: child
+    };
+  }
+  async _findNewBucketAndPos(key) {
+    const place = await this._findPlace(key);
+    if (place.existingChild != null && place.existingChild.key !== key) {
+      const bucket = new _Bucket(this._options, place.bucket, place.pos);
+      place.bucket._putObjectAt(place.pos, bucket);
+      const newPlace = await bucket._findPlace(place.existingChild.hash);
+      newPlace.bucket._putAt(newPlace, place.existingChild.key, place.existingChild.value);
+      return await bucket._findNewBucketAndPos(place.hash);
+    }
+    return place;
+  }
+  _putAt(place, key, value) {
+    this._putObjectAt(place.pos, {
+      key,
+      value,
+      hash: place.hash
+    });
+  }
+  _putObjectAt(pos, object) {
+    if (this._children.get(pos) == null) {
+      this._popCount++;
+    }
+    this._children.set(pos, object);
+  }
+  _delAt(pos) {
+    if (pos === -1) {
+      throw new Error("Invalid position");
+    }
+    if (this._children.get(pos) != null) {
+      this._popCount--;
+    }
+    this._children.unset(pos);
+    this._level();
+  }
+  _level() {
+    if (this._parent != null && this._popCount <= 1) {
+      if (this._popCount === 1) {
+        const onlyChild = this._children.find(exists);
+        if (onlyChild != null && !(onlyChild instanceof _Bucket)) {
+          const hash = onlyChild.hash;
+          hash.untake(this._options.bits);
+          const place = {
+            pos: this._posAtParent,
+            hash,
+            bucket: this._parent
+          };
+          this._parent._putAt(place, onlyChild.key, onlyChild.value);
+        }
+      } else {
+        this._parent._delAt(this._posAtParent);
+      }
+    }
+  }
+  _at(index) {
+    return this._children.get(index);
+  }
+};
+function exists(o) {
+  return Boolean(o);
+}
+function mapNode(node, _) {
+  return node.key;
+}
+function reduceNodes(nodes) {
+  return nodes;
+}
+async function asyncTransformBucket(bucket, asyncMap, asyncReduce) {
+  const output = [];
+  for (const child of bucket._children.compactArray()) {
+    if (child instanceof Bucket) {
+      await asyncTransformBucket(child, asyncMap, asyncReduce);
+    } else {
+      const mappedChildren = await asyncMap(child);
+      output.push({
+        bitField: bucket._children.bitField(),
+        children: mappedChildren
+      });
+    }
+  }
+  return await asyncReduce(output);
+}
+
+// node_modules/hamt-sharding/dist/src/consumable-buffer.js
+var START_MASKS = [
+  255,
+  254,
+  252,
+  248,
+  240,
+  224,
+  192,
+  128
+];
+var STOP_MASKS = [
+  1,
+  3,
+  7,
+  15,
+  31,
+  63,
+  127,
+  255
+];
+var ConsumableBuffer = class {
+  constructor(value) {
+    this._value = value;
+    this._currentBytePos = value.length - 1;
+    this._currentBitPos = 7;
+  }
+  availableBits() {
+    return this._currentBitPos + 1 + this._currentBytePos * 8;
+  }
+  totalBits() {
+    return this._value.length * 8;
+  }
+  take(bits) {
+    let pendingBits = bits;
+    let result = 0;
+    while (pendingBits > 0 && this._haveBits()) {
+      const byte = this._value[this._currentBytePos];
+      const availableBits = this._currentBitPos + 1;
+      const taking = Math.min(availableBits, pendingBits);
+      const value = byteBitsToInt(byte, availableBits - taking, taking);
+      result = (result << taking) + value;
+      pendingBits -= taking;
+      this._currentBitPos -= taking;
+      if (this._currentBitPos < 0) {
+        this._currentBitPos = 7;
+        this._currentBytePos--;
+      }
+    }
+    return result;
+  }
+  untake(bits) {
+    this._currentBitPos += bits;
+    while (this._currentBitPos > 7) {
+      this._currentBitPos -= 8;
+      this._currentBytePos += 1;
+    }
+  }
+  _haveBits() {
+    return this._currentBytePos >= 0;
+  }
+};
+function byteBitsToInt(byte, start, length4) {
+  const mask = maskFor(start, length4);
+  return (byte & mask) >>> start;
+}
+function maskFor(start, length4) {
+  return START_MASKS[start] & STOP_MASKS[Math.min(length4 + start - 1, 7)];
+}
+
+// node_modules/hamt-sharding/dist/src/consumable-hash.js
+function wrapHash(hashFn2) {
+  function hashing(value) {
+    if (value instanceof InfiniteHash) {
+      return value;
+    } else {
+      return new InfiniteHash(value, hashFn2);
+    }
+  }
+  return hashing;
+}
+var InfiniteHash = class {
+  constructor(value, hashFn2) {
+    if (!(value instanceof Uint8Array)) {
+      throw new Error("can only hash Uint8Arrays");
+    }
+    this._value = value;
+    this._hashFn = hashFn2;
+    this._depth = -1;
+    this._availableBits = 0;
+    this._currentBufferIndex = 0;
+    this._buffers = [];
+  }
+  async take(bits) {
+    let pendingBits = bits;
+    while (this._availableBits < pendingBits) {
+      await this._produceMoreBits();
+    }
+    let result = 0;
+    while (pendingBits > 0) {
+      const hash = this._buffers[this._currentBufferIndex];
+      const available = Math.min(hash.availableBits(), pendingBits);
+      const took = hash.take(available);
+      result = (result << available) + took;
+      pendingBits -= available;
+      this._availableBits -= available;
+      if (hash.availableBits() === 0) {
+        this._currentBufferIndex++;
+      }
+    }
+    return result;
+  }
+  untake(bits) {
+    let pendingBits = bits;
+    while (pendingBits > 0) {
+      const hash = this._buffers[this._currentBufferIndex];
+      const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits);
+      hash.untake(availableForUntake);
+      pendingBits -= availableForUntake;
+      this._availableBits += availableForUntake;
+      if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {
+        this._depth--;
+        this._currentBufferIndex--;
+      }
+    }
+  }
+  async _produceMoreBits() {
+    this._depth++;
+    const value = this._depth > 0 ? concat2([this._value, Uint8Array.from([this._depth])]) : this._value;
+    const hashValue = await this._hashFn(value);
+    const buffer2 = new ConsumableBuffer(hashValue);
+    this._buffers.push(buffer2);
+    this._availableBits += buffer2.availableBits();
+  }
+};
+
+// node_modules/hamt-sharding/dist/src/index.js
+function createHAMT(options) {
+  if (options == null || options.hashFn == null) {
+    throw new Error("please define an options.hashFn");
+  }
+  const bucketOptions = {
+    bits: options.bits ?? 8,
+    hash: wrapHash(options.hashFn)
+  };
+  return new Bucket(bucketOptions);
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/dir-sharded.js
+async function hamtHashFn(buf2) {
+  return (await murmur3128.encode(buf2)).slice(0, 8).reverse();
+}
+var HAMT_HASH_CODE = BigInt(34);
+var DEFAULT_FANOUT_BITS = 8;
+var DirSharded = class extends Dir {
+  _bucket;
+  constructor(props, options) {
+    super(props, options);
+    this._bucket = createHAMT({
+      hashFn: hamtHashFn,
+      bits: options.shardFanoutBits ?? DEFAULT_FANOUT_BITS
+    });
+  }
+  async put(name4, value) {
+    this.cid = void 0;
+    this.size = void 0;
+    this.nodeSize = void 0;
+    await this._bucket.put(name4, value);
+  }
+  async get(name4) {
+    return this._bucket.get(name4);
+  }
+  childCount() {
+    return this._bucket.leafCount();
+  }
+  directChildrenCount() {
+    return this._bucket.childrenCount();
+  }
+  onlyChild() {
+    return this._bucket.onlyChild();
+  }
+  async *eachChildSeries() {
+    for await (const { key, value } of this._bucket.eachLeafSeries()) {
+      yield {
+        key,
+        child: value
+      };
+    }
+  }
+  estimateNodeSize() {
+    if (this.nodeSize !== void 0) {
+      return this.nodeSize;
+    }
+    this.nodeSize = calculateSize(this._bucket, this, this.options);
+    return this.nodeSize;
+  }
+  async *flush(blockstore) {
+    for await (const entry of flush(this._bucket, blockstore, this, this.options)) {
+      yield {
+        ...entry,
+        path: this.path
+      };
+    }
+  }
+};
+var dir_sharded_default = DirSharded;
+async function* flush(bucket, blockstore, shardRoot, options) {
+  const children = bucket._children;
+  const padLength = (bucket.tableSize() - 1).toString(16).length;
+  const links = [];
+  let childrenSize = 0n;
+  for (let i = 0; i < children.length; i++) {
+    const child = children.get(i);
+    if (child == null) {
+      continue;
+    }
+    const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0");
+    if (child instanceof Bucket) {
+      let shard;
+      for await (const subShard of flush(child, blockstore, null, options)) {
+        shard = subShard;
+      }
+      if (shard == null) {
+        throw new Error("Could not flush sharded directory, no subshard found");
+      }
+      links.push({
+        Name: labelPrefix,
+        Tsize: Number(shard.size),
+        Hash: shard.cid
+      });
+      childrenSize += shard.size;
+    } else if (isDir(child.value)) {
+      const dir2 = child.value;
+      let flushedDir;
+      for await (const entry of dir2.flush(blockstore)) {
+        flushedDir = entry;
+        yield flushedDir;
+      }
+      if (flushedDir == null) {
+        throw new Error("Did not flush dir");
+      }
+      const label = labelPrefix + child.key;
+      links.push({
+        Name: label,
+        Tsize: Number(flushedDir.size),
+        Hash: flushedDir.cid
+      });
+      childrenSize += flushedDir.size;
+    } else {
+      const value = child.value;
+      if (value.cid == null) {
+        continue;
+      }
+      const label = labelPrefix + child.key;
+      const size2 = value.size;
+      links.push({
+        Name: label,
+        Tsize: Number(size2),
+        Hash: value.cid
+      });
+      childrenSize += BigInt(size2 ?? 0);
+    }
+  }
+  const data = Uint8Array.from(children.bitField().reverse());
+  const dir = new UnixFS({
+    type: "hamt-sharded-directory",
+    data,
+    fanout: BigInt(bucket.tableSize()),
+    hashType: HAMT_HASH_CODE,
+    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
+    mode: shardRoot == null ? void 0 : shardRoot.mode
+  });
+  const node = {
+    Data: dir.marshal(),
+    Links: links
+  };
+  const buffer2 = encode7(prepare(node));
+  const cid = await persist(buffer2, blockstore, options);
+  const size = BigInt(buffer2.byteLength) + childrenSize;
+  yield {
+    cid,
+    unixfs: dir,
+    size
+  };
+}
+function isDir(obj) {
+  return typeof obj.flush === "function";
+}
+function calculateSize(bucket, shardRoot, options) {
+  const children = bucket._children;
+  const padLength = (bucket.tableSize() - 1).toString(16).length;
+  const links = [];
+  for (let i = 0; i < children.length; i++) {
+    const child = children.get(i);
+    if (child == null) {
+      continue;
+    }
+    const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0");
+    if (child instanceof Bucket) {
+      const size = calculateSize(child, null, options);
+      links.push({
+        Name: labelPrefix,
+        Tsize: Number(size),
+        Hash: options.cidVersion === 0 ? CID_V0 : CID_V1
+      });
+    } else if (typeof child.value.flush === "function") {
+      const dir2 = child.value;
+      const size = dir2.nodeSize();
+      links.push({
+        Name: labelPrefix + child.key,
+        Tsize: Number(size),
+        Hash: options.cidVersion === 0 ? CID_V0 : CID_V1
+      });
+    } else {
+      const value = child.value;
+      if (value.cid == null) {
+        continue;
+      }
+      const label = labelPrefix + child.key;
+      const size = value.size;
+      links.push({
+        Name: label,
+        Tsize: Number(size),
+        Hash: value.cid
+      });
+    }
+  }
+  const data = Uint8Array.from(children.bitField().reverse());
+  const dir = new UnixFS({
+    type: "hamt-sharded-directory",
+    data,
+    fanout: BigInt(bucket.tableSize()),
+    hashType: HAMT_HASH_CODE,
+    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
+    mode: shardRoot == null ? void 0 : shardRoot.mode
+  });
+  const buffer2 = encode7(prepare({
+    Data: dir.marshal(),
+    Links: links
+  }));
+  return buffer2.length;
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/flat-to-shard.js
+async function flatToShard(child, dir, threshold, options) {
+  let newDir = dir;
+  if (dir instanceof DirFlat && dir.estimateNodeSize() > threshold) {
+    newDir = await convertToShard(dir, options);
+  }
+  const parent = newDir.parent;
+  if (parent != null) {
+    if (newDir !== dir) {
+      if (child != null) {
+        child.parent = newDir;
+      }
+      if (newDir.parentKey == null) {
+        throw new Error("No parent key found");
+      }
+      await parent.put(newDir.parentKey, newDir);
+    }
+    return flatToShard(newDir, parent, threshold, options);
+  }
+  return newDir;
+}
+async function convertToShard(oldDir, options) {
+  const newDir = new dir_sharded_default({
+    root: oldDir.root,
+    dir: true,
+    parent: oldDir.parent,
+    parentKey: oldDir.parentKey,
+    path: oldDir.path,
+    dirty: oldDir.dirty,
+    flat: false,
+    mtime: oldDir.mtime,
+    mode: oldDir.mode
+  }, options);
+  for await (const { key, child } of oldDir.eachChildSeries()) {
+    await newDir.put(key, child);
+  }
+  return newDir;
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/utils/to-path-components.js
+var toPathComponents = (path6 = "") => {
+  return path6.split(/(? 1) {
+      yield* flushAndYield(tree, block);
+    } else {
+      for await (const unwrapped of tree.eachChildSeries()) {
+        if (unwrapped == null) {
+          continue;
+        }
+        yield* flushAndYield(unwrapped.child, block);
+      }
+    }
+  };
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/index.js
+async function* importer(source, blockstore, options = {}) {
+  let candidates;
+  if (Symbol.asyncIterator in source || Symbol.iterator in source) {
+    candidates = source;
+  } else {
+    candidates = [source];
+  }
+  const wrapWithDirectory = options.wrapWithDirectory ?? false;
+  const shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144;
+  const shardFanoutBits = options.shardFanoutBits ?? 8;
+  const cidVersion = options.cidVersion ?? 1;
+  const rawLeaves = options.rawLeaves ?? true;
+  const leafType = options.leafType ?? "file";
+  const fileImportConcurrency = options.fileImportConcurrency ?? 50;
+  const blockWriteConcurrency = options.blockWriteConcurrency ?? 10;
+  const reduceSingleLeafToSelf = options.reduceSingleLeafToSelf ?? true;
+  const chunker = options.chunker ?? fixedSize();
+  const chunkValidator = options.chunkValidator ?? defaultChunkValidator();
+  const buildDag = options.dagBuilder ?? defaultDagBuilder({
+    chunker,
+    chunkValidator,
+    wrapWithDirectory,
+    layout: options.layout ?? balanced(),
+    bufferImporter: options.bufferImporter ?? defaultBufferImporter({
+      cidVersion,
+      rawLeaves,
+      leafType,
+      onProgress: options.onProgress
+    }),
+    blockWriteConcurrency,
+    reduceSingleLeafToSelf,
+    cidVersion,
+    onProgress: options.onProgress
+  });
+  const buildTree = options.treeBuilder ?? defaultTreeBuilder({
+    wrapWithDirectory,
+    shardSplitThresholdBytes,
+    shardFanoutBits,
+    cidVersion,
+    onProgress: options.onProgress
+  });
+  for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore)) {
+    yield {
+      cid: entry.cid,
+      path: entry.path,
+      unixfs: entry.unixfs,
+      size: entry.size
+    };
+  }
+}
+async function importFile(content, blockstore, options = {}) {
+  const result = await src_default4(importer([content], blockstore, options));
+  if (result == null) {
+    throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS");
+  }
+  return result;
+}
+async function importDirectory(content, blockstore, options = {}) {
+  const result = await src_default4(importer([content], blockstore, options));
+  if (result == null) {
+    throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS");
+  }
+  return result;
+}
+async function importBytes(buf2, blockstore, options = {}) {
+  return importFile({
+    content: buf2
+  }, blockstore, options);
+}
+async function importByteStream(bufs, blockstore, options = {}) {
+  return importFile({
+    content: bufs
+  }, blockstore, options);
+}
+
+// node_modules/ipfs-unixfs-importer/dist/src/chunker/rabin.js
+var import_err_code5 = __toESM(require_err_code(), 1);
+var import_rabin_wasm = __toESM(require_src(), 1);
+
+// node_modules/@helia/unixfs/dist/src/commands/add.js
+var defaultImporterSettings = {
+  cidVersion: 1,
+  rawLeaves: true,
+  layout: balanced({
+    maxChildrenPerNode: 1024
+  }),
+  chunker: fixedSize({
+    chunkSize: 1048576
+  })
+};
+async function* addAll(source, blockstore, options = {}) {
+  yield* importer(source, blockstore, {
+    ...defaultImporterSettings,
+    ...options
+  });
+}
+async function addBytes(bytes, blockstore, options = {}) {
+  const { cid } = await importBytes(bytes, blockstore, {
+    ...defaultImporterSettings,
+    ...options
+  });
+  return cid;
+}
+async function addByteStream(bytes, blockstore, options = {}) {
+  const { cid } = await importByteStream(bytes, blockstore, {
+    ...defaultImporterSettings,
+    ...options
+  });
+  return cid;
+}
+async function addFile(file, blockstore, options = {}) {
+  const { cid } = await importFile(file, blockstore, {
+    ...defaultImporterSettings,
+    ...options
+  });
+  return cid;
+}
+async function addDirectory(dir, blockstore, options = {}) {
+  const { cid } = await importDirectory({
+    ...dir,
+    path: dir.path ?? "-"
+  }, blockstore, {
+    ...defaultImporterSettings,
+    ...options
+  });
+  return cid;
+}
+
+// node_modules/ipfs-unixfs-exporter/dist/src/index.js
+var import_err_code15 = __toESM(require_err_code(), 1);
+
+// node_modules/it-last/dist/src/index.js
+function isAsyncIterable6(thing) {
+  return thing[Symbol.asyncIterator] != null;
+}
+function last(source) {
+  if (isAsyncIterable6(source)) {
+    return (async () => {
+      let res2;
+      for await (const entry of source) {
+        res2 = entry;
+      }
+      return res2;
+    })();
+  }
+  let res;
+  for (const entry of source) {
+    res = entry;
+  }
+  return res;
+}
+var src_default7 = last;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js
+var import_err_code14 = __toESM(require_err_code(), 1);
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.js
+var import_err_code6 = __toESM(require_err_code(), 1);
+var resolve = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
+  const block = await blockstore.get(cid, options);
+  const object = decode6(block);
+  let subObject = object;
+  let subPath = path6;
+  while (toResolve.length > 0) {
+    const prop = toResolve[0];
+    if (prop in subObject) {
+      toResolve.shift();
+      subPath = `${subPath}/${prop}`;
+      const subObjectCid = CID2.asCID(subObject[prop]);
+      if (subObjectCid != null) {
+        return {
+          entry: {
+            type: "object",
+            name: name4,
+            path: path6,
+            cid,
+            node: block,
+            depth,
+            size: BigInt(block.length),
+            content: async function* () {
+              yield object;
+            }
+          },
+          next: {
+            cid: subObjectCid,
+            name: prop,
+            path: subPath,
+            toResolve
+          }
+        };
+      }
+      subObject = subObject[prop];
+    } else {
+      throw (0, import_err_code6.default)(new Error(`No property named ${prop} found in cbor node ${cid}`), "ERR_NO_PROP");
+    }
+  }
+  return {
+    entry: {
+      type: "object",
+      name: name4,
+      path: path6,
+      cid,
+      node: block,
+      depth,
+      size: BigInt(block.length),
+      content: async function* () {
+        yield object;
+      }
+    }
+  };
+};
+var dag_cbor_default = resolve;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js
+var import_err_code8 = __toESM(require_err_code(), 1);
+
+// node_modules/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.js
+function extractDataFromBlock(block, blockStart, requestedStart, requestedEnd) {
+  const blockLength = BigInt(block.length);
+  const blockEnd = BigInt(blockStart + blockLength);
+  if (requestedStart >= blockEnd || requestedEnd < blockStart) {
+    return new Uint8Array(0);
+  }
+  if (requestedEnd >= blockStart && requestedEnd < blockEnd) {
+    block = block.subarray(0, Number(requestedEnd - blockStart));
+  }
+  if (requestedStart >= blockStart && requestedStart < blockEnd) {
+    block = block.subarray(Number(requestedStart - blockStart));
+  }
+  return block;
+}
+var extract_data_from_block_default = extractDataFromBlock;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.js
+var import_err_code7 = __toESM(require_err_code(), 1);
+var validateOffsetAndLength = (size, offset = 0, length4 = size) => {
+  const fileSize = BigInt(size);
+  const start = BigInt(offset ?? 0);
+  let end = BigInt(length4);
+  if (end !== fileSize) {
+    end = start + end;
+  }
+  if (end > fileSize) {
+    end = fileSize;
+  }
+  if (start < 0n) {
+    throw (0, import_err_code7.default)(new Error("Offset must be greater than or equal to 0"), "ERR_INVALID_PARAMS");
+  }
+  if (start > fileSize) {
+    throw (0, import_err_code7.default)(new Error("Offset must be less than the file size"), "ERR_INVALID_PARAMS");
+  }
+  if (end < 0n) {
+    throw (0, import_err_code7.default)(new Error("Length must be greater than or equal to 0"), "ERR_INVALID_PARAMS");
+  }
+  if (end > fileSize) {
+    throw (0, import_err_code7.default)(new Error("Length must be less than the file size"), "ERR_INVALID_PARAMS");
+  }
+  return {
+    start,
+    end
+  };
+};
+var validate_offset_and_length_default = validateOffsetAndLength;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js
+var rawContent = (node) => {
+  async function* contentGenerator(options = {}) {
+    var _a;
+    const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length);
+    const buf2 = extract_data_from_block_default(node, 0n, start, end);
+    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:identity", {
+      bytesRead: BigInt(buf2.byteLength),
+      totalBytes: end - start,
+      fileSize: BigInt(node.byteLength)
+    }));
+    yield buf2;
+  }
+  return contentGenerator;
+};
+var resolve2 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
+  if (toResolve.length > 0) {
+    throw (0, import_err_code8.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND");
+  }
+  const buf2 = decode10(cid.multihash.bytes);
+  return {
+    entry: {
+      type: "identity",
+      name: name4,
+      path: path6,
+      cid,
+      content: rawContent(buf2.digest),
+      depth,
+      size: BigInt(buf2.digest.length),
+      node: buf2.digest
+    }
+  };
+};
+var identity_default = resolve2;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/raw.js
+var import_err_code9 = __toESM(require_err_code(), 1);
+var rawContent2 = (node) => {
+  async function* contentGenerator(options = {}) {
+    var _a;
+    const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length);
+    const buf2 = extract_data_from_block_default(node, 0n, start, end);
+    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:raw", {
+      bytesRead: BigInt(buf2.byteLength),
+      totalBytes: end - start,
+      fileSize: BigInt(node.byteLength)
+    }));
+    yield buf2;
+  }
+  return contentGenerator;
+};
+var resolve3 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
+  if (toResolve.length > 0) {
+    throw (0, import_err_code9.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND");
+  }
+  const block = await blockstore.get(cid, options);
+  return {
+    entry: {
+      type: "raw",
+      name: name4,
+      path: path6,
+      cid,
+      content: rawContent2(block),
+      depth,
+      size: BigInt(block.length),
+      node: block
+    }
+  };
+};
+var raw_default = resolve3;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js
+var import_err_code13 = __toESM(require_err_code(), 1);
+
+// node_modules/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.js
+var import_err_code10 = __toESM(require_err_code(), 1);
+var hashFn = async function(buf2) {
+  return (await murmur3128.encode(buf2)).slice(0, 8).reverse();
+};
+var addLinksToHamtBucket = async (links, bucket, rootBucket) => {
+  const padLength = (bucket.tableSize() - 1).toString(16).length;
+  await Promise.all(links.map(async (link) => {
+    if (link.Name == null) {
+      throw new Error("Unexpected Link without a Name");
+    }
+    if (link.Name.length === padLength) {
+      const pos = parseInt(link.Name, 16);
+      bucket._putObjectAt(pos, new Bucket({
+        hash: rootBucket._options.hash,
+        bits: rootBucket._options.bits
+      }, bucket, pos));
+      return;
+    }
+    await rootBucket.put(link.Name.substring(2), true);
+  }));
+};
+var toPrefix = (position, padLength) => {
+  return position.toString(16).toUpperCase().padStart(padLength, "0").substring(0, padLength);
+};
+var toBucketPath = (position) => {
+  let bucket = position.bucket;
+  const path6 = [];
+  while (bucket._parent != null) {
+    path6.push(bucket);
+    bucket = bucket._parent;
+  }
+  path6.push(bucket);
+  return path6.reverse();
+};
+var findShardCid = async (node, name4, blockstore, context, options) => {
+  if (context == null) {
+    if (node.Data == null) {
+      throw (0, import_err_code10.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
+    }
+    let dir;
+    try {
+      dir = UnixFS.unmarshal(node.Data);
+    } catch (err) {
+      throw (0, import_err_code10.default)(err, "ERR_NOT_UNIXFS");
+    }
+    if (dir.type !== "hamt-sharded-directory") {
+      throw (0, import_err_code10.default)(new Error("not a HAMT"), "ERR_NOT_UNIXFS");
+    }
+    if (dir.fanout == null) {
+      throw (0, import_err_code10.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS");
+    }
+    const rootBucket = createHAMT({
+      hashFn,
+      bits: Math.log2(Number(dir.fanout))
+    });
+    context = {
+      rootBucket,
+      hamtDepth: 1,
+      lastBucket: rootBucket
+    };
+  }
+  const padLength = (context.lastBucket.tableSize() - 1).toString(16).length;
+  await addLinksToHamtBucket(node.Links, context.lastBucket, context.rootBucket);
+  const position = await context.rootBucket._findNewBucketAndPos(name4);
+  let prefix = toPrefix(position.pos, padLength);
+  const bucketPath = toBucketPath(position);
+  if (bucketPath.length > context.hamtDepth) {
+    context.lastBucket = bucketPath[context.hamtDepth];
+    prefix = toPrefix(context.lastBucket._posAtParent, padLength);
+  }
+  const link = node.Links.find((link2) => {
+    if (link2.Name == null) {
+      return false;
+    }
+    const entryPrefix = link2.Name.substring(0, padLength);
+    const entryName = link2.Name.substring(padLength);
+    if (entryPrefix !== prefix) {
+      return false;
+    }
+    if (entryName !== "" && entryName !== name4) {
+      return false;
+    }
+    return true;
+  });
+  if (link == null) {
+    return;
+  }
+  if (link.Name != null && link.Name.substring(padLength) === name4) {
+    return link.Hash;
+  }
+  context.hamtDepth++;
+  const block = await blockstore.get(link.Hash, options);
+  node = decode11(block);
+  return findShardCid(node, name4, blockstore, context, options);
+};
+var find_cid_in_shard_default = findShardCid;
+
+// node_modules/it-filter/dist/src/index.js
+function isAsyncIterable7(thing) {
+  return thing[Symbol.asyncIterator] != null;
+}
// it-filter: yield only the values of `source` for which `fn` returns truthy.
// A sync source stays sync unless `fn` turns out to be async — detected by
// peeking at the first value and checking whether the predicate returned a
// thenable.
function filter(source, fn) {
  if (isAsyncIterable7(source)) {
    // Async source: always an async generator, predicate awaited per entry.
    return async function* () {
      for await (const entry of source) {
        if (await fn(entry)) {
          yield entry;
        }
      }
    }();
  }
  // Sync source: peek one value to probe the predicate's return type.
  // NOTE(review): src_default2 appears to be it-peekable — confirm in bundle.
  const peekable2 = src_default2(source);
  const { value, done } = peekable2.next();
  if (done === true) {
    return function* () {
    }();
  }
  const res = fn(value);
  if (typeof res.then === "function") {
    // Async predicate on a sync source: upgrade to async, emitting the
    // already-peeked value first if it passes.
    return async function* () {
      if (await res) {
        yield value;
      }
      for await (const entry of peekable2) {
        if (await fn(entry)) {
          yield entry;
        }
      }
    }();
  }
  const func = fn;
  // Fully synchronous path. Note the strict `=== true` on the peeked result
  // (upstream behavior): a truthy non-boolean drops the peeked value.
  return function* () {
    if (res === true) {
      yield value;
    }
    for (const entry of peekable2) {
      if (func(entry)) {
        yield entry;
      }
    }
  }();
}
var src_default8 = filter;
+
+// node_modules/it-parallel/dist/src/index.js
// it-parallel: invoke async "task" thunks pulled from `source` with bounded
// concurrency, yielding their results. `ordered: true` preserves source
// order; otherwise results are yielded as they complete.
var CustomEvent = globalThis.CustomEvent ?? Event;
async function* parallel(source, options = {}) {
  let concurrency = options.concurrency ?? Infinity;
  if (concurrency < 1) {
    concurrency = Infinity;
  }
  const ordered = options.ordered == null ? false : options.ordered;
  // Used purely as an in-process signal that some op finished.
  const emitter = new EventTarget();
  const ops = [];
  let slotAvailable = pDefer();
  let resultAvailable = pDefer();
  let sourceFinished = false;
  let sourceErr;
  let opErred = false;
  emitter.addEventListener("task-complete", () => {
    resultAvailable.resolve();
  });
  // Producer: pulls tasks off the source, waiting while `concurrency` ops are
  // already in flight. Deliberately unawaited; runs beside the consumer loop.
  void Promise.resolve().then(async () => {
    try {
      for await (const task of source) {
        if (ops.length === concurrency) {
          slotAvailable = pDefer();
          await slotAvailable.promise;
        }
        if (opErred) {
          break;
        }
        const op = {
          done: false
        };
        ops.push(op);
        task().then((result) => {
          op.done = true;
          op.ok = true;
          op.value = result;
          emitter.dispatchEvent(new CustomEvent("task-complete"));
        }, (err) => {
          op.done = true;
          op.err = err;
          emitter.dispatchEvent(new CustomEvent("task-complete"));
        });
      }
      sourceFinished = true;
      emitter.dispatchEvent(new CustomEvent("task-complete"));
    } catch (err) {
      sourceErr = err;
      emitter.dispatchEvent(new CustomEvent("task-complete"));
    }
  });
  function valuesAvailable() {
    var _a;
    if (ordered) {
      // Ordered mode: only the head of the queue may be yielded next.
      return (_a = ops[0]) == null ? void 0 : _a.done;
    }
    return Boolean(ops.find((op) => op.done));
  }
  function* yieldOrderedValues() {
    while (ops.length > 0 && ops[0].done) {
      const op = ops[0];
      ops.shift();
      if (op.ok) {
        yield op.value;
      } else {
        // Free the producer before propagating so it can observe `opErred`.
        opErred = true;
        slotAvailable.resolve();
        throw op.err;
      }
      slotAvailable.resolve();
    }
  }
  function* yieldUnOrderedValues() {
    while (valuesAvailable()) {
      for (let i = 0; i < ops.length; i++) {
        if (ops[i].done) {
          const op = ops[i];
          ops.splice(i, 1);
          i--;
          if (op.ok) {
            yield op.value;
          } else {
            opErred = true;
            slotAvailable.resolve();
            throw op.err;
          }
          slotAvailable.resolve();
        }
      }
    }
  }
  // Consumer loop: wait for a completion signal, surface producer errors,
  // then yield whatever is ready.
  while (true) {
    if (!valuesAvailable()) {
      resultAvailable = pDefer();
      await resultAvailable.promise;
    }
    if (sourceErr != null) {
      throw sourceErr;
    }
    if (ordered) {
      yield* yieldOrderedValues();
    } else {
      yield* yieldUnOrderedValues();
    }
    if (sourceFinished && ops.length === 0) {
      break;
    }
  }
}
+
+// node_modules/it-pushable/dist/src/fifo.js
// Ring buffer with a fixed power-of-two capacity. `push` reports failure
// (returns false) when the slot at the write head is still occupied instead
// of overwriting; `shift` returns undefined when empty.
var FixedFIFO = class {
  buffer;
  mask;
  top;
  btm;
  next;
  constructor(hwm) {
    // Capacity must be a positive power of two so wrap-around is a bitmask.
    if (!(hwm > 0) || (hwm - 1 & hwm) !== 0) {
      throw new Error("Max size for a FixedFIFO should be a power of two");
    }
    this.buffer = new Array(hwm);
    this.mask = hwm - 1;
    this.top = 0;
    this.btm = 0;
    this.next = null;
  }
  push(data) {
    const writeAt = this.top;
    if (this.buffer[writeAt] !== void 0) {
      return false;
    }
    this.buffer[writeAt] = data;
    this.top = (writeAt + 1) & this.mask;
    return true;
  }
  shift() {
    const readAt = this.btm;
    const item = this.buffer[readAt];
    if (item === void 0) {
      return void 0;
    }
    this.buffer[readAt] = void 0;
    this.btm = (readAt + 1) & this.mask;
    return item;
  }
  isEmpty() {
    return this.buffer[this.btm] === void 0;
  }
};
// Unbounded FIFO built from a linked chain of FixedFIFO segments; a new,
// doubled segment is appended whenever the head segment is full. `size`
// tracks total byteLength of queued values (non-binary values count as 1).
var FIFO = class {
  size;
  hwm;
  head;
  tail;
  constructor(options = {}) {
    this.hwm = options.splitLimit ?? 16;
    this.head = new FixedFIFO(this.hwm);
    this.tail = this.head;
    this.size = 0;
  }
  calculateSize(obj) {
    const byteLength = obj == null ? void 0 : obj.byteLength;
    if (byteLength != null) {
      return byteLength;
    }
    return 1;
  }
  push(val) {
    if (val?.value != null) {
      this.size += this.calculateSize(val.value);
    }
    const accepted = this.head.push(val);
    if (!accepted) {
      // Head segment is full: grow by chaining a segment twice as large.
      const grown = new FixedFIFO(2 * this.head.buffer.length);
      this.head.next = grown;
      this.head = grown;
      grown.push(val);
    }
  }
  shift() {
    let entry = this.tail.shift();
    if (entry === void 0 && this.tail.next != null) {
      // Tail segment exhausted: advance to the next segment in the chain.
      const successor = this.tail.next;
      this.tail.next = null;
      this.tail = successor;
      entry = successor.shift();
    }
    if (entry?.value != null) {
      this.size -= this.calculateSize(entry.value);
    }
    return entry;
  }
  isEmpty() {
    return this.head.isEmpty();
  }
};
+
+// node_modules/it-pushable/dist/src/index.js
// Error raised when an it-pushable operation is aborted via an AbortSignal.
var AbortError3 = class extends Error {
  type;
  code;
  constructor(reason, errorCode) {
    super(reason ?? "The operation was aborted");
    this.type = "aborted";
    this.code = errorCode ?? "ABORT_ERR";
  }
};
// Creates a pushable async iterable. The adapter turns a raw queue entry
// ({ done, value } or { error }) into an iterator result, rethrowing any
// queued error.
function pushable(options = {}) {
  const getNext = (queue) => {
    const entry = queue.shift();
    if (entry == null) {
      return { done: true };
    }
    if (entry.error != null) {
      throw entry.error;
    }
    return {
      done: entry.done === true,
      // @ts-expect-error if done is false, value will be present
      value: entry.value
    };
  };
  return _pushable(getNext, options);
}
// Core it-pushable implementation: an async iterable whose values are fed in
// imperatively via `push` and terminated via `end(err?)`. `getNext` adapts a
// raw FIFO entry into an iterator result. When `options.onEnd` is supplied,
// the iterable is wrapped so the callback fires exactly once on termination.
function _pushable(getNext, options) {
  options = options ?? {};
  let onEnd = options.onEnd;
  let buffer2 = new FIFO();
  let pushable2;
  let onNext;
  let ended;
  let drain2 = pDefer();
  const waitNext = async () => {
    try {
      if (!buffer2.isEmpty()) {
        return getNext(buffer2);
      }
      if (ended) {
        return { done: true };
      }
      // Nothing buffered: park until the next push()/end() hands us a value.
      return await new Promise((resolve6, reject) => {
        onNext = (next) => {
          onNext = null;
          buffer2.push(next);
          try {
            resolve6(getNext(buffer2));
          } catch (err) {
            reject(err);
          }
          return pushable2;
        };
      });
    } finally {
      // Wake onEmpty() waiters once the buffer drains (microtask-deferred so
      // the current consumer finishes first).
      if (buffer2.isEmpty()) {
        queueMicrotask(() => {
          drain2.resolve();
          drain2 = pDefer();
        });
      }
    }
  };
  const bufferNext = (next) => {
    if (onNext != null) {
      // A consumer is already waiting — hand the value over directly.
      return onNext(next);
    }
    buffer2.push(next);
    return pushable2;
  };
  const bufferError = (err) => {
    // An error preempts anything still buffered.
    buffer2 = new FIFO();
    if (onNext != null) {
      return onNext({ error: err });
    }
    buffer2.push({ error: err });
    return pushable2;
  };
  const push = (value) => {
    if (ended) {
      return pushable2;
    }
    if ((options == null ? void 0 : options.objectMode) !== true && (value == null ? void 0 : value.byteLength) == null) {
      throw new Error("objectMode was not true but tried to push non-Uint8Array value");
    }
    return bufferNext({ done: false, value });
  };
  const end = (err) => {
    if (ended)
      return pushable2;
    ended = true;
    return err != null ? bufferError(err) : bufferNext({ done: true });
  };
  const _return = () => {
    buffer2 = new FIFO();
    end();
    return { done: true };
  };
  const _throw = (err) => {
    end(err);
    return { done: true };
  };
  pushable2 = {
    [Symbol.asyncIterator]() {
      return this;
    },
    next: waitNext,
    return: _return,
    throw: _throw,
    push,
    end,
    get readableLength() {
      return buffer2.size;
    },
    // Resolves once the buffer is empty; abortable via options2.signal.
    onEmpty: async (options2) => {
      const signal = options2 == null ? void 0 : options2.signal;
      signal == null ? void 0 : signal.throwIfAborted();
      if (buffer2.isEmpty()) {
        return;
      }
      let cancel;
      let listener;
      if (signal != null) {
        cancel = new Promise((resolve6, reject) => {
          listener = () => {
            reject(new AbortError3());
          };
          signal.addEventListener("abort", listener);
        });
      }
      try {
        await Promise.race([
          drain2.promise,
          cancel
        ]);
      } finally {
        if (listener != null && signal != null) {
          signal == null ? void 0 : signal.removeEventListener("abort", listener);
        }
      }
    }
  };
  if (onEnd == null) {
    return pushable2;
  }
  // onEnd wrapper: delegates everything, clearing `onEnd` after the first
  // terminating call so the callback cannot fire twice.
  const _pushable2 = pushable2;
  pushable2 = {
    [Symbol.asyncIterator]() {
      return this;
    },
    next() {
      return _pushable2.next();
    },
    throw(err) {
      _pushable2.throw(err);
      if (onEnd != null) {
        onEnd(err);
        onEnd = void 0;
      }
      return { done: true };
    },
    return() {
      _pushable2.return();
      if (onEnd != null) {
        onEnd();
        onEnd = void 0;
      }
      return { done: true };
    },
    push,
    end(err) {
      _pushable2.end(err);
      if (onEnd != null) {
        onEnd(err);
        onEnd = void 0;
      }
      return pushable2;
    },
    get readableLength() {
      return _pushable2.readableLength;
    },
    onEmpty: (opts) => {
      return _pushable2.onEmpty(opts);
    }
  };
  return pushable2;
}
+
+// node_modules/it-merge/dist/src/index.js
function isAsyncIterable8(thing) {
  return thing[Symbol.asyncIterator] != null;
}
// it-merge: combine several (a)sync iterables into one. All-sync inputs are
// concatenated synchronously in argument order; any async input upgrades the
// result to an async iterable with arrival-order interleaving.
function merge(...sources) {
  const syncSources = [];
  for (const source of sources) {
    if (!isAsyncIterable8(source)) {
      syncSources.push(source);
    }
  }
  if (syncSources.length === sources.length) {
    // Fully synchronous fast path.
    return function* () {
      for (const source of syncSources) {
        yield* source;
      }
    }();
  }
  return async function* () {
    const output = pushable({
      objectMode: true
    });
    // Drain all sources concurrently into the pushable; the first rejection
    // ends the output with that error.
    void Promise.resolve().then(async () => {
      try {
        await Promise.all(sources.map(async (source) => {
          for await (const item of source) {
            output.push(item);
          }
        }));
        output.end();
      } catch (err) {
        output.end(err);
      }
    });
    yield* output;
  }();
}
var src_default9 = merge;
+
+// node_modules/it-pipe/dist/src/index.js
// it-pipe: chain a source, zero or more transforms and an optional sink.
// Duplex objects ({ sink, source }) are adapted according to position:
// leading → source only, trailing → sink only, middle → transform.
function pipe(first2, ...rest) {
  if (first2 == null) {
    throw new Error("Empty pipeline");
  }
  if (isDuplex(first2)) {
    const duplex = first2;
    first2 = () => duplex.source;
  } else if (isIterable2(first2) || isAsyncIterable9(first2)) {
    // Plain iterable: wrap in a thunk so rawPipe treats all stages alike.
    const source = first2;
    first2 = () => source;
  }
  const fns = [first2, ...rest];
  if (fns.length > 1) {
    if (isDuplex(fns[fns.length - 1])) {
      fns[fns.length - 1] = fns[fns.length - 1].sink;
    }
  }
  if (fns.length > 2) {
    for (let i = 1; i < fns.length - 1; i++) {
      if (isDuplex(fns[i])) {
        fns[i] = duplexPipelineFn(fns[i]);
      }
    }
  }
  return rawPipe(...fns);
}
// Threads the previous stage's result into the next stage, left to right.
// The first stage receives `undefined`; an empty call returns `undefined`.
var rawPipe = (...fns) => {
  let acc;
  for (const stage of fns) {
    acc = stage(acc);
  }
  return acc;
};
// Null-safe check for the async-iterator protocol.
var isAsyncIterable9 = (obj) => {
  return obj?.[Symbol.asyncIterator] != null;
};
// Null-safe check for the sync-iterator protocol.
var isIterable2 = (obj) => {
  return obj?.[Symbol.iterator] != null;
};
// A duplex is any non-null object exposing both `sink` and `source`.
var isDuplex = (obj) => {
  return obj != null && obj.sink != null && obj.source != null;
};
// Adapts a mid-pipeline duplex into a transform function. When sink()
// returns a promise, its rejection/completion is merged into the output
// stream so downstream consumers observe sink failures.
var duplexPipelineFn = (duplex) => {
  return (source) => {
    const p = duplex.sink(source);
    if ((p == null ? void 0 : p.then) != null) {
      // The pushable only ever carries the sink's termination/error signal.
      const stream = pushable({
        objectMode: true
      });
      p.then(() => {
        stream.end();
      }, (err) => {
        stream.end(err);
      });
      let sourceWrap;
      const source2 = duplex.source;
      if (isAsyncIterable9(source2)) {
        sourceWrap = async function* () {
          yield* source2;
          stream.end();
        };
      } else if (isIterable2(source2)) {
        sourceWrap = function* () {
          yield* source2;
          stream.end();
        };
      } else {
        throw new Error("Unknown duplex source type - must be Iterable or AsyncIterable");
      }
      // Merge real output with the signal stream so either side can end it.
      return src_default9(stream, sourceWrap());
    }
    return duplex.source;
  };
};
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.js
// Exporter content generator for plain unixfs directories: resolves each
// link to its entry, in source order, with bounded parallelism.
var directoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
  async function* yieldDirectoryContent(options = {}) {
    var _a;
    const offset = options.offset ?? 0;
    // NOTE(review): `length` is used as a slice END index (slice(offset,
    // length)), not a count — matches upstream ipfs-unixfs-exporter.
    const length4 = options.length ?? node.Links.length;
    const links = node.Links.slice(offset, length4);
    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:directory", {
      cid
    }));
    // map links → async thunks, run ordered in parallel, drop null entries.
    yield* pipe(links, (source) => src_default3(source, (link) => {
      return async () => {
        const linkName = link.Name ?? "";
        const linkPath = `${path6}/${linkName}`;
        const result = await resolve6(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options);
        return result.entry;
      };
    }), (source) => parallel(source, { ordered: true }), (source) => src_default8(source, (entry) => entry != null));
  }
  return yieldDirectoryContent;
};
var directory_default = directoryContent;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.js
+var import_err_code11 = __toESM(require_err_code(), 1);
// Streams the byte range [start, end) of a unixfs file DAG into `queue`,
// descending only into child links whose byte span overlaps the range.
async function walkDAG(blockstore, node, queue, streamPosition, start, end, options) {
  if (node instanceof Uint8Array) {
    // Raw leaf block: slice out the overlapping bytes and stop.
    const buf2 = extract_data_from_block_default(node, streamPosition, start, end);
    queue.push(buf2);
    return;
  }
  if (node.Data == null) {
    throw (0, import_err_code11.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
  }
  let file;
  try {
    file = UnixFS.unmarshal(node.Data);
  } catch (err) {
    throw (0, import_err_code11.default)(err, "ERR_NOT_UNIXFS");
  }
  if (file.data != null) {
    // Inline data stored directly on this node.
    const data = file.data;
    const buf2 = extract_data_from_block_default(data, streamPosition, start, end);
    queue.push(buf2);
    streamPosition += BigInt(buf2.byteLength);
  }
  const childOps = [];
  if (node.Links.length !== file.blockSizes.length) {
    throw (0, import_err_code11.default)(new Error("Inconsistent block sizes and dag links"), "ERR_NOT_UNIXFS");
  }
  // Select only the children whose byte span intersects the request.
  for (let i = 0; i < node.Links.length; i++) {
    const childLink = node.Links[i];
    const childStart = streamPosition;
    const childEnd = childStart + file.blockSizes[i];
    if (start >= childStart && start < childEnd || // child has offset byte
    end >= childStart && end <= childEnd || // child has end byte
    start < childStart && end > childEnd) {
      childOps.push({
        link: childLink,
        blockStart: streamPosition
      });
    }
    streamPosition = childEnd;
    if (streamPosition > end) {
      break;
    }
  }
  // Fetch child blocks in parallel but recurse strictly in order so bytes
  // arrive in file order.
  await pipe(childOps, (source) => src_default3(source, (op) => {
    return async () => {
      const block = await blockstore.get(op.link.Hash, options);
      return {
        ...op,
        block
      };
    };
  }), (source) => parallel(source, {
    ordered: true
  }), async (source) => {
    for await (const { link, block, blockStart } of source) {
      let child;
      switch (link.Hash.code) {
        case code2:
          child = decode11(block);
          break;
        case code3:
          child = block;
          break;
        default:
          queue.end((0, import_err_code11.default)(new Error(`Unsupported codec: ${link.Hash.code}`), "ERR_NOT_UNIXFS"));
          return;
      }
      // Serialise recursion through a single-concurrency queue so errors
      // terminate the byte queue promptly.
      const childQueue = new dist_default({
        concurrency: 1
      });
      childQueue.on("error", (error) => {
        queue.end(error);
      });
      void childQueue.add(async () => {
        var _a;
        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", {
          cid: link.Hash
        }));
        await walkDAG(blockstore, child, queue, blockStart, start, end, options);
      });
      await childQueue.onIdle();
    }
  });
  if (streamPosition >= end) {
    queue.end();
  }
}
// Exporter content generator for unixfs files: validates the requested
// offset/length, walks the DAG in the background and yields byte chunks,
// guarding against over- and under-reads relative to the advertised size.
var fileContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
  async function* yieldFileContent(options = {}) {
    var _a, _b;
    const fileSize = unixfs2.fileSize();
    if (fileSize === void 0) {
      throw new Error("File was a directory");
    }
    const { start, end } = validate_offset_and_length_default(fileSize, options.offset, options.length);
    if (end === 0n) {
      return;
    }
    let read4 = 0n;
    const wanted = end - start;
    const queue = pushable();
    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", {
      cid
    }));
    // The walk runs unawaited; failures surface through the queue.
    void walkDAG(blockstore, node, queue, 0n, start, end, options).catch((err) => {
      queue.end(err);
    });
    for await (const buf2 of queue) {
      if (buf2 == null) {
        continue;
      }
      read4 += BigInt(buf2.byteLength);
      if (read4 > wanted) {
        queue.end();
        throw (0, import_err_code11.default)(new Error("Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect"), "ERR_OVER_READ");
      }
      if (read4 === wanted) {
        queue.end();
      }
      (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:exporter:progress:unixfs:file", {
        bytesRead: read4,
        totalBytes: wanted,
        fileSize
      }));
      yield buf2;
    }
    if (read4 < wanted) {
      throw (0, import_err_code11.default)(new Error("Traversed entire DAG but did not read enough bytes"), "ERR_UNDER_READ");
    }
  }
  return yieldFileContent;
};
var file_default = fileContent;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js
+var import_err_code12 = __toESM(require_err_code(), 1);
// Exporter content generator for HAMT-sharded directories: walks the shard
// recursively, yielding named entries and descending into sub-shards.
var hamtShardedDirectoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
  function yieldHamtDirectoryContent(options = {}) {
    var _a;
    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", {
      cid
    }));
    return listDirectory(node, path6, resolve6, depth, blockstore, options);
  }
  return yieldHamtDirectoryContent;
};
async function* listDirectory(node, path6, resolve6, depth, blockstore, options) {
  const links = node.Links;
  if (node.Data == null) {
    throw (0, import_err_code12.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
  }
  let dir;
  try {
    dir = UnixFS.unmarshal(node.Data);
  } catch (err) {
    throw (0, import_err_code12.default)(err, "ERR_NOT_UNIXFS");
  }
  if (dir.fanout == null) {
    throw (0, import_err_code12.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS");
  }
  // Link names are prefixed with a fixed-width hex bucket index; strip it to
  // recover the entry name. An empty remainder marks a sub-shard link.
  const padLength = (dir.fanout - 1n).toString(16).length;
  const results = pipe(links, (source) => src_default3(source, (link) => {
    return async () => {
      var _a;
      const name4 = link.Name != null ? link.Name.substring(padLength) : null;
      if (name4 != null && name4 !== "") {
        const result = await resolve6(link.Hash, name4, `${path6}/${name4}`, [], depth + 1, blockstore, options);
        return { entries: result.entry == null ? [] : [result.entry] };
      } else {
        // Sub-shard: fetch the child block and recurse.
        const block = await blockstore.get(link.Hash, options);
        node = decode11(block);
        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", {
          cid: link.Hash
        }));
        return { entries: listDirectory(node, path6, resolve6, depth, blockstore, options) };
      }
    };
  }), (source) => parallel(source, { ordered: true }));
  for await (const { entries } of results) {
    yield* entries;
  }
}
var hamt_sharded_directory_default = hamtShardedDirectoryContent;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js
// Returns the Hash of the first link matching `name4`, or undefined.
var findLinkCid = (node, name4) => {
  for (const candidate of node.Links) {
    if (candidate.Name === name4) {
      return candidate.Hash;
    }
  }
  return void 0;
};
// Maps unixfs node types to their content-generator factories. Metadata and
// symlink nodes expose no streamable content, so they yield nothing.
var contentExporters = {
  raw: file_default,
  file: file_default,
  directory: directory_default,
  "hamt-sharded-directory": hamt_sharded_directory_default,
  metadata: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
    return () => [];
  },
  symlink: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
    return () => [];
  }
};
// Resolves a dag-pb/unixfs node to an exporter entry and, when more path
// segments remain, a `next` descriptor pointing at the next child to resolve.
var unixFsResolver = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
  const block = await blockstore.get(cid, options);
  const node = decode11(block);
  let unixfs2;
  let next;
  if (name4 == null) {
    name4 = cid.toString();
  }
  if (node.Data == null) {
    throw (0, import_err_code13.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
  }
  try {
    unixfs2 = UnixFS.unmarshal(node.Data);
  } catch (err) {
    throw (0, import_err_code13.default)(err, "ERR_NOT_UNIXFS");
  }
  if (path6 == null) {
    path6 = name4;
  }
  if (toResolve.length > 0) {
    let linkCid;
    if ((unixfs2 == null ? void 0 : unixfs2.type) === "hamt-sharded-directory") {
      // Sharded directories need a HAMT lookup rather than a linear scan.
      linkCid = await find_cid_in_shard_default(node, toResolve[0], blockstore);
    } else {
      linkCid = findLinkCid(node, toResolve[0]);
    }
    if (linkCid == null) {
      throw (0, import_err_code13.default)(new Error("file does not exist"), "ERR_NOT_FOUND");
    }
    // Consume one path segment and describe the next resolution step.
    const nextName = toResolve.shift();
    const nextPath = `${path6}/${nextName}`;
    next = {
      cid: linkCid,
      toResolve,
      name: nextName ?? "",
      path: nextPath
    };
  }
  const content = contentExporters[unixfs2.type](cid, node, unixfs2, path6, resolve6, depth, blockstore);
  if (content == null) {
    throw (0, import_err_code13.default)(new Error("could not find content exporter"), "ERR_NOT_FOUND");
  }
  if (unixfs2.isDirectory()) {
    return {
      entry: {
        type: "directory",
        name: name4,
        path: path6,
        cid,
        content,
        unixfs: unixfs2,
        depth,
        node,
        size: unixfs2.fileSize()
      },
      next
    };
  }
  return {
    entry: {
      type: "file",
      name: name4,
      path: path6,
      cid,
      content,
      unixfs: unixfs2,
      depth,
      node,
      size: unixfs2.fileSize()
    },
    next
  };
};
var unixfs_v1_default = unixFsResolver;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js
// Codec-code → resolver dispatch table (dag-pb, raw, dag-cbor, identity).
var resolvers = {
  [code2]: unixfs_v1_default,
  [code3]: raw_default,
  [code]: dag_cbor_default,
  [identity2.code]: identity_default
};
// Resolves one step by CID codec, passing itself so resolvers can descend.
var resolve4 = async (cid, name4, path6, toResolve, depth, blockstore, options) => {
  const resolver = resolvers[cid.code];
  if (resolver == null) {
    throw (0, import_err_code14.default)(new Error(`No resolver for code ${cid.code}`), "ERR_NO_RESOLVER");
  }
  return resolver(cid, name4, path6, toResolve, resolve4, depth, blockstore, options);
};
var resolvers_default = resolve4;
+
+// node_modules/ipfs-unixfs-exporter/dist/src/index.js
// Splits a path on unescaped "/" separators, dropping empty components.
var toPathComponents2 = (path6 = "") => {
  const matched = path6.trim().match(/([^\\^/]|\\\/)+/g) ?? [];
  return matched.filter(Boolean);
};
// Normalises an exporter path (CID bytes, CID instance, or "/ipfs/…"-style
// string) into a root CID plus remaining path segments.
var cidAndRest = (path6) => {
  if (path6 instanceof Uint8Array) {
    return {
      cid: CID2.decode(path6),
      toResolve: []
    };
  }
  const cid = CID2.asCID(path6);
  if (cid != null) {
    return {
      cid,
      toResolve: []
    };
  }
  if (typeof path6 === "string") {
    // Strip a leading "/ipfs/" prefix before splitting.
    if (path6.indexOf("/ipfs/") === 0) {
      path6 = path6.substring(6);
    }
    const output = toPathComponents2(path6);
    return {
      cid: CID2.parse(output[0]),
      toResolve: output.slice(1)
    };
  }
  throw (0, import_err_code15.default)(new Error(`Unknown path type ${path6}`), "ERR_BAD_PATH");
};
// Yields every entry along `path6` (root CID first), resolving one path
// segment per iteration until nothing is left to resolve.
async function* walkPath(path6, blockstore, options = {}) {
  let { cid, toResolve } = cidAndRest(path6);
  let name4 = cid.toString();
  let entryPath = name4;
  const startingDepth = toResolve.length;
  while (true) {
    const result = await resolvers_default(cid, name4, entryPath, toResolve, startingDepth, blockstore, options);
    if (result.entry == null && result.next == null) {
      throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND");
    }
    if (result.entry != null) {
      yield result.entry;
    }
    if (result.next == null) {
      return;
    }
    // Continue from the child the resolver pointed us at.
    toResolve = result.next.toResolve;
    cid = result.next.cid;
    name4 = result.next.name;
    entryPath = result.next.path;
  }
}
// Resolves `path6` fully and returns the final entry on the walk.
async function exporter(path6, blockstore, options = {}) {
  const lastEntry = await src_default7(walkPath(path6, blockstore, options));
  if (lastEntry == null) {
    throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND");
  }
  return lastEntry;
}
// Yields the entry at `path6` and, when it is a directory, every descendant
// entry depth-first.
async function* recursive(path6, blockstore, options = {}) {
  const root = await exporter(path6, blockstore, options);
  if (root == null) {
    return;
  }
  yield root;
  if (root.type !== "directory") {
    return;
  }
  yield* descend(root, options);
  // Walks a directory's content, recursing into nested directories.
  async function* descend(dir, opts) {
    for await (const entry of dir.content(opts)) {
      yield entry;
      if (entry instanceof Uint8Array) {
        continue;
      }
      if (entry.type === "directory") {
        yield* descend(entry, opts);
      }
    }
  }
}
+
+// node_modules/merge-options/index.mjs
+var import_index3 = __toESM(require_merge_options(), 1);
+var merge_options_default = import_index3.default;
+
+// node_modules/@helia/unixfs/dist/src/errors.js
// Base class for @helia/unixfs errors: carries a stable `name` plus a
// machine-readable `code` that callers can match on.
var UnixFSError = class extends Error {
  name;
  code;
  constructor(msg, errorName, errorCode) {
    super(msg);
    this.name = errorName;
    this.code = errorCode;
  }
};
// Raised when a block does not decode as a unixfs node.
var NotUnixFSError = class extends UnixFSError {
  constructor(msg = "not a Unixfs node") {
    super(msg, "NotUnixFSError", "ERR_NOT_UNIXFS");
  }
};
// Raised when a dag-pb node is structurally invalid.
var InvalidPBNodeError = class extends UnixFSError {
  constructor(msg = "invalid PBNode") {
    super(msg, "InvalidPBNodeError", "ERR_INVALID_PBNODE");
  }
};
// Generic fallback error.
// Fix: upstream passed "InvalidPBNodeError" as the error name here (a
// copy/paste slip from the class above); use "UnknownError" so `err.name`
// matches the class. The `code` ("ERR_UNKNOWN_ERROR") callers match on is
// unchanged.
var UnknownError = class extends UnixFSError {
  constructor(message2 = "unknown error") {
    super(message2, "UnknownError", "ERR_UNKNOWN_ERROR");
  }
};
// Thin UnixFSError subclasses: each fixes the error `name` and `code`,
// while allowing the message to be customised per call site.
var AlreadyExistsError = class extends UnixFSError {
  constructor(msg = "path already exists") {
    super(msg, "AlreadyExistsError", "ERR_ALREADY_EXISTS");
  }
};
var DoesNotExistError = class extends UnixFSError {
  constructor(msg = "path does not exist") {
    super(msg, "DoesNotExistError", "ERR_DOES_NOT_EXIST");
  }
};
var NoContentError = class extends UnixFSError {
  constructor(msg = "no content") {
    super(msg, "NoContentError", "ERR_NO_CONTENT");
  }
};
var NotAFileError = class extends UnixFSError {
  constructor(msg = "not a file") {
    super(msg, "NotAFileError", "ERR_NOT_A_FILE");
  }
};
var NotADirectoryError = class extends UnixFSError {
  constructor(msg = "not a directory") {
    super(msg, "NotADirectoryError", "ERR_NOT_A_DIRECTORY");
  }
};
var InvalidParametersError = class extends UnixFSError {
  constructor(msg = "invalid parameters") {
    super(msg, "InvalidParametersError", "ERR_INVALID_PARAMETERS");
  }
};
+
+// node_modules/@libp2p/logger/dist/src/index.js
+var import_debug = __toESM(require_src2(), 1);
// Custom `debug` formatters used by libp2p-style loggers:
//   %b → base58btc-encoded bytes, %t → base32, %m → base64,
//   %p/%c/%k/%a → toString() of peer-id/CID/key/multiaddr-like values.
// All render "undefined" for nullish input.
import_debug.default.formatters.b = (v) => {
  return v == null ? "undefined" : base58btc2.baseEncode(v);
};
import_debug.default.formatters.t = (v) => {
  return v == null ? "undefined" : base322.baseEncode(v);
};
import_debug.default.formatters.m = (v) => {
  return v == null ? "undefined" : base64.baseEncode(v);
};
import_debug.default.formatters.p = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug.default.formatters.c = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug.default.formatters.k = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug.default.formatters.a = (v) => {
  return v == null ? "undefined" : v.toString();
};
// Builds a no-op logger that satisfies the `debug` logger interface without
// ever writing output; `extend` returns the same disabled instance.
function createDisabledLogger(namespace) {
  const noop = () => {
  };
  noop.enabled = false;
  noop.color = "";
  noop.diff = 0;
  noop.log = () => {
  };
  noop.namespace = namespace;
  noop.destroy = () => true;
  noop.extend = () => noop;
  return noop;
}
// Creates a namespaced debug logger with `:error` and `:trace` sub-loggers.
// Trace stays a disabled stub unless a `:trace` namespace is explicitly
// enabled, keeping trace calls cheap on hot paths.
function logger(name4) {
  let trace = createDisabledLogger(`${name4}:trace`);
  if (import_debug.default.enabled(`${name4}:trace`) && import_debug.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null) {
    trace = (0, import_debug.default)(`${name4}:trace`);
  }
  return Object.assign((0, import_debug.default)(name4), {
    error: (0, import_debug.default)(`${name4}:error`),
    trace
  });
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js
+var import_sparse_array3 = __toESM(require_sparse_array(), 1);
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/consumable-hash.js
// Wraps a hash function so callers always receive an InfiniteHash2;
// already-wrapped values pass through untouched.
function wrapHash2(hashFn2) {
  return function hashing(value) {
    return value instanceof InfiniteHash2 ? value : new InfiniteHash2(value, hashFn2);
  };
}
// A bit-consumable hash that can serve an unbounded number of bits: when the
// current digest is exhausted, a fresh one is derived by hashing the value
// with an increasing depth-byte suffix.
var InfiniteHash2 = class {
  _value;
  _hashFn;
  _depth;
  _availableBits;
  _currentBufferIndex;
  _buffers;
  constructor(value, hashFn2) {
    if (!(value instanceof Uint8Array)) {
      throw new Error("can only hash Uint8Arrays");
    }
    this._value = value;
    this._hashFn = hashFn2;
    this._depth = -1;
    this._availableBits = 0;
    this._currentBufferIndex = 0;
    this._buffers = [];
  }
  // Consume `bits` bits as an integer, hashing more material on demand.
  async take(bits) {
    let pendingBits = bits;
    while (this._availableBits < pendingBits) {
      await this._produceMoreBits();
    }
    let result = 0;
    while (pendingBits > 0) {
      const hash = this._buffers[this._currentBufferIndex];
      const available = Math.min(hash.availableBits(), pendingBits);
      const took = hash.take(available);
      result = (result << available) + took;
      pendingBits -= available;
      this._availableBits -= available;
      if (hash.availableBits() === 0) {
        this._currentBufferIndex++;
      }
    }
    return result;
  }
  // Return `bits` previously-taken bits to the pool (backtracking support).
  untake(bits) {
    let pendingBits = bits;
    while (pendingBits > 0) {
      const hash = this._buffers[this._currentBufferIndex];
      const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits);
      hash.untake(availableForUntake);
      pendingBits -= availableForUntake;
      this._availableBits += availableForUntake;
      if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {
        this._depth--;
        this._currentBufferIndex--;
      }
    }
  }
  async _produceMoreBits() {
    this._depth++;
    // Suffix the depth byte so each level yields an independent digest.
    const value = this._depth > 0 ? concat2([this._value, Uint8Array.from([this._depth])]) : this._value;
    const hashValue = await this._hashFn(value);
    const buffer2 = new ConsumableBuffer2(hashValue);
    this._buffers.push(buffer2);
    this._availableBits += buffer2.availableBits();
  }
};
// Masks selecting a contiguous bit range within a byte: START_MASKS2[s]
// keeps the bits at positions >= s, STOP_MASKS2[e] keeps those <= e.
var START_MASKS2 = [255, 254, 252, 248, 240, 224, 192, 128];
var STOP_MASKS2 = [1, 3, 7, 15, 31, 63, 127, 255];
// Wraps a digest and hands its bits out most-significant-first, starting
// from the LAST byte of the array. Supports rewinding via untake().
var ConsumableBuffer2 = class {
  _value;
  _currentBytePos;
  _currentBitPos;
  constructor(value) {
    this._value = value;
    this._currentBytePos = value.length - 1;
    this._currentBitPos = 7;
  }
  // Bits not yet consumed.
  availableBits() {
    return this._currentBytePos * 8 + this._currentBitPos + 1;
  }
  // Total capacity of the underlying digest.
  totalBits() {
    return 8 * this._value.length;
  }
  // Consumes up to `bits` bits and returns them packed into a number.
  take(bits) {
    let remaining = bits;
    let out = 0;
    while (remaining > 0 && this._haveBits()) {
      const currentByte = this._value[this._currentBytePos];
      const bitsLeftInByte = this._currentBitPos + 1;
      const chunk = Math.min(bitsLeftInByte, remaining);
      const piece = byteBitsToInt2(currentByte, bitsLeftInByte - chunk, chunk);
      out = (out << chunk) + piece;
      remaining -= chunk;
      this._currentBitPos -= chunk;
      if (this._currentBitPos < 0) {
        this._currentBitPos = 7;
        this._currentBytePos--;
      }
    }
    return out;
  }
  // Rewinds the cursor by `bits`, making them available again.
  untake(bits) {
    this._currentBitPos += bits;
    while (this._currentBitPos > 7) {
      this._currentBitPos -= 8;
      this._currentBytePos += 1;
    }
  }
  _haveBits() {
    return this._currentBytePos >= 0;
  }
};
// Extracts `length4` bits starting `start` bits above the byte's LSB.
function byteBitsToInt2(byte, start, length4) {
  return (byte & maskFor2(start, length4)) >>> start;
}
// Mask covering bit positions [start, start + length4 - 1], clamped to 7.
function maskFor2(start, length4) {
  const stop = Math.min(length4 + start - 1, 7);
  return START_MASKS2[start] & STOP_MASKS2[stop];
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-constants.js
// Multicodec code of murmur3-128, stored as the UnixFS "hashType" field.
var hamtHashCode = BigInt(murmur3128.code);
// Each HAMT level consumes 8 bits of hash => fanout of 256 slots per shard.
var hamtBucketBits = 8;
// HAMT hash function: first 8 bytes of the murmur3-128 digest, reversed —
// presumably to match the go/js-ipfs sharding convention (TODO confirm).
async function hamtHashFn2(buf2) {
  return (await murmur3128.encode(buf2)).subarray(0, 8).reverse();
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js
+var import_sparse_array2 = __toESM(require_sparse_array(), 1);
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/persist.js
// Writes serialized node bytes to the blockstore and returns their CID.
// NOTE(review): mutates options.codec when unset (defaults to the dag-pb
// exports); callers sharing one options object will observe the write.
var persist2 = async (buffer2, blockstore, options) => {
  if (options.codec == null) {
    options.codec = src_exports2;
  }
  // CID = (version, codec code, sha2-256 multihash of the raw bytes)
  const multihash = await sha256.digest(buffer2);
  const cid = CID2.create(options.cidVersion, options.codec.code, multihash);
  await blockstore.put(cid, buffer2, {
    ...options,
    signal: options.signal
  });
  return cid;
};
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/dir-sharded.js
// Base state holder for directory builders used while constructing UnixFS
// DAGs. `cid`, `size` and `nodeSize` start undefined and are filled in
// once the directory has been flushed/measured.
var Dir2 = class {
  options;
  root;
  dir;
  path;
  dirty;
  flat;
  parent;
  parentKey;
  unixfs;
  mode;
  mtime;
  cid;
  size;
  nodeSize;
  constructor(props, options) {
    const { root, dir, path: dirPath, dirty, flat, parent, parentKey, unixfs: entryMeta, mode, mtime } = props;
    this.options = options ?? {};
    this.root = root;
    this.dir = dir;
    this.path = dirPath;
    this.dirty = dirty;
    this.flat = flat;
    this.parent = parent;
    this.parentKey = parentKey;
    this.unixfs = entryMeta;
    this.mode = mode;
    this.mtime = mtime;
  }
};
// Directory builder backed by a HAMT ("sharded" directory): entries live in
// a consistent-hash bucket tree instead of a flat link list.
var DirSharded2 = class extends Dir2 {
  // HAMT bucket tree holding { size, cid } entries keyed by name
  _bucket;
  constructor(props, options) {
    super(props, options);
    this._bucket = createHAMT({
      hashFn: hamtHashFn2,
      bits: 8
    });
  }
  // Adds/replaces an entry and invalidates any cached cid/size figures.
  async put(name4, value) {
    this.cid = void 0;
    this.size = void 0;
    this.nodeSize = void 0;
    await this._bucket.put(name4, value);
  }
  async get(name4) {
    return this._bucket.get(name4);
  }
  // Total number of leaf entries across the whole tree.
  childCount() {
    return this._bucket.leafCount();
  }
  // Number of occupied slots in the root bucket only.
  directChildrenCount() {
    return this._bucket.childrenCount();
  }
  onlyChild() {
    return this._bucket.onlyChild();
  }
  // Iterates every leaf entry as { key, child }.
  async *eachChildSeries() {
    for await (const { key, value } of this._bucket.eachLeafSeries()) {
      yield {
        key,
        child: value
      };
    }
  }
  // Cached estimate of the serialized root-node size (see calculateSize2).
  estimateNodeSize() {
    if (this.nodeSize !== void 0) {
      return this.nodeSize;
    }
    this.nodeSize = calculateSize2(this._bucket, this, this.options);
    return this.nodeSize;
  }
  // Persists the shard tree, yielding each written node; the final yield is
  // this shard's root entry.
  async *flush(blockstore) {
    for await (const entry of flush2(this._bucket, blockstore, this, this.options)) {
      yield {
        ...entry,
        path: this.path
      };
    }
  }
};
// Recursively persists a HAMT bucket tree as dag-pb "hamt-sharded-directory"
// nodes. Child slots are labelled with their 2-hex-digit bucket position;
// leaf entries get the entry name appended to that prefix. Yields every
// flushed nested directory, finishing with this shard's { cid, unixfs, size }.
async function* flush2(bucket, blockstore, shardRoot, options) {
  const children = bucket._children;
  const links = [];
  let childrenSize = 0n;
  for (let i = 0; i < children.length; i++) {
    const child = children.get(i);
    if (child == null) {
      continue;
    }
    const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0");
    if (child instanceof Bucket) {
      // sub-bucket: flush it; the last yielded entry is its root shard
      let shard;
      for await (const subShard of flush2(child, blockstore, null, options)) {
        shard = subShard;
      }
      if (shard == null) {
        throw new Error("Could not flush sharded directory, no subshard found");
      }
      links.push({
        Name: labelPrefix,
        Tsize: Number(shard.size),
        Hash: shard.cid
      });
      childrenSize += shard.size;
    } else if (isDir2(child.value)) {
      // nested directory builder: flush it and link its final root
      const dir2 = child.value;
      let flushedDir;
      for await (const entry of dir2.flush(blockstore)) {
        flushedDir = entry;
        yield flushedDir;
      }
      if (flushedDir == null) {
        throw new Error("Did not flush dir");
      }
      const label = labelPrefix + child.key;
      links.push({
        Name: label,
        Tsize: Number(flushedDir.size),
        Hash: flushedDir.cid
      });
      childrenSize += flushedDir.size;
    } else {
      // plain entry ({ size, cid }); skip entries never persisted
      const value = child.value;
      if (value.cid == null) {
        continue;
      }
      const label = labelPrefix + child.key;
      const size2 = value.size;
      links.push({
        Name: label,
        Tsize: Number(size2),
        Hash: value.cid
      });
      childrenSize += BigInt(size2 ?? 0);
    }
  }
  // occupancy bitmap of this bucket (reversed byte order for serialization)
  const data = Uint8Array.from(children.bitField().reverse());
  const dir = new UnixFS({
    type: "hamt-sharded-directory",
    data,
    fanout: BigInt(bucket.tableSize()),
    hashType: hamtHashCode,
    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
    mode: shardRoot == null ? void 0 : shardRoot.mode
  });
  const node = {
    Data: dir.marshal(),
    Links: links
  };
  const buffer2 = encode7(prepare(node));
  const cid = await persist2(buffer2, blockstore, options);
  const size = BigInt(buffer2.byteLength) + childrenSize;
  yield {
    cid,
    unixfs: dir,
    size
  };
}
// A child is a directory builder (rather than a plain { size, cid } entry)
// when it exposes a flush() method.
function isDir2(obj) {
  const flushFn = obj.flush;
  return typeof flushFn === "function";
}
// Mirror of flush2 that only *estimates* the encoded size of this shard
// node without persisting anything: child CIDs are substituted with
// fixed-size placeholders of the matching CID version.
function calculateSize2(bucket, shardRoot, options) {
  const children = bucket._children;
  const links = [];
  for (let i = 0; i < children.length; i++) {
    const child = children.get(i);
    if (child == null) {
      continue;
    }
    const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0");
    if (child instanceof Bucket) {
      const size = calculateSize2(child, null, options);
      links.push({
        Name: labelPrefix,
        Tsize: Number(size),
        Hash: options.cidVersion === 0 ? CID_V02 : CID_V12
      });
    } else if (typeof child.value.flush === "function") {
      const dir2 = child.value;
      // NOTE(review): expects a nodeSize() method; DirSharded2 here only
      // defines estimateNodeSize() — presumably the flat Dir implementation
      // (outside this chunk) provides nodeSize(). Verify.
      const size = dir2.nodeSize();
      links.push({
        Name: labelPrefix + child.key,
        Tsize: Number(size),
        Hash: options.cidVersion === 0 ? CID_V02 : CID_V12
      });
    } else {
      const value = child.value;
      if (value.cid == null) {
        continue;
      }
      const label = labelPrefix + child.key;
      const size = value.size;
      links.push({
        Name: label,
        Tsize: Number(size),
        Hash: value.cid
      });
    }
  }
  // occupancy bitmap, same encoding as flush2
  const data = Uint8Array.from(children.bitField().reverse());
  const dir = new UnixFS({
    type: "hamt-sharded-directory",
    data,
    fanout: BigInt(bucket.tableSize()),
    hashType: hamtHashCode,
    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
    mode: shardRoot == null ? void 0 : shardRoot.mode
  });
  const buffer2 = encode7(prepare({
    Data: dir.marshal(),
    Links: links
  }));
  return buffer2.length;
}
// Placeholder CIDs used only by calculateSize2: any v0/v1 CID has the right
// encoded byte length, the actual hash value is irrelevant.
var CID_V02 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn");
var CID_V12 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi");
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js
+var log = logger("helia:unixfs:commands:utils:hamt-utils");
// Converts a bucket index to its 2-character uppercase hex label.
var toPrefix2 = (position) => {
  const hex = position.toString(16).toUpperCase();
  return hex.padStart(2, "0").slice(0, 2);
};
// Builds a new sharded directory from `contents` ({ name, size, cid }) and
// persists it, returning the flushed root entry.
var createShard = async (blockstore, contents, options) => {
  const shard = new DirSharded2({
    root: true,
    dir: true,
    parent: void 0,
    parentKey: void 0,
    path: "",
    dirty: true,
    flat: false,
    mtime: options.mtime,
    mode: options.mode
  }, options);
  for (let i = 0; i < contents.length; i++) {
    // writes straight into the bucket (DirSharded2.put would also clear
    // caches, which are empty here anyway)
    await shard._bucket.put(contents[i].name, {
      size: contents[i].size,
      cid: contents[i].cid
    });
  }
  // src_default7 presumably drains the iterator keeping the last item, i.e.
  // the root shard (it-last style helper) — TODO confirm
  const res = await src_default7(shard.flush(blockstore));
  if (res == null) {
    throw new Error("Flushing shard yielded no result");
  }
  return res;
};
// Re-persists a chain of shard segments produced by recreateShardedDirectory
// after the leaf segment was modified. Segments are written leaf -> root;
// each child's fresh CID is patched into its parent's links before the
// parent itself is encoded. Returns the new root { cid, node }.
var updateShardedDirectory = async (path6, blockstore, options) => {
  // path6[0] is the root before reversal; keep its metadata for the final write
  const shardRoot = UnixFS.unmarshal(path6[0].node.Data ?? new Uint8Array(0));
  const fanout = BigInt(Math.pow(2, hamtBucketBits));
  path6.reverse();
  let cid;
  let node;
  for (let i = 0; i < path6.length; i++) {
    const isRoot = i === path6.length - 1;
    const segment = path6[i];
    const data = Uint8Array.from(segment.children.bitField().reverse());
    const dir = new UnixFS({
      type: "hamt-sharded-directory",
      data,
      fanout,
      hashType: hamtHashCode
    });
    if (isRoot) {
      // only the root shard carries the directory's mtime/mode
      dir.mtime = shardRoot.mtime;
      dir.mode = shardRoot.mode;
    }
    node = {
      Data: dir.marshal(),
      Links: segment.node.Links
    };
    const block = encode7(prepare(node));
    cid = await persist2(block, blockstore, options);
    if (!isRoot) {
      const nextSegment = path6[i + 1];
      if (nextSegment == null) {
        throw new Error("Was not operating on shard root but also had no parent?");
      }
      log("updating link in parent sub-shard with prefix %s", nextSegment.prefix);
      // replace the parent's link at this prefix with the freshly written CID
      nextSegment.node.Links = nextSegment.node.Links.filter((l) => l.Name !== nextSegment.prefix);
      nextSegment.node.Links.push({
        Name: nextSegment.prefix,
        Hash: cid,
        Tsize: segment.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), block.byteLength)
      });
    }
  }
  if (cid == null || node == null) {
    // NOTE(review): "Noting" is a typo for "Nothing"; message left as-is
    throw new Error("Noting persisted");
  }
  return { cid, node };
};
// Descends from the shard root at `cid` toward where `fileName` lives (or
// would live), rebuilding each visited shard's occupancy bitmap. Returns the
// visited segments (root first) plus the still-consumable hash stream so
// callers can keep descending when creating new sub-shards.
var recreateShardedDirectory = async (cid, fileName, blockstore, options) => {
  const wrapped = wrapHash2(hamtHashFn2);
  const hash = wrapped(fromString3(fileName));
  const path6 = [];
  while (true) {
    const block = await blockstore.get(cid, options);
    const node = decode11(block);
    const children = new import_sparse_array2.default();
    const index = await hash.take(hamtBucketBits);
    const prefix = toPrefix2(index);
    path6.push({
      prefix,
      children,
      node
    });
    let childLink;
    for (const link of node.Links) {
      const linkName2 = link.Name ?? "";
      if (linkName2.length < 2) {
        throw new Error("Invalid HAMT - link name was too short");
      }
      // a link's first two chars are its bucket position in hex
      const position = parseInt(linkName2.substring(0, 2), 16);
      children.set(position, true);
      if (linkName2.startsWith(prefix)) {
        childLink = link;
      }
    }
    if (childLink == null) {
      log("no link found with prefix %s for %s", prefix, fileName);
      break;
    }
    const linkName = childLink.Name ?? "";
    if (linkName.length < 2) {
      throw new Error("Invalid HAMT - link name was too short");
    }
    if (linkName.length === 2) {
      // bare 2-char name = sub-shard; keep walking down
      cid = childLink.Hash;
      log("descend into sub-shard with prefix %s", linkName);
      continue;
    }
    break;
  }
  return { path: path6, hash };
};
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/is-over-shard-threshold.js
// Decides whether a directory node should be stored sharded: true when its
// estimated serialized size exceeds `threshold` bytes.
async function isOverShardThreshold(node, blockstore, threshold, options) {
  if (node.Data == null) {
    throw new Error("DagPB node had no data");
  }
  const meta = UnixFS.unmarshal(node.Data);
  let estimated;
  switch (meta.type) {
    case "directory":
      estimated = estimateNodeSize(node);
      break;
    case "hamt-sharded-directory":
      estimated = await estimateShardSize(node, 0, threshold, blockstore, options);
      break;
    default:
      throw new Error("Can only estimate the size of directories or shards");
  }
  return estimated > threshold;
}
// Cheap size estimate for a flat directory: link-name lengths plus the
// encoded length of a representative v0/v1 CID per link.
function estimateNodeSize(node) {
  return node.Links.reduce((total, link) => {
    const cidLength = link.Hash.version === 1 ? CID_V12.bytes.byteLength : CID_V02.bytes.byteLength;
    return total + (link.Name ?? "").length + cidLength;
  }, 0);
}
// Walks a shard tree accumulating an approximate serialized size; sub-shard
// children (codec code2) are fetched from the blockstore and descended into.
// Bails out once the running total passes `max`, since callers only compare
// the result against the threshold.
async function estimateShardSize(node, current, max, blockstore, options) {
  if (current > max) {
    return max;
  }
  if (node.Data == null) {
    return current;
  }
  const unixfs2 = UnixFS.unmarshal(node.Data);
  if (!unixfs2.isDirectory()) {
    return current;
  }
  for (const link of node.Links) {
    let name4 = link.Name ?? "";
    // strip the 2-char bucket prefix; only the entry name counts
    name4 = name4.substring(2);
    current += name4.length;
    current += link.Hash.bytes.byteLength;
    if (link.Hash.code === code2) {
      const block = await blockstore.get(link.Hash, options);
      const node2 = decode11(block);
      // NOTE(review): the callee's return already includes the `current`
      // passed in, so this += double-counts the prefix total; harmless for a
      // threshold check but not an exact size — confirm against upstream.
      current += await estimateShardSize(node2, current, max, blockstore, options);
    }
  }
  return current;
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js
var log2 = logger("helia:unixfs:components:utils:add-link");
// Adds (or replaces) `child` in the directory `parent`, dispatching to the
// flat or HAMT implementation. A flat directory that grows past
// options.shardSplitThresholdBytes is converted to a sharded one.
async function addLink(parent, child, blockstore, options) {
  if (parent.node.Data == null) {
    throw new InvalidParametersError("Invalid parent passed to addLink");
  }
  const meta = UnixFS.unmarshal(parent.node.Data);
  if (meta.type === "hamt-sharded-directory") {
    log2("adding link to sharded directory");
    return addToShardedDirectory(parent, child, blockstore, options);
  }
  log2(`adding ${child.Name} (${child.Hash}) to regular directory`);
  const result = await addToDirectory(parent, child, blockstore, options);
  if (await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) {
    log2("converting directory to sharded directory");
    const converted = await convertToShardedDirectory(result, blockstore);
    result.cid = converted.cid;
    // reload the freshly written shard root so result.node matches result.cid
    result.node = decode11(await blockstore.get(converted.cid, options));
  }
  return result;
}
// Rebuilds a flat directory as a HAMT-sharded directory, preserving its
// metadata (mode/mtime) and CID version.
var convertToShardedDirectory = async (parent, blockstore) => {
  if (parent.node.Data == null) {
    throw new InvalidParametersError("Invalid parent passed to convertToShardedDirectory");
  }
  const meta = UnixFS.unmarshal(parent.node.Data);
  const contents = parent.node.Links.map((link) => ({
    name: link.Name ?? "",
    size: BigInt(link.Tsize ?? 0),
    cid: link.Hash
  }));
  const result = await createShard(blockstore, contents, {
    mode: meta.mode,
    mtime: meta.mtime,
    cidVersion: parent.cid.version
  });
  log2(`converted directory to sharded directory ${result.cid}`);
  return result;
};
// Adds `child` to a flat directory node: removes any same-named link
// (honouring options.allowOverwriting), refreshes mtime when the directory
// tracks one, then re-encodes and persists the node.
var addToDirectory = async (parent, child, blockstore, options) => {
  const parentLinks = parent.node.Links.filter((link) => {
    const matches = link.Name === child.Name;
    if (matches && !options.allowOverwriting) {
      throw new AlreadyExistsError();
    }
    return !matches;
  });
  parentLinks.push(child);
  if (parent.node.Data == null) {
    throw new InvalidPBNodeError("Parent node with no data passed to addToDirectory");
  }
  const node = UnixFS.unmarshal(parent.node.Data);
  let data;
  if (node.mtime != null) {
    // directory already tracks mtime: stamp it with "now"
    const ms = Date.now();
    const secs = Math.floor(ms / 1e3);
    node.mtime = {
      secs: BigInt(secs),
      // NOTE(review): this is the millisecond remainder scaled to
      // microseconds, stored in a field named nsecs — confirm intended
      nsecs: (ms - secs * 1e3) * 1e3
    };
    data = node.marshal();
  } else {
    data = parent.node.Data;
  }
  parent.node = prepare({
    Data: data,
    Links: parentLinks
  });
  // always encoded as dag-pb (code2) with sha2-256, keeping the parent's
  // CID version
  const buf2 = encode7(parent.node);
  const hash = await sha256.digest(buf2);
  const cid = CID2.create(parent.cid.version, code2, hash);
  await blockstore.put(cid, buf2);
  return {
    node: parent.node,
    cid
  };
};
// Inserts `child` into a sharded directory. Three cases: the leaf shard has
// no entry at the hash prefix (simple append); an entry with the same full
// name exists (overwrite, subject to options.allowOverwriting); or a
// *different* entry shares the prefix, in which case both entries are pushed
// down into newly created sub-shards until their hash streams diverge.
var addToShardedDirectory = async (parent, child, blockstore, options) => {
  var _a;
  const { path: path6, hash } = await recreateShardedDirectory(parent.cid, child.Name, blockstore, options);
  const finalSegment = path6[path6.length - 1];
  if (finalSegment == null) {
    throw new Error("Invalid HAMT, could not generate path");
  }
  const prefix = finalSegment.prefix;
  const index = parseInt(prefix, 16);
  log2("next prefix for %s is %s", child.Name, prefix);
  const linkName = `${prefix}${child.Name}`;
  const existingLink = finalSegment.node.Links.find((l) => (l.Name ?? "").startsWith(prefix));
  if (existingLink != null) {
    log2("link %s was present in shard", linkName);
    if (existingLink.Name === linkName) {
      // exact name match: replace the link (only when overwriting allowed)
      if (!options.allowOverwriting) {
        throw new AlreadyExistsError();
      }
      log2("overwriting %s in subshard", child.Name);
      finalSegment.node.Links = finalSegment.node.Links.filter((l) => l.Name !== linkName);
      finalSegment.node.Links.push({
        Name: linkName,
        Hash: child.Hash,
        Tsize: child.Tsize
      });
    } else if (((_a = existingLink.Name) == null ? void 0 : _a.length) === 2) {
      throw new Error("Existing link was subshard?!");
    } else {
      // prefix collision with a different entry: move the sibling (and the
      // new child) into fresh sub-shard levels until their hashes diverge
      log2("prefix %s already exists, creating new subshard", prefix);
      const index2 = finalSegment.node.Links.findIndex((l) => {
        var _a2;
        return (_a2 = l.Name) == null ? void 0 : _a2.startsWith(prefix);
      });
      const sibling = finalSegment.node.Links.splice(index2, 1)[0];
      const siblingName = (sibling.Name ?? "").substring(2);
      const wrapped = wrapHash2(hamtHashFn2);
      const siblingHash = wrapped(fromString3(siblingName));
      // fast-forward the sibling's hash stream past the levels already walked
      for (let i = 0; i < path6.length; i++) {
        await siblingHash.take(hamtBucketBits);
      }
      while (true) {
        const siblingIndex = await siblingHash.take(hamtBucketBits);
        const siblingPrefix = toPrefix2(siblingIndex);
        sibling.Name = `${siblingPrefix}${siblingName}`;
        const newIndex = await hash.take(hamtBucketBits);
        const newPrefix = toPrefix2(newIndex);
        if (siblingPrefix === newPrefix) {
          // still colliding: insert another intermediate shard level
          const children2 = new import_sparse_array3.default();
          children2.set(newIndex, true);
          path6.push({
            prefix: newPrefix,
            children: children2,
            node: {
              Links: []
            }
          });
          continue;
        }
        // diverged: final sub-shard holds both entries at distinct slots
        const children = new import_sparse_array3.default();
        children.set(newIndex, true);
        children.set(siblingIndex, true);
        path6.push({
          prefix,
          children,
          node: {
            Links: [
              sibling,
              {
                Name: `${newPrefix}${child.Name}`,
                Hash: child.Hash,
                Tsize: child.Tsize
              }
            ]
          }
        });
        break;
      }
    }
  } else {
    log2("link %s was not present in sub-shard", linkName);
    child.Name = linkName;
    finalSegment.node.Links.push(child);
    finalSegment.children.set(index, true);
    log2("adding %s to existing sub-shard", linkName);
  }
  return updateShardedDirectory(path6, blockstore, options);
};
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-directory.js
// Loads `cid` via the exporter and asserts it is a UnixFS directory,
// returning { cid, node } for use with addLink/removeLink.
async function cidToDirectory(cid, blockstore, options = {}) {
  const dirEntry = await exporter(cid, blockstore, options);
  if (dirEntry.type !== "directory") {
    throw new NotADirectoryError(`${cid.toString()} was not a UnixFS directory`);
  }
  return { cid, node: dirEntry.node };
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-pblink.js
// Builds a DAG-PB link ({ Name, Tsize, Hash }) pointing at the exported
// UnixFS node behind `cid`.
async function cidToPBLink(cid, name4, blockstore, options) {
  const sourceEntry = await exporter(cid, blockstore, options);
  const validTypes = ["directory", "file", "raw"];
  if (!validTypes.includes(sourceEntry.type)) {
    throw new NotUnixFSError(`${cid.toString()} was not a UnixFS node`);
  }
  const node = sourceEntry.node;
  const tsize = node instanceof Uint8Array ? node.byteLength : dagNodeTsize(node);
  return {
    Name: name4,
    Tsize: tsize,
    Hash: cid
  };
}
// Cumulative size of a dag-pb node: its encoded bytes plus all child Tsizes.
function dagNodeTsize(node) {
  let childSizes = 0;
  for (const link of node.Links) {
    childSizes += link.Tsize ?? 0;
  }
  return encode7(node).byteLength + childSizes;
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/resolve.js
var log3 = logger("helia:unixfs:components:utils:resolve");
// Resolves a slash-delimited `path6` under `cid` to a CID. Also records the
// directory segments traversed so callers (updatePathCids) can bubble a
// changed leaf CID back up to the root.
async function resolve5(cid, path6, blockstore, options) {
  if (path6 == null || path6 === "") {
    return { cid };
  }
  log3('resolve "%s" under %c', path6, cid);
  const parts = path6.split("/").filter(Boolean);
  const segments = [{
    name: "",
    cid,
    size: 0n
  }];
  for (let i = 0; i < parts.length; i++) {
    const part = parts[i];
    const result = await exporter(cid, blockstore, options);
    log3('resolving "%s"', part, result);
    if (result.type === "file") {
      // a file may only appear as the final path component
      if (i < parts.length - 1) {
        throw new InvalidParametersError("Path was invalid");
      }
      cid = result.cid;
    } else if (result.type === "directory") {
      // scan the directory's entries for the next component
      let dirCid;
      for await (const entry of result.content()) {
        if (entry.name === part) {
          dirCid = entry.cid;
          break;
        }
      }
      if (dirCid == null) {
        throw new DoesNotExistError("Could not find path in directory");
      }
      cid = dirCid;
      segments.push({
        name: part,
        cid,
        size: result.size
      });
    } else {
      throw new InvalidParametersError("Could not resolve path");
    }
  }
  log3("resolved %s to %c", path6, cid);
  return {
    cid,
    path: path6,
    segments
  };
}
// After the node at the end of `result.segments` changed to `cid`, re-links
// each ancestor directory in turn so every level up to the root gets a
// fresh CID; returns the new root CID. Consumes result.segments.
async function updatePathCids(cid, result, blockstore, options) {
  if (result.segments == null || result.segments.length === 0) {
    return cid;
  }
  let child = result.segments.pop();
  if (child == null) {
    throw new Error("Insufficient segments");
  }
  child.cid = cid;
  result.segments.reverse();
  for (const parent of result.segments) {
    const [directory, pblink] = await Promise.all([
      cidToDirectory(parent.cid, blockstore, options),
      cidToPBLink(child.cid, child.name, blockstore, options)
    ]);
    const result2 = await addLink(directory, pblink, blockstore, {
      ...options,
      allowOverwriting: true,
      cidVersion: cid.version
    });
    cid = result2.cid;
    parent.cid = cid;
    child = parent;
  }
  return cid;
}
+
+// node_modules/@helia/unixfs/dist/src/commands/cat.js
var mergeOptions2 = merge_options_default.bind({ ignoreUndefined: true });
var defaultOptions = {};
// Streams the bytes of the UnixFS file at `cid` (optionally at opts.path
// beneath it). Throws NotAFileError for directories, NoContentError when
// the exported entry has no content generator.
async function* cat(cid, blockstore, options = {}) {
  const opts = mergeOptions2(defaultOptions, options);
  const resolved = await resolve5(cid, opts.path, blockstore, opts);
  const entry = await exporter(resolved.cid, blockstore, opts);
  const isFileLike = entry.type === "file" || entry.type === "raw";
  if (!isFileLike) {
    throw new NotAFileError();
  }
  if (entry.content == null) {
    throw new NoContentError();
  }
  yield* entry.content(opts);
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/constants.js
// Directories whose estimated serialized size exceeds 256 KiB are sharded.
var SHARD_SPLIT_THRESHOLD_BYTES = 262144;
+
+// node_modules/@helia/unixfs/dist/src/commands/chmod.js
var mergeOptions3 = merge_options_default.bind({ ignoreUndefined: true });
var log4 = logger("helia:unixfs:chmod");
var defaultOptions2 = {
  recursive: false,
  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
};
// Sets the UnixFS mode on the entry at `cid` (or at opts.path beneath it).
// With opts.recursive the whole sub-DAG is rewritten through the importer
// using a custom dagBuilder; otherwise only the resolved node is re-encoded.
// Either way ancestor CIDs along opts.path are refreshed via updatePathCids.
async function chmod(cid, mode, blockstore, options = {}) {
  const opts = mergeOptions3(defaultOptions2, options);
  const resolved = await resolve5(cid, opts.path, blockstore, options);
  log4("chmod %c %d", resolved.cid, mode);
  if (opts.recursive) {
    // stream every node of the sub-DAG, stamp the new mode, and re-import
    const root = await pipe(
      async function* () {
        for await (const entry of recursive(resolved.cid, blockstore, options)) {
          let metadata2;
          let links2 = [];
          if (entry.type === "raw") {
            // raw leaf: wrap bytes in a unixfs file so mode can be attached
            metadata2 = new UnixFS({ type: "file", data: entry.node });
          } else if (entry.type === "file" || entry.type === "directory") {
            metadata2 = entry.unixfs;
            links2 = entry.node.Links;
          } else {
            throw new NotUnixFSError();
          }
          metadata2.mode = mode;
          const node = {
            Data: metadata2.marshal(),
            Links: links2
          };
          yield {
            path: entry.path,
            content: node
          };
        }
      },
      // @ts-expect-error cannot combine progress types
      (source) => importer(source, blockstore, {
        ...opts,
        // bypass chunking: persist each pre-built dag-pb node verbatim
        dagBuilder: async function* (source2, block2) {
          for await (const entry of source2) {
            yield async function() {
              const node = entry.content;
              const buf2 = encode7(node);
              const updatedCid2 = await persist2(buf2, block2, {
                ...opts,
                cidVersion: cid.version
              });
              if (node.Data == null) {
                throw new InvalidPBNodeError(`${updatedCid2} had no data`);
              }
              const unixfs2 = UnixFS.unmarshal(node.Data);
              return {
                cid: updatedCid2,
                size: BigInt(buf2.length),
                path: entry.path,
                unixfs: unixfs2
              };
            };
          }
        }
      }),
      async (nodes) => src_default7(nodes)
    );
    if (root == null) {
      throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`);
    }
    return updatePathCids(root.cid, resolved, blockstore, opts);
  }
  // non-recursive: re-encode just the resolved node with the new mode
  const block = await blockstore.get(resolved.cid, options);
  let metadata;
  let links = [];
  if (resolved.cid.code === code3) {
    // raw leaf block: wrap in a unixfs file so mode can be attached
    metadata = new UnixFS({ type: "file", data: block });
  } else {
    const node = decode11(block);
    if (node.Data == null) {
      throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`);
    }
    links = node.Links;
    metadata = UnixFS.unmarshal(node.Data);
  }
  metadata.mode = mode;
  const updatedBlock = encode7({
    Data: metadata.marshal(),
    Links: links
  });
  const hash = await sha256.digest(updatedBlock);
  const updatedCid = CID2.create(resolved.cid.version, code2, hash);
  await blockstore.put(updatedCid, updatedBlock);
  return updatePathCids(updatedCid, resolved, blockstore, opts);
}
+
+// node_modules/@helia/unixfs/dist/src/commands/cp.js
var mergeOptions4 = merge_options_default.bind({ ignoreUndefined: true });
var log5 = logger("helia:unixfs:cp");
var defaultOptions3 = {
  force: false,
  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
};
// Copies the node at `source` into the directory at `target` under the
// name `name4`, returning the CID of the updated target directory.
async function cp(source, target, name4, blockstore, options = {}) {
  const opts = mergeOptions4(defaultOptions3, options);
  if (name4.includes("/")) {
    throw new InvalidParametersError("Name must not have slashes");
  }
  const [directory, pblink] = await Promise.all([
    cidToDirectory(target, blockstore, opts),
    cidToPBLink(source, name4, blockstore, opts)
  ]);
  log5('Adding %c as "%s" to %c', source, name4, target);
  const linked = await addLink(directory, pblink, blockstore, {
    allowOverwriting: opts.force,
    cidVersion: target.version,
    ...opts
  });
  return linked.cid;
}
+
+// node_modules/@helia/unixfs/dist/src/commands/ls.js
var mergeOptions5 = merge_options_default.bind({ ignoreUndefined: true });
var defaultOptions4 = {};
// Lists the UnixFS directory at `cid` (optionally at opts.path beneath it);
// when the target is a file or raw block, yields that single entry instead.
async function* ls(cid, blockstore, options = {}) {
  const opts = mergeOptions5(defaultOptions4, options);
  const resolved = await resolve5(cid, opts.path, blockstore, opts);
  const entry = await exporter(resolved.cid, blockstore);
  if (entry.type === "file" || entry.type === "raw") {
    yield entry;
    return;
  }
  if (entry.content == null) {
    throw new NoContentError();
  }
  if (entry.type !== "directory") {
    throw new NotADirectoryError();
  }
  yield* entry.content({
    offset: options.offset,
    length: options.length
  });
}
+
+// node_modules/@helia/unixfs/dist/src/commands/mkdir.js
var mergeOptions6 = merge_options_default.bind({ ignoreUndefined: true });
var log6 = logger("helia:unixfs:mkdir");
var defaultOptions5 = {
  cidVersion: 1,
  force: false,
  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
};
// Creates an empty directory called `dirname` inside the directory at
// `parentCid` and returns the updated parent CID. Throws
// InvalidParametersError on slashes in the name, NotADirectoryError when the
// parent is not a directory, AlreadyExistsError unless opts.force is set.
async function mkdir(parentCid, dirname, blockstore, options = {}) {
  const opts = mergeOptions6(defaultOptions5, options);
  if (dirname.includes("/")) {
    throw new InvalidParametersError("Path must not have slashes");
  }
  const entry = await exporter(parentCid, blockstore, options);
  if (entry.type !== "directory") {
    throw new NotADirectoryError(`${parentCid.toString()} was not a UnixFS directory`);
  }
  log6("creating %s", dirname);
  // build and persist an empty directory node first
  const metadata = new UnixFS({
    type: "directory",
    mode: opts.mode,
    mtime: opts.mtime
  });
  const node = {
    Data: metadata.marshal(),
    Links: []
  };
  const buf2 = encode7(node);
  const hash = await sha256.digest(buf2);
  const emptyDirCid = CID2.create(opts.cidVersion, code2, hash);
  await blockstore.put(emptyDirCid, buf2);
  // then link it into the parent directory
  const [directory, pblink] = await Promise.all([
    cidToDirectory(parentCid, blockstore, opts),
    cidToPBLink(emptyDirCid, dirname, blockstore, opts)
  ]);
  log6("adding empty dir called %s to %c", dirname, parentCid);
  const result = await addLink(directory, pblink, blockstore, {
    ...opts,
    allowOverwriting: opts.force
  });
  return result.cid;
}
+
+// node_modules/@helia/unixfs/dist/src/commands/utils/remove-link.js
var log7 = logger("helia:unixfs:utils:remove-link");
// Removes the entry called `name4` from the directory `parent`, dispatching
// on flat vs HAMT layout. A shard that shrinks below
// options.shardSplitThresholdBytes is converted back to a flat directory.
async function removeLink(parent, name4, blockstore, options) {
  if (parent.node.Data == null) {
    throw new InvalidPBNodeError("Parent node had no data");
  }
  const meta = UnixFS.unmarshal(parent.node.Data);
  if (meta.type === "hamt-sharded-directory") {
    log7(`removing ${name4} from sharded directory`);
    const result = await removeFromShardedDirectory(parent, name4, blockstore, options);
    if (!await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) {
      log7("converting shard to flat directory %c", parent.cid);
      return convertToFlatDirectory(result, blockstore, options);
    }
    return result;
  }
  log7(`removing link ${name4} regular directory`);
  return removeFromDirectory(parent, name4, blockstore, options);
}
// Drops the link called `name4` from a flat directory and re-persists the
// node, returning the updated { node, cid }.
var removeFromDirectory = async (parent, name4, blockstore, options) => {
  const remainingLinks = parent.node.Links.filter((link) => link.Name !== name4);
  parent.node.Links = remainingLinks;
  const parentBlock = encode7(parent.node);
  const parentCid = await persist2(parentBlock, blockstore, {
    ...options,
    cidVersion: parent.cid.version
  });
  log7(`Updated regular directory ${parentCid}`);
  return {
    node: parent.node,
    cid: parentCid
  };
};
// Removes `name4` from a sharded directory: drops the leaf link, then while
// the deepest sub-shard holds only a single remaining link, collapses it
// into its parent (restoring the name under the parent's prefix) before
// re-persisting the whole path.
var removeFromShardedDirectory = async (parent, name4, blockstore, options) => {
  const { path: path6 } = await recreateShardedDirectory(parent.cid, name4, blockstore, options);
  const finalSegment = path6[path6.length - 1];
  if (finalSegment == null) {
    throw new Error("Invalid HAMT, could not generate path");
  }
  // match on the name with its 2-char bucket prefix stripped
  const linkName = finalSegment.node.Links.filter((l) => (l.Name ?? "").substring(2) === name4).map((l) => l.Name).pop();
  if (linkName == null) {
    throw new Error("File not found");
  }
  const prefix = linkName.substring(0, 2);
  const index = parseInt(prefix, 16);
  finalSegment.node.Links = finalSegment.node.Links.filter((link) => link.Name !== linkName);
  finalSegment.children.unset(index);
  if (finalSegment.node.Links.length === 1) {
    // collapse chains of single-entry sub-shards back into their parents
    while (true) {
      if (path6.length === 1) {
        break;
      }
      const segment = path6[path6.length - 1];
      if (segment == null || segment.node.Links.length > 1) {
        break;
      }
      path6.pop();
      const nextSegment = path6[path6.length - 1];
      if (nextSegment == null) {
        break;
      }
      // move the lone link up a level, renaming it under the parent's prefix
      const link = segment.node.Links[0];
      nextSegment.node.Links = nextSegment.node.Links.filter((l) => !(l.Name ?? "").startsWith(nextSegment.prefix));
      nextSegment.node.Links.push({
        Hash: link.Hash,
        Name: `${nextSegment.prefix}${(link.Name ?? "").substring(2)}`,
        Tsize: link.Tsize
      });
    }
  }
  return updateShardedDirectory(path6, blockstore, options);
};
+/**
+ * Rebuilds a (formerly sharded) directory as a single flat "directory" node
+ * by exporting every entry and re-linking them under one node, carrying the
+ * original mode/mtime metadata over.
+ * @throws {InvalidParametersError} when the parent node has no data.
+ * @throws {Error} when the exported entry is not a directory.
+ */
+var convertToFlatDirectory = async (parent, blockstore, options) => {
+  if (parent.node.Data == null) {
+    throw new InvalidParametersError("Invalid parent passed to convertToFlatDirectory");
+  }
+  const rootNode = {
+    Links: []
+  };
+  const dir = await exporter(parent.cid, blockstore);
+  if (dir.type !== "directory") {
+    throw new Error("Unexpected node type");
+  }
+  for await (const entry of dir.content()) {
+    let tsize = 0;
+    // Raw leaves report their byte length; dag-pb nodes their encoded size.
+    if (entry.node instanceof Uint8Array) {
+      tsize = entry.node.byteLength;
+    } else {
+      tsize = encode7(entry.node).length;
+    }
+    rootNode.Links.push({
+      Hash: entry.cid,
+      Name: entry.name,
+      Tsize: tsize
+    });
+  }
+  // Preserve mode/mtime from the old sharded root.
+  const oldUnixfs = UnixFS.unmarshal(parent.node.Data);
+  rootNode.Data = new UnixFS({ type: "directory", mode: oldUnixfs.mode, mtime: oldUnixfs.mtime }).marshal();
+  const block = encode7(prepare(rootNode));
+  const cid = await persist2(block, blockstore, {
+    codec: src_exports2,
+    cidVersion: parent.cid.version,
+    signal: options.signal
+  });
+  return {
+    cid,
+    node: rootNode
+  };
+};
+
+// node_modules/@helia/unixfs/dist/src/commands/rm.js
+// Option merger for rm(); ignores `undefined` values supplied by the caller.
+var mergeOptions7 = merge_options_default.bind({ ignoreUndefined: true });
+var log8 = logger("helia:unixfs:rm");
+// Default options applied under user options in rm().
+var defaultOptions6 = {
+  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
+};
+async function rm(target, name4, blockstore, options = {}) {
+  const opts = mergeOptions7(defaultOptions6, options);
+  if (name4.includes("/")) {
+    throw new InvalidParametersError("Name must not have slashes");
+  }
+  const directory = await cidToDirectory(target, blockstore, opts);
+  log8("Removing %s from %c", name4, target);
+  const result = await removeLink(directory, name4, blockstore, {
+    ...opts,
+    cidVersion: target.version
+  });
+  return result.cid;
+}
+
+// node_modules/@helia/unixfs/dist/src/commands/stat.js
+// Option merger for stat(); ignores `undefined` values supplied by the caller.
+var mergeOptions8 = merge_options_default.bind({ ignoreUndefined: true });
+var log9 = logger("helia:unixfs:stat");
+var defaultOptions7 = {};
+/**
+ * Returns size/metadata statistics for the UnixFS entry at `cid`.
+ * `fileSize`/`dagSize` describe the full entry, while `localFileSize`,
+ * `localDagSize` and `blocks` only count what is present in the local
+ * blockstore (via inspectDag).
+ * @returns {Promise<object>} { cid, mode, mtime, fileSize, dagSize,
+ *   localFileSize, localDagSize, blocks, type, unixfs } (sizes are BigInt).
+ * @throws {NotUnixFSError} when the resolved node is not file/directory/raw.
+ */
+async function stat(cid, blockstore, options = {}) {
+  var _a;
+  const opts = mergeOptions8(defaultOptions7, options);
+  // NOTE(review): reads `options.path` rather than the merged `opts.path`
+  // (touch() below uses `opts.path`) — confirm against upstream @helia/unixfs.
+  const resolved = await resolve5(cid, options.path, blockstore, opts);
+  log9("stat %c", resolved.cid);
+  const result = await exporter(resolved.cid, blockstore, opts);
+  if (result.type !== "file" && result.type !== "directory" && result.type !== "raw") {
+    throw new NotUnixFSError();
+  }
+  let fileSize = 0n;
+  let dagSize = 0n;
+  let localFileSize = 0n;
+  let localDagSize = 0n;
+  let blocks = 0;
+  let mode;
+  let mtime;
+  const type = result.type;
+  let unixfs2;
+  // Raw leaf: the single block is both the file content and the whole DAG.
+  if (result.type === "raw") {
+    fileSize = BigInt(result.node.byteLength);
+    dagSize = BigInt(result.node.byteLength);
+    localFileSize = BigInt(result.node.byteLength);
+    localDagSize = BigInt(result.node.byteLength);
+    blocks = 1;
+  }
+  if (result.type === "directory") {
+    fileSize = 0n;
+    dagSize = BigInt(result.unixfs.marshal().byteLength);
+    localFileSize = 0n;
+    localDagSize = dagSize;
+    blocks = 1;
+    mode = result.unixfs.mode;
+    mtime = result.unixfs.mtime;
+    unixfs2 = result.unixfs;
+  }
+  if (result.type === "file") {
+    // Walk the DAG to find out how much of the file is available locally.
+    const results = await inspectDag(resolved.cid, blockstore, opts);
+    fileSize = result.unixfs.fileSize();
+    dagSize = BigInt((((_a = result.node.Data) == null ? void 0 : _a.byteLength) ?? 0) + result.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0));
+    localFileSize = BigInt(results.localFileSize);
+    localDagSize = BigInt(results.localDagSize);
+    blocks = results.blocks;
+    mode = result.unixfs.mode;
+    mtime = result.unixfs.mtime;
+    unixfs2 = result.unixfs;
+  }
+  return {
+    cid: resolved.cid,
+    mode,
+    mtime,
+    fileSize,
+    dagSize,
+    localFileSize,
+    localDagSize,
+    blocks,
+    type,
+    unixfs: unixfs2
+  };
+}
+/**
+ * Recursively walks the DAG rooted at `cid`, counting only blocks that are
+ * present in the local blockstore (missing blocks are silently skipped).
+ * @returns {Promise<object>} accumulated { localFileSize, localDagSize,
+ *   blocks } as plain numbers.
+ * @throws {InvalidPBNodeError} on leaf dag-pb nodes without data.
+ * @throws {UnknownError} when a block is neither dag-pb nor raw.
+ */
+async function inspectDag(cid, blockstore, options) {
+  const results = {
+    localFileSize: 0,
+    localDagSize: 0,
+    blocks: 0
+  };
+  if (await blockstore.has(cid, options)) {
+    const block = await blockstore.get(cid, options);
+    results.blocks++;
+    results.localDagSize += block.byteLength;
+    if (cid.code === code3) {
+      // Raw block: the whole block is file content.
+      results.localFileSize += block.byteLength;
+    } else if (cid.code === code2) {
+      const pbNode = decode11(block);
+      if (pbNode.Links.length > 0) {
+        // Interior node: recurse into every child and accumulate.
+        for (const link of pbNode.Links) {
+          const linkResult = await inspectDag(link.Hash, blockstore, options);
+          results.localFileSize += linkResult.localFileSize;
+          results.localDagSize += linkResult.localDagSize;
+          results.blocks += linkResult.blocks;
+        }
+      } else {
+        // Leaf dag-pb node: file content lives in the UnixFS data field.
+        if (pbNode.Data == null) {
+          throw new InvalidPBNodeError(`PBNode ${cid.toString()} had no data`);
+        }
+        const unixfs2 = UnixFS.unmarshal(pbNode.Data);
+        if (unixfs2.data == null) {
+          throw new InvalidPBNodeError(`UnixFS node ${cid.toString()} had no data`);
+        }
+        results.localFileSize += unixfs2.data.byteLength ?? 0;
+      }
+    } else {
+      throw new UnknownError(`${cid.toString()} was neither DAG_PB nor RAW`);
+    }
+  }
+  return results;
+}
+
+// node_modules/@helia/unixfs/dist/src/commands/touch.js
+// Option merger for touch(); ignores `undefined` values supplied by the caller.
+var mergeOptions9 = merge_options_default.bind({ ignoreUndefined: true });
+var log10 = logger("helia:unixfs:touch");
+var defaultOptions8 = {
+  recursive: false,
+  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
+};
+/**
+ * Updates the mtime of the entry at `cid` (defaulting to "now"), then
+ * rewrites the CIDs along the resolved path via updatePathCids.
+ * With `recursive: true` every node in the DAG is re-imported with the new
+ * mtime; otherwise only the resolved node is rewritten.
+ * @returns {Promise<CID>} CID of the updated entry.
+ * @throws {InvalidPBNodeError} when a dag-pb node has no data.
+ * @throws {UnknownError} when the recursive re-import yields no root.
+ */
+async function touch(cid, blockstore, options = {}) {
+  const opts = mergeOptions9(defaultOptions8, options);
+  const resolved = await resolve5(cid, opts.path, blockstore, opts);
+  const mtime = opts.mtime ?? {
+    secs: BigInt(Math.round(Date.now() / 1e3)),
+    nsecs: 0
+  };
+  log10("touch %c %o", resolved.cid, mtime);
+  if (opts.recursive) {
+    // Stream every entry of the DAG through the importer with its metadata
+    // mtime replaced, keeping the last (root) imported node.
+    const root = await pipe(
+      async function* () {
+        for await (const entry of recursive(resolved.cid, blockstore)) {
+          let metadata2;
+          let links2;
+          if (entry.type === "raw") {
+            metadata2 = new UnixFS({ data: entry.node });
+            links2 = [];
+          } else if (entry.type === "file" || entry.type === "directory") {
+            metadata2 = entry.unixfs;
+            links2 = entry.node.Links;
+          } else {
+            throw new NotUnixFSError();
+          }
+          metadata2.mtime = mtime;
+          const node = {
+            Data: metadata2.marshal(),
+            Links: links2
+          };
+          yield {
+            path: entry.path,
+            content: node
+          };
+        }
+      },
+      // @ts-expect-error blockstore types are incompatible
+      (source) => importer(source, blockstore, {
+        ...opts,
+        // Custom dagBuilder: persist each prepared node as-is instead of
+        // re-chunking, preserving the original CID version.
+        dagBuilder: async function* (source2, block2) {
+          for await (const entry of source2) {
+            yield async function() {
+              const node = entry.content;
+              const buf2 = encode7(node);
+              const updatedCid2 = await persist2(buf2, block2, {
+                ...opts,
+                cidVersion: cid.version
+              });
+              if (node.Data == null) {
+                throw new InvalidPBNodeError(`${updatedCid2} had no data`);
+              }
+              const unixfs2 = UnixFS.unmarshal(node.Data);
+              return {
+                cid: updatedCid2,
+                size: BigInt(buf2.length),
+                path: entry.path,
+                unixfs: unixfs2
+              };
+            };
+          }
+        }
+      }),
+      async (nodes) => src_default7(nodes)
+    );
+    if (root == null) {
+      throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`);
+    }
+    return updatePathCids(root.cid, resolved, blockstore, opts);
+  }
+  // Non-recursive: rewrite just the resolved node with the new mtime.
+  const block = await blockstore.get(resolved.cid, options);
+  let metadata;
+  let links = [];
+  if (resolved.cid.code === code3) {
+    metadata = new UnixFS({ data: block });
+  } else {
+    const node = decode11(block);
+    links = node.Links;
+    if (node.Data == null) {
+      throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`);
+    }
+    metadata = UnixFS.unmarshal(node.Data);
+  }
+  metadata.mtime = mtime;
+  const updatedBlock = encode7({
+    Data: metadata.marshal(),
+    Links: links
+  });
+  const hash = await sha256.digest(updatedBlock);
+  const updatedCid = CID2.create(resolved.cid.version, code2, hash);
+  await blockstore.put(updatedCid, updatedBlock);
+  return updatePathCids(updatedCid, resolved, blockstore, opts);
+}
+
+// node_modules/it-glob/dist/src/index.js
+import fs4 from "fs/promises";
+import path2 from "path";
+
+// node_modules/minimatch/dist/mjs/index.js
+var import_brace_expansion = __toESM(require_brace_expansion(), 1);
+
+// node_modules/minimatch/dist/mjs/assert-valid-pattern.js
+var MAX_PATTERN_LENGTH = 1024 * 64;
+var assertValidPattern = (pattern) => {
+  if (typeof pattern !== "string") {
+    throw new TypeError("invalid pattern");
+  }
+  if (pattern.length > MAX_PATTERN_LENGTH) {
+    throw new TypeError("pattern is too long");
+  }
+};
+
+// node_modules/minimatch/dist/mjs/brace-expressions.js
+// POSIX bracket classes mapped to [regexp fragment, needs-u-flag, negated?].
+// The optional third element marks classes expressed as a negation.
+var posixClasses = {
+  "[:alnum:]": ["\\p{L}\\p{Nl}\\p{Nd}", true],
+  "[:alpha:]": ["\\p{L}\\p{Nl}", true],
+  "[:ascii:]": ["\\x00-\\x7f", false],
+  "[:blank:]": ["\\p{Zs}\\t", true],
+  "[:cntrl:]": ["\\p{Cc}", true],
+  "[:digit:]": ["\\p{Nd}", true],
+  "[:graph:]": ["\\p{Z}\\p{C}", true, true],
+  "[:lower:]": ["\\p{Ll}", true],
+  "[:print:]": ["\\p{C}", true],
+  "[:punct:]": ["\\p{P}", true],
+  "[:space:]": ["\\p{Z}\\t\\r\\n\\v\\f", true],
+  "[:upper:]": ["\\p{Lu}", true],
+  "[:word:]": ["\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}", true],
+  "[:xdigit:]": ["A-Fa-f0-9", false]
+};
+var braceEscape = (s) => s.replace(/[[\]\\-]/g, "\\$&");
+var regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
+var rangesToString = (ranges) => ranges.join("");
+/**
+ * Parses a bracket character class starting at `position` in `glob2`.
+ * Returns `[regexpFragment, needsUFlag, charsConsumed, isMagic]`.
+ * An unterminated class yields `["", false, 0, false]`; an impossible
+ * class yields the never-matching `"$."` fragment.
+ */
+var parseClass = (glob2, position) => {
+  const pos = position;
+  if (glob2.charAt(pos) !== "[") {
+    throw new Error("not in a brace expression");
+  }
+  const ranges = [];
+  const negs = [];
+  let i = pos + 1;
+  let sawStart = false;
+  let uflag = false;
+  let escaping = false;
+  let negate = false;
+  let endPos = pos;
+  let rangeStart = "";
+  WHILE:
+    while (i < glob2.length) {
+      const c = glob2.charAt(i);
+      // Leading ! or ^ negates the whole class.
+      if ((c === "!" || c === "^") && i === pos + 1) {
+        negate = true;
+        i++;
+        continue;
+      }
+      // Unescaped ] after at least one class member terminates the class.
+      if (c === "]" && sawStart && !escaping) {
+        endPos = i + 1;
+        break;
+      }
+      sawStart = true;
+      if (c === "\\") {
+        if (!escaping) {
+          escaping = true;
+          i++;
+          continue;
+        }
+      }
+      // Embedded POSIX class such as [:alpha:].
+      if (c === "[" && !escaping) {
+        for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+          if (glob2.startsWith(cls, i)) {
+            // A range cannot end in a POSIX class: return the impossible match.
+            if (rangeStart) {
+              return ["$.", false, glob2.length - pos, true];
+            }
+            i += cls.length;
+            if (neg)
+              negs.push(unip);
+            else
+              ranges.push(unip);
+            uflag = uflag || u;
+            continue WHILE;
+          }
+        }
+      }
+      escaping = false;
+      if (rangeStart) {
+        if (c > rangeStart) {
+          ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c));
+        } else if (c === rangeStart) {
+          ranges.push(braceEscape(c));
+        }
+        rangeStart = "";
+        i++;
+        continue;
+      }
+      // Trailing "-]" keeps the dash literal.
+      if (glob2.startsWith("-]", i + 1)) {
+        ranges.push(braceEscape(c + "-"));
+        i += 2;
+        continue;
+      }
+      if (glob2.startsWith("-", i + 1)) {
+        rangeStart = c;
+        i += 2;
+        continue;
+      }
+      ranges.push(braceEscape(c));
+      i++;
+    }
+  if (endPos < i) {
+    return ["", false, 0, false];
+  }
+  if (!ranges.length && !negs.length) {
+    return ["$.", false, glob2.length - pos, true];
+  }
+  // Single non-negated literal member collapses to a plain escaped character.
+  if (negs.length === 0 && ranges.length === 1 && /^\\?.$/.test(ranges[0]) && !negate) {
+    const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+    return [regexpEscape(r), false, endPos - pos, false];
+  }
+  const sranges = "[" + (negate ? "^" : "") + rangesToString(ranges) + "]";
+  const snegs = "[" + (negate ? "" : "^") + rangesToString(negs) + "]";
+  const comb = ranges.length && negs.length ? "(" + sranges + "|" + snegs + ")" : ranges.length ? sranges : snegs;
+  return [comb, uflag, endPos - pos, true];
+};
+
+// node_modules/minimatch/dist/mjs/unescape.js
+var unescape = (s, { windowsPathsNoEscape = false } = {}) => {
+  return windowsPathsNoEscape ? s.replace(/\[([^\/\\])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, "$1$2").replace(/\\([^\/])/g, "$1");
+};
+
+// node_modules/minimatch/dist/mjs/ast.js
+// Extglob type characters: !(…) ?(…) +(…) *(…) @(…).
+var types2 = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]);
+var isExtglobType = (c) => types2.has(c);
+// Lookahead fragments guarding "." and ".." path segments.
+var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))";
+var startNoDot = "(?!\\.)";
+// Characters that, at pattern start, may require the no-dot/no-traversal guards.
+var addPatternStart = /* @__PURE__ */ new Set(["[", "."]);
+var justDots = /* @__PURE__ */ new Set(["..", "."]);
+var reSpecials = new Set("().*{}+?[]^$\\!");
+var regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
+// "any single non-slash char" and its greedy-lazy repetitions.
+var qmark = "[^/]";
+var star = qmark + "*?";
+var starNoEmpty = qmark + "+?";
+// Abstract syntax tree for a single glob pattern (ported from minimatch).
+// Instances form a tree: the root holds the whole pattern and extglob
+// sub-expressions become child ASTs. `fromGlob()` parses a pattern and
+// `toMMPattern()` compiles the tree into either a plain string (when the
+// pattern has no magic) or a RegExp.
+var AST = class _AST {
+  type;
+  #root;
+  #hasMagic;
+  #uflag = false;
+  #parts = [];
+  #parent;
+  #parentIndex;
+  #negs;
+  #filledNegs = false;
+  #options;
+  #toString;
+  // set to true if it's an extglob with no children
+  // (which really means one child of '')
+  #emptyExt = false;
+  constructor(type, parent, options = {}) {
+    this.type = type;
+    if (type)
+      this.#hasMagic = true;
+    this.#parent = parent;
+    // Options and the negative-extglob list are shared from the root.
+    this.#root = this.#parent ? this.#parent.#root : this;
+    this.#options = this.#root === this ? options : this.#root.#options;
+    this.#negs = this.#root === this ? [] : this.#root.#negs;
+    if (type === "!" && !this.#root.#filledNegs)
+      this.#negs.push(this);
+    this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+  }
+  get hasMagic() {
+    if (this.#hasMagic !== void 0)
+      return this.#hasMagic;
+    for (const p of this.#parts) {
+      if (typeof p === "string")
+        continue;
+      if (p.type || p.hasMagic)
+        return this.#hasMagic = true;
+    }
+    return this.#hasMagic;
+  }
+  // reconstructs the pattern
+  toString() {
+    if (this.#toString !== void 0)
+      return this.#toString;
+    if (!this.type) {
+      return this.#toString = this.#parts.map((p) => String(p)).join("");
+    } else {
+      return this.#toString = this.type + "(" + this.#parts.map((p) => String(p)).join("|") + ")";
+    }
+  }
+  // Copies the parts that follow each negative extglob into the extglob
+  // itself, so "!(a)b" correctly requires the "b" tail not to match "a b".
+  #fillNegs() {
+    if (this !== this.#root)
+      throw new Error("should only call on root");
+    if (this.#filledNegs)
+      return this;
+    this.toString();
+    this.#filledNegs = true;
+    let n;
+    while (n = this.#negs.pop()) {
+      if (n.type !== "!")
+        continue;
+      let p = n;
+      let pp = p.#parent;
+      while (pp) {
+        for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+          for (const part of n.#parts) {
+            if (typeof part === "string") {
+              throw new Error("string part in extglob AST??");
+            }
+            part.copyIn(pp.#parts[i]);
+          }
+        }
+        p = pp;
+        pp = p.#parent;
+      }
+    }
+    return this;
+  }
+  push(...parts) {
+    for (const p of parts) {
+      if (p === "")
+        continue;
+      if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) {
+        throw new Error("invalid part: " + p);
+      }
+      this.#parts.push(p);
+    }
+  }
+  toJSON() {
+    var _a;
+    const ret = this.type === null ? this.#parts.slice().map((p) => typeof p === "string" ? p : p.toJSON()) : [this.type, ...this.#parts.map((p) => p.toJSON())];
+    if (this.isStart() && !this.type)
+      ret.unshift([]);
+    if (this.isEnd() && (this === this.#root || this.#root.#filledNegs && ((_a = this.#parent) == null ? void 0 : _a.type) === "!")) {
+      ret.push({});
+    }
+    return ret;
+  }
+  isStart() {
+    var _a;
+    if (this.#root === this)
+      return true;
+    if (!((_a = this.#parent) == null ? void 0 : _a.isStart()))
+      return false;
+    if (this.#parentIndex === 0)
+      return true;
+    const p = this.#parent;
+    for (let i = 0; i < this.#parentIndex; i++) {
+      const pp = p.#parts[i];
+      if (!(pp instanceof _AST && pp.type === "!")) {
+        return false;
+      }
+    }
+    return true;
+  }
+  isEnd() {
+    var _a, _b, _c;
+    if (this.#root === this)
+      return true;
+    if (((_a = this.#parent) == null ? void 0 : _a.type) === "!")
+      return true;
+    if (!((_b = this.#parent) == null ? void 0 : _b.isEnd()))
+      return false;
+    if (!this.type)
+      return (_c = this.#parent) == null ? void 0 : _c.isEnd();
+    const pl = this.#parent ? this.#parent.#parts.length : 0;
+    return this.#parentIndex === pl - 1;
+  }
+  copyIn(part) {
+    if (typeof part === "string")
+      this.push(part);
+    else
+      this.push(part.clone(this));
+  }
+  clone(parent) {
+    const c = new _AST(this.type, parent);
+    for (const p of this.#parts) {
+      c.copyIn(p);
+    }
+    return c;
+  }
+  // Recursive-descent parse of `str` into `ast` starting at `pos`;
+  // returns the index it parsed up to.
+  static #parseAST(str, ast, pos, opt) {
+    let escaping = false;
+    let inBrace = false;
+    let braceStart = -1;
+    let braceNeg = false;
+    if (ast.type === null) {
+      let i2 = pos;
+      let acc2 = "";
+      while (i2 < str.length) {
+        const c = str.charAt(i2++);
+        if (escaping || c === "\\") {
+          escaping = !escaping;
+          acc2 += c;
+          continue;
+        }
+        if (inBrace) {
+          if (i2 === braceStart + 1) {
+            if (c === "^" || c === "!") {
+              braceNeg = true;
+            }
+          } else if (c === "]" && !(i2 === braceStart + 2 && braceNeg)) {
+            inBrace = false;
+          }
+          acc2 += c;
+          continue;
+        } else if (c === "[") {
+          inBrace = true;
+          braceStart = i2;
+          braceNeg = false;
+          acc2 += c;
+          continue;
+        }
+        if (!opt.noext && isExtglobType(c) && str.charAt(i2) === "(") {
+          ast.push(acc2);
+          acc2 = "";
+          const ext2 = new _AST(c, ast);
+          i2 = _AST.#parseAST(str, ext2, i2, opt);
+          ast.push(ext2);
+          continue;
+        }
+        acc2 += c;
+      }
+      ast.push(acc2);
+      return i2;
+    }
+    let i = pos + 1;
+    let part = new _AST(null, ast);
+    const parts = [];
+    let acc = "";
+    while (i < str.length) {
+      const c = str.charAt(i++);
+      if (escaping || c === "\\") {
+        escaping = !escaping;
+        acc += c;
+        continue;
+      }
+      if (inBrace) {
+        if (i === braceStart + 1) {
+          if (c === "^" || c === "!") {
+            braceNeg = true;
+          }
+        } else if (c === "]" && !(i === braceStart + 2 && braceNeg)) {
+          inBrace = false;
+        }
+        acc += c;
+        continue;
+      } else if (c === "[") {
+        inBrace = true;
+        braceStart = i;
+        braceNeg = false;
+        acc += c;
+        continue;
+      }
+      if (isExtglobType(c) && str.charAt(i) === "(") {
+        part.push(acc);
+        acc = "";
+        const ext2 = new _AST(c, part);
+        part.push(ext2);
+        i = _AST.#parseAST(str, ext2, i, opt);
+        continue;
+      }
+      if (c === "|") {
+        part.push(acc);
+        acc = "";
+        parts.push(part);
+        part = new _AST(null, ast);
+        continue;
+      }
+      if (c === ")") {
+        if (acc === "" && ast.#parts.length === 0) {
+          ast.#emptyExt = true;
+        }
+        part.push(acc);
+        acc = "";
+        ast.push(...parts, part);
+        return i;
+      }
+      acc += c;
+    }
+    // Unterminated extglob: demote to a literal part.
+    ast.type = null;
+    ast.#hasMagic = void 0;
+    ast.#parts = [str.substring(pos - 1)];
+    return i;
+  }
+  static fromGlob(pattern, options = {}) {
+    const ast = new _AST(null, void 0, options);
+    _AST.#parseAST(pattern, ast, 0, options);
+    return ast;
+  }
+  // returns the regular expression if there's magic, or the unescaped
+  // string if not.
+  toMMPattern() {
+    if (this !== this.#root)
+      return this.#root.toMMPattern();
+    const glob2 = this.toString();
+    const [re, body, hasMagic, uflag] = this.toRegExpSource();
+    const anyMagic = hasMagic || this.#hasMagic || this.#options.nocase && !this.#options.nocaseMagicOnly && glob2.toUpperCase() !== glob2.toLowerCase();
+    if (!anyMagic) {
+      return body;
+    }
+    const flags = (this.#options.nocase ? "i" : "") + (uflag ? "u" : "");
+    return Object.assign(new RegExp(`^${re}$`, flags), {
+      _src: re,
+      _glob: glob2
+    });
+  }
+  // returns the string match, the regexp source, whether there's magic
+  // in the regexp (so a regular expression is required) and whether or
+  // not the uflag is needed for the regular expression (for posix classes)
+  // TODO: instead of injecting the start/end at this point, just return
+  // the BODY of the regexp, along with the start/end portions suitable
+  // for binding the start/end in either a joined full-path makeRe context
+  // (where we bind to (^|/), or a standalone matchPart context (where
+  // we bind to ^, and not /).  Otherwise slashes get duped!
+  //
+  // In part-matching mode, the start is:
+  // - if not isStart: nothing
+  // - if traversal possible, but not allowed: ^(?!\.\.?$)
+  // - if dots allowed or not possible: ^
+  // - if dots possible and not allowed: ^(?!\.)
+  // end is:
+  // - if not isEnd(): nothing
+  // - else: $
+  //
+  // In full-path matching mode, we put the slash at the START of the
+  // pattern, so start is:
+  // - if first pattern: same as part-matching mode
+  // - if not isStart(): nothing
+  // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+  // - if dots allowed or not possible: /
+  // - if dots possible and not allowed: /(?!\.)
+  // end is:
+  // - if last pattern, same as part-matching mode
+  // - else nothing
+  //
+  // Always put the (?:$|/) on negated tails, though, because that has to be
+  // there to bind the end of the negated pattern portion, and it's easier to
+  // just stick it in now rather than try to inject it later in the middle of
+  // the pattern.
+  //
+  // We can just always return the same end, and leave it up to the caller
+  // to know whether it's going to be used joined or in parts.
+  // And, if the start is adjusted slightly, can do the same there:
+  // - if not isStart: nothing
+  // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+  // - if dots allowed or not possible: (?:/|^)
+  // - if dots possible and not allowed: (?:/|^)(?!\.)
+  //
+  // But it's better to have a simpler binding without a conditional, for
+  // performance, so probably better to return both start options.
+  //
+  // Then the caller just ignores the end if it's not the first pattern,
+  // and the start always gets applied.
+  //
+  // But that's always going to be $ if it's the ending pattern, or nothing,
+  // so the caller can just attach $ at the end of the pattern when building.
+  //
+  // So the todo is:
+  // - better detect what kind of start is needed
+  // - return both flavors of starting pattern
+  // - attach $ at the end of the pattern when creating the actual RegExp
+  //
+  // Ah, but wait, no, that all only applies to the root when the first pattern
+  // is not an extglob. If the first pattern IS an extglob, then we need all
+  // that dot prevention biz to live in the extglob portions, because eg
+  // +(*|.x*) can match .xy but not .yx.
+  //
+  // So, return the two flavors if it's #root and the first child is not an
+  // AST, otherwise leave it to the child AST to handle it, and there,
+  // use the (?:^|/) style of start binding.
+  //
+  // Even simplified further:
+  // - Since the start for a join is eg /(?!\.) and the start for a part
+  // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+  // or start or whatever) and prepend ^ or / at the Regexp construction.
+  toRegExpSource(allowDot) {
+    var _a;
+    const dot = allowDot ?? !!this.#options.dot;
+    if (this.#root === this)
+      this.#fillNegs();
+    if (!this.type) {
+      const noEmpty = this.isStart() && this.isEnd();
+      const src3 = this.#parts.map((p) => {
+        const [re, _, hasMagic, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot);
+        this.#hasMagic = this.#hasMagic || hasMagic;
+        this.#uflag = this.#uflag || uflag;
+        return re;
+      }).join("");
+      let start2 = "";
+      if (this.isStart()) {
+        if (typeof this.#parts[0] === "string") {
+          const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+          if (!dotTravAllowed) {
+            const aps = addPatternStart;
+            const needNoTrav = (
+              // dots are allowed, and the pattern starts with [ or .
+              dot && aps.has(src3.charAt(0)) || // the pattern starts with \., and then [ or .
+              src3.startsWith("\\.") && aps.has(src3.charAt(2)) || // the pattern starts with \.\., and then [ or .
+              src3.startsWith("\\.\\.") && aps.has(src3.charAt(4))
+            );
+            const needNoDot = !dot && !allowDot && aps.has(src3.charAt(0));
+            start2 = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : "";
+          }
+        }
+      }
+      let end = "";
+      if (this.isEnd() && this.#root.#filledNegs && ((_a = this.#parent) == null ? void 0 : _a.type) === "!") {
+        end = "(?:$|\\/)";
+      }
+      const final2 = start2 + src3 + end;
+      return [
+        final2,
+        unescape(src3),
+        this.#hasMagic = !!this.#hasMagic,
+        this.#uflag
+      ];
+    }
+    const repeated = this.type === "*" || this.type === "+";
+    const start = this.type === "!" ? "(?:(?!(?:" : "(?:";
+    let body = this.#partsToRegExp(dot);
+    if (this.isStart() && this.isEnd() && !body && this.type !== "!") {
+      const s = this.toString();
+      this.#parts = [s];
+      this.type = null;
+      this.#hasMagic = void 0;
+      return [s, unescape(this.toString()), false, false];
+    }
+    let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? "" : this.#partsToRegExp(true);
+    if (bodyDotAllowed === body) {
+      bodyDotAllowed = "";
+    }
+    if (bodyDotAllowed) {
+      body = `(?:${body})(?:${bodyDotAllowed})*?`;
+    }
+    let final = "";
+    if (this.type === "!" && this.#emptyExt) {
+      final = (this.isStart() && !dot ? startNoDot : "") + starNoEmpty;
+    } else {
+      const close = this.type === "!" ? (
+        // !() must match something,but !(x) can match ''
+        "))" + (this.isStart() && !dot && !allowDot ? startNoDot : "") + star + ")"
+      ) : this.type === "@" ? ")" : this.type === "?" ? ")?" : this.type === "+" && bodyDotAllowed ? ")" : this.type === "*" && bodyDotAllowed ? `)?` : `)${this.type}`;
+      final = start + body + close;
+    }
+    return [
+      final,
+      unescape(body),
+      this.#hasMagic = !!this.#hasMagic,
+      this.#uflag
+    ];
+  }
+  // Joins child parts into an alternation body for this extglob.
+  #partsToRegExp(dot) {
+    return this.#parts.map((p) => {
+      if (typeof p === "string") {
+        throw new Error("string type in extglob ast??");
+      }
+      const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+      this.#uflag = this.#uflag || uflag;
+      return re;
+    }).filter((p) => !(this.isStart() && this.isEnd()) || !!p).join("|");
+  }
+  // Converts a plain (non-extglob) glob fragment into regexp source;
+  // returns [re, unescaped, hasMagic, uflag].
+  static #parseGlob(glob2, hasMagic, noEmpty = false) {
+    let escaping = false;
+    let re = "";
+    let uflag = false;
+    for (let i = 0; i < glob2.length; i++) {
+      const c = glob2.charAt(i);
+      if (escaping) {
+        escaping = false;
+        re += (reSpecials.has(c) ? "\\" : "") + c;
+        continue;
+      }
+      if (c === "\\") {
+        if (i === glob2.length - 1) {
+          re += "\\\\";
+        } else {
+          escaping = true;
+        }
+        continue;
+      }
+      if (c === "[") {
+        const [src3, needUflag, consumed, magic] = parseClass(glob2, i);
+        if (consumed) {
+          re += src3;
+          uflag = uflag || needUflag;
+          i += consumed - 1;
+          hasMagic = hasMagic || magic;
+          continue;
+        }
+      }
+      if (c === "*") {
+        if (noEmpty && glob2 === "*")
+          re += starNoEmpty;
+        else
+          re += star;
+        hasMagic = true;
+        continue;
+      }
+      if (c === "?") {
+        re += qmark;
+        hasMagic = true;
+        continue;
+      }
+      re += regExpEscape(c);
+    }
+    return [re, unescape(glob2), !!hasMagic, uflag];
+  }
+};
+
+// node_modules/minimatch/dist/mjs/escape.js
+var escape = (s, { windowsPathsNoEscape = false } = {}) => {
+  return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, "[$&]") : s.replace(/[?*()[\]\\]/g, "\\$&");
+};
+
+// node_modules/minimatch/dist/mjs/index.js
+var minimatch = (p, pattern, options = {}) => {
+  assertValidPattern(pattern);
+  if (!options.nocomment && pattern.charAt(0) === "#") {
+    return false;
+  }
+  return new Minimatch(pattern, options).match(p);
+};
// Fast-path predicates for trivially simple glob patterns. Minimatch#parse
// matches a segment against these regexes and, on a hit, attaches the
// corresponding closure as a cheap `test` that bypasses full regexp matching.

// "*.ext" style: one or more stars plus a literal (magic-free) suffix.
var starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
var starDotExtTest = (ext2) => (f) => f.endsWith(ext2) && !f.startsWith(".");
var starDotExtTestDot = (ext2) => (f) => f.endsWith(ext2);
var starDotExtTestNocase = (ext2) => {
  const suffix = ext2.toLowerCase();
  return (f) => !f.startsWith(".") && f.toLowerCase().endsWith(suffix);
};
var starDotExtTestNocaseDot = (ext2) => {
  const suffix = ext2.toLowerCase();
  return (f) => f.toLowerCase().endsWith(suffix);
};
// "*.*": any name containing a dot.
var starDotStarRE = /^\*+\.\*+$/;
var starDotStarTest = (f) => f.includes(".") && !f.startsWith(".");
var starDotStarTestDot = (f) => f.includes(".") && f !== "." && f !== "..";
// ".*": any dot-file other than "." and "..".
var dotStarRE = /^\.\*+$/;
var dotStarTest = (f) => f.startsWith(".") && f !== "." && f !== "..";
// "*": any non-empty name (dot-files excluded unless options.dot).
var starRE = /^\*+$/;
var starTest = (f) => !f.startsWith(".") && f.length !== 0;
var starTestDot = (f) => f !== "." && f !== ".." && f.length !== 0;
// "?x.ext" style: question marks plus an optional literal suffix.
// $0 is the full matched pattern, so its length is the required name length.
var qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
var qmarksTestNocase = ([$0, ext2 = ""]) => {
  const noext = qmarksTestNoExt([$0]);
  if (!ext2) {
    return noext;
  }
  const suffix = ext2.toLowerCase();
  return (f) => noext(f) && f.toLowerCase().endsWith(suffix);
};
var qmarksTestNocaseDot = ([$0, ext2 = ""]) => {
  const noext = qmarksTestNoExtDot([$0]);
  if (!ext2) {
    return noext;
  }
  const suffix = ext2.toLowerCase();
  return (f) => noext(f) && f.toLowerCase().endsWith(suffix);
};
var qmarksTestDot = ([$0, ext2 = ""]) => {
  const noext = qmarksTestNoExtDot([$0]);
  if (!ext2) {
    return noext;
  }
  return (f) => noext(f) && f.endsWith(ext2);
};
var qmarksTest = ([$0, ext2 = ""]) => {
  const noext = qmarksTestNoExt([$0]);
  if (!ext2) {
    return noext;
  }
  return (f) => noext(f) && f.endsWith(ext2);
};
var qmarksTestNoExt = ([$0]) => {
  const len = $0.length;
  return (f) => !f.startsWith(".") && f.length === len;
};
var qmarksTestNoExtDot = ([$0]) => {
  const len = $0.length;
  return (f) => f !== "." && f !== ".." && f.length === len;
};
// Platform detection: honor the __MINIMATCH_TESTING_PLATFORM__ env override
// (used by minimatch's own test suite), otherwise process.platform; fall
// back to "posix" outside Node-like environments.
var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix";
// Minimal stand-in for node:path — only the separator is needed here.
var path = {
  win32: { sep: "\\" },
  posix: { sep: "/" }
};
// Path separator for the detected platform, exposed as minimatch.sep.
var sep = defaultPlatform === "win32" ? path.win32.sep : path.posix.sep;
minimatch.sep = sep;
// Sentinel placed in parsed pattern sets wherever a "**" segment appears.
var GLOBSTAR = Symbol("globstar **");
minimatch.GLOBSTAR = GLOBSTAR;
// Regexp source fragments used when compiling globs:
// "?"  — any single character except the path separator
var qmark2 = "[^/]";
// "*"  — zero or more non-separator characters, non-greedy
var star2 = qmark2 + "*?";
// "**" with options.dot — anything that is not a "." or ".." path part
var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?";
// "**" by default — anything without a dot-file path part
var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?";
// minimatch.filter: partially-applied matcher for use with Array#filter.
var filter2 = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
minimatch.filter = filter2;
// Shallow-merge two option objects (b wins on conflicts).
var ext = (a, b = {}) => Object.assign({}, a, b);
// minimatch.defaults: build a complete minimatch-compatible API in which
// every entry point merges the given `def` options into the caller's
// options. Returns the original function unchanged when `def` is empty.
var defaults = (def) => {
  if (!def || typeof def !== "object" || !Object.keys(def).length) {
    return minimatch;
  }
  const orig = minimatch;
  // the wrapped matcher itself
  const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
  return Object.assign(m, {
    // Subclasses bake the defaults into every constructed instance;
    // nested .defaults() calls keep accumulating onto `def`.
    Minimatch: class Minimatch extends orig.Minimatch {
      constructor(pattern, options = {}) {
        super(pattern, ext(def, options));
      }
      static defaults(options) {
        return orig.defaults(ext(def, options)).Minimatch;
      }
    },
    AST: class AST extends orig.AST {
      /* c8 ignore start */
      constructor(type, parent, options = {}) {
        super(type, parent, ext(def, options));
      }
      /* c8 ignore stop */
      static fromGlob(pattern, options = {}) {
        return orig.AST.fromGlob(pattern, ext(def, options));
      }
    },
    unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
    escape: (s, options = {}) => orig.escape(s, ext(def, options)),
    filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
    defaults: (options) => orig.defaults(ext(def, options)),
    makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
    braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
    match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
    sep: orig.sep,
    GLOBSTAR
  });
};
minimatch.defaults = defaults;
// Expand {a,b}-style brace alternatives into separate pattern strings.
// Returns the pattern as a single-element array when expansion is disabled
// (options.nobrace) or no complete brace group is present.
var braceExpand = (pattern, options = {}) => {
  assertValidPattern(pattern);
  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
    // shortcut: nothing to expand
    return [pattern];
  }
  return (0, import_brace_expansion.default)(pattern);
};
minimatch.braceExpand = braceExpand;
// Compile a pattern into a single RegExp (false when it can never match).
var makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
minimatch.makeRe = makeRe;
// Filter `list` to entries matching `pattern`. With options.nonull, a
// pattern that matched nothing is returned as its own result (sh-like).
var match = (list, pattern, options = {}) => {
  const mm = new Minimatch(pattern, options);
  list = list.filter((f) => mm.match(f));
  if (mm.options.nonull && !list.length) {
    list.push(pattern);
  }
  return list;
};
minimatch.match = match;
// Detects any glob "magic": ?, *, extglob parens, or character-class brackets.
var globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
// Escape a literal string for safe embedding in a RegExp source.
var regExpEscape2 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
/**
 * A compiled glob pattern. Construction runs the full pipeline:
 * negation parsing -> brace expansion -> slash splitting -> preprocessing
 * (globstar/dot-segment optimization) -> per-segment parsing into literal
 * strings, RegExps, or the GLOBSTAR sentinel. `match(path)` then tests
 * candidate path strings against the resulting pattern set.
 */
var Minimatch = class {
  options;
  // Parsed patterns: each entry is an array of segments, where a segment is
  // a literal string, a RegExp, GLOBSTAR, or false (unparseable — filtered out).
  set;
  pattern;
  windowsPathsNoEscape;
  nonegate;
  negate;
  // true when the pattern is a "#..." comment (matches nothing)
  comment;
  // true when the pattern is the empty string (matches only "")
  empty;
  preserveMultipleSlashes;
  partial;
  // brace-expanded pattern strings, and the same strings split on "/"
  globSet;
  globParts;
  nocase;
  isWindows;
  platform;
  windowsNoMagicRoot;
  // cached makeRe() result: RegExp, false, or null (not yet built)
  regexp;
  constructor(pattern, options = {}) {
    assertValidPattern(pattern);
    options = options || {};
    this.options = options;
    this.pattern = pattern;
    this.platform = options.platform || defaultPlatform;
    this.isWindows = this.platform === "win32";
    this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
    if (this.windowsPathsNoEscape) {
      // treat "\" as a path separator, not an escape character
      this.pattern = this.pattern.replace(/\\/g, "/");
    }
    this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
    this.regexp = null;
    this.negate = false;
    this.nonegate = !!options.nonegate;
    this.comment = false;
    this.empty = false;
    this.partial = !!options.partial;
    this.nocase = !!this.options.nocase;
    this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? options.windowsNoMagicRoot : !!(this.isWindows && this.nocase);
    this.globSet = [];
    this.globParts = [];
    this.set = [];
    this.make();
  }
  // True when the pattern contains anything beyond literal strings.
  hasMagic() {
    if (this.options.magicalBraces && this.set.length > 1) {
      return true;
    }
    for (const pattern of this.set) {
      for (const part of pattern) {
        if (typeof part !== "string")
          return true;
      }
    }
    return false;
  }
  // No-op unless options.debug is set (replaced in make()).
  debug(..._) {
  }
  // Run the parse pipeline and populate globSet / globParts / set.
  make() {
    const pattern = this.pattern;
    const options = this.options;
    if (!options.nocomment && pattern.charAt(0) === "#") {
      this.comment = true;
      return;
    }
    if (!pattern) {
      this.empty = true;
      return;
    }
    this.parseNegate();
    this.globSet = [...new Set(this.braceExpand())];
    if (options.debug) {
      this.debug = (...args) => console.error(...args);
    }
    this.debug(this.pattern, this.globSet);
    const rawGlobParts = this.globSet.map((s) => this.slashSplit(s));
    this.globParts = this.preprocess(rawGlobParts);
    this.debug(this.pattern, this.globParts);
    let set = this.globParts.map((s, _, __) => {
      if (this.isWindows && this.windowsNoMagicRoot) {
        // Keep UNC (//?/C:/... or //host/share) and drive-letter roots as
        // literals so case-insensitive regexps can't mangle them.
        const isUNC = s[0] === "" && s[1] === "" && (s[2] === "?" || !globMagic.test(s[2])) && !globMagic.test(s[3]);
        const isDrive = /^[a-z]:/i.test(s[0]);
        if (isUNC) {
          return [...s.slice(0, 4), ...s.slice(4).map((ss) => this.parse(ss))];
        } else if (isDrive) {
          return [s[0], ...s.slice(1).map((ss) => this.parse(ss))];
        }
      }
      return s.map((ss) => this.parse(ss));
    });
    this.debug(this.pattern, set);
    this.set = set.filter((s) => s.indexOf(false) === -1);
    if (this.isWindows) {
      // //?/C:/... paths: restore the literal "?" so it is not treated as
      // a single-character wildcard.
      for (let i = 0; i < this.set.length; i++) {
        const p = this.set[i];
        if (p[0] === "" && p[1] === "" && this.globParts[i][2] === "?" && typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) {
          p[2] = "?";
        }
      }
    }
    this.debug(this.pattern, this.set);
  }
  // various transforms to equivalent pattern sets that are
  // faster to process in a filesystem walk.  The goal is to
  // eliminate what we can, and push all ** patterns as far
  // to the right as possible, even if it increases the number
  // of patterns that we have to process.
  preprocess(globParts) {
    if (this.options.noglobstar) {
      // "**" is just "*" when globstar is disabled
      for (let i = 0; i < globParts.length; i++) {
        for (let j = 0; j < globParts[i].length; j++) {
          if (globParts[i][j] === "**") {
            globParts[i][j] = "*";
          }
        }
      }
    }
    const { optimizationLevel = 1 } = this.options;
    if (optimizationLevel >= 2) {
      globParts = this.firstPhasePreProcess(globParts);
      globParts = this.secondPhasePreProcess(globParts);
    } else if (optimizationLevel >= 1) {
      globParts = this.levelOneOptimize(globParts);
    } else {
      globParts = this.adjascentGlobstarOptimize(globParts);
    }
    return globParts;
  }
  // just get rid of adjascent ** portions
  adjascentGlobstarOptimize(globParts) {
    return globParts.map((parts) => {
      let gs = -1;
      while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
        let i = gs;
        while (parts[i + 1] === "**") {
          i++;
        }
        if (i !== gs) {
          parts.splice(gs, i - gs);
        }
      }
      return parts;
    });
  }
  // get rid of adjascent ** and resolve .. portions
  levelOneOptimize(globParts) {
    return globParts.map((parts) => {
      parts = parts.reduce((set, part) => {
        const prev = set[set.length - 1];
        if (part === "**" && prev === "**") {
          return set;
        }
        if (part === "..") {
          // ".." cancels a preceding concrete segment
          if (prev && prev !== ".." && prev !== "." && prev !== "**") {
            set.pop();
            return set;
          }
        }
        set.push(part);
        return set;
      }, []);
      return parts.length === 0 ? [""] : parts;
    });
  }
  // Collapse "." and empty segments and resolve ".." against concrete
  // parent segments in a single (file) path, repeating until stable.
  levelTwoFileOptimize(parts) {
    if (!Array.isArray(parts)) {
      parts = this.slashSplit(parts);
    }
    let didSomething = false;
    do {
      didSomething = false;
      if (!this.preserveMultipleSlashes) {
        for (let i = 1; i < parts.length - 1; i++) {
          const p = parts[i];
          // preserve a leading "//" (UNC-style root)
          if (i === 1 && p === "" && parts[0] === "")
            continue;
          if (p === "." || p === "") {
            didSomething = true;
            parts.splice(i, 1);
            i--;
          }
        }
        if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
          didSomething = true;
          parts.pop();
        }
      }
      let dd = 0;
      while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
        const p = parts[dd - 1];
        if (p && p !== "." && p !== ".." && p !== "**") {
          didSomething = true;
          parts.splice(dd - 1, 2);
          dd -= 2;
        }
      }
    } while (didSomething);
    return parts.length === 0 ? [""] : parts;
  }
  // First phase: single-pattern processing
  // <pre> is 1 or more portions
  // <rest> is 1 or more portions
  // <p> is any portion other than ., .., '', or **
  // <e> is . or ''
  //
  // **/.. is *brutal* for filesystem walking performance, because
  // it effectively resets the recursive walk each time it occurs,
  // and ** cannot be reduced out by a .. pattern part like a regexp
  // or most strings (other than .., ., and '') can be.
  //
  // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
  // <pre>/<e>/<rest> -> <pre>/<rest>
  // <pre>/<p>/../<rest> -> <pre>/<rest>
  // **/**/<rest> -> **/<rest>
  //
  // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
  // this WOULD be allowed if ** did follow symlinks, or * didn't
  firstPhasePreProcess(globParts) {
    let didSomething = false;
    do {
      didSomething = false;
      for (let parts of globParts) {
        let gs = -1;
        while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
          let gss = gs;
          while (parts[gss + 1] === "**") {
            gss++;
          }
          if (gss > gs) {
            parts.splice(gs + 1, gss - gs);
          }
          let next = parts[gs + 1];
          const p = parts[gs + 2];
          const p2 = parts[gs + 3];
          if (next !== "..")
            continue;
          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
            continue;
          }
          didSomething = true;
          // split "**/.." into the two equivalent alternatives noted above
          parts.splice(gs, 1);
          const other = parts.slice(0);
          other[gs] = "**";
          globParts.push(other);
          gs--;
        }
        if (!this.preserveMultipleSlashes) {
          for (let i = 1; i < parts.length - 1; i++) {
            const p = parts[i];
            if (i === 1 && p === "" && parts[0] === "")
              continue;
            if (p === "." || p === "") {
              didSomething = true;
              parts.splice(i, 1);
              i--;
            }
          }
          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
            didSomething = true;
            parts.pop();
          }
        }
        let dd = 0;
        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
          const p = parts[dd - 1];
          if (p && p !== "." && p !== ".." && p !== "**") {
            didSomething = true;
            const needDot = dd === 1 && parts[dd + 1] === "**";
            const splin = needDot ? ["."] : [];
            parts.splice(dd - 1, 2, ...splin);
            if (parts.length === 0)
              parts.push("");
            dd -= 2;
          }
        }
      }
    } while (didSomething);
    return globParts;
  }
  // second phase: multi-pattern dedupes
  // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
  // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
  // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
  //
  // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
  // ^-- not valid because ** doens't follow symlinks
  secondPhasePreProcess(globParts) {
    for (let i = 0; i < globParts.length - 1; i++) {
      for (let j = i + 1; j < globParts.length; j++) {
        const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
        if (!matched)
          continue;
        globParts[i] = matched;
        globParts[j] = [];
      }
    }
    return globParts.filter((gs) => gs.length);
  }
  // Try to merge pattern `a` and pattern `b` into one pattern that matches
  // the union of both; returns the merged parts array, or false when the
  // two cannot be merged.
  partsMatch(a, b, emptyGSMatch = false) {
    let ai = 0;
    let bi = 0;
    let result = [];
    let which = "";
    while (ai < a.length && bi < b.length) {
      if (a[ai] === b[bi]) {
        result.push(which === "b" ? b[bi] : a[ai]);
        ai++;
        bi++;
      } else if (emptyGSMatch && a[ai] === "**" && b[bi] === a[ai + 1]) {
        result.push(a[ai]);
        ai++;
      } else if (emptyGSMatch && b[bi] === "**" && a[ai] === b[bi + 1]) {
        result.push(b[bi]);
        bi++;
      } else if (a[ai] === "*" && b[bi] && (this.options.dot || !b[bi].startsWith(".")) && b[bi] !== "**") {
        // "*" on one side may absorb a concrete segment on the other,
        // but only ever in one direction per merge
        if (which === "b")
          return false;
        which = "a";
        result.push(a[ai]);
        ai++;
        bi++;
      } else if (b[bi] === "*" && a[ai] && (this.options.dot || !a[ai].startsWith(".")) && a[ai] !== "**") {
        if (which === "a")
          return false;
        which = "b";
        result.push(b[bi]);
        ai++;
        bi++;
      } else {
        return false;
      }
    }
    return a.length === b.length && result;
  }
  // Strip leading "!" characters; an odd count negates the whole pattern.
  parseNegate() {
    if (this.nonegate)
      return;
    const pattern = this.pattern;
    let negate = false;
    let negateOffset = 0;
    for (let i = 0; i < pattern.length && pattern.charAt(i) === "!"; i++) {
      negate = !negate;
      negateOffset++;
    }
    if (negateOffset)
      this.pattern = pattern.slice(negateOffset);
    this.negate = negate;
  }
  // set partial to true to test if, for example,
  // "/a/b" matches the start of "/*/b/*/d"
  // Partial means, if you run out of file before you run
  // out of pattern, then that's fine, as long as all
  // the parts match.
  matchOne(file, pattern, partial = false) {
    const options = this.options;
    if (this.isWindows) {
      // Case-normalize drive letters / UNC roots before comparing.
      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
      if (typeof fdi === "number" && typeof pdi === "number") {
        const [fd, pd] = [file[fdi], pattern[pdi]];
        if (fd.toLowerCase() === pd.toLowerCase()) {
          pattern[pdi] = fd;
          if (pdi > fdi) {
            pattern = pattern.slice(pdi);
          } else if (fdi > pdi) {
            file = file.slice(fdi);
          }
        }
      }
    }
    const { optimizationLevel = 1 } = this.options;
    if (optimizationLevel >= 2) {
      file = this.levelTwoFileOptimize(file);
    }
    this.debug("matchOne", this, { file, pattern });
    this.debug("matchOne", file.length, pattern.length);
    for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
      this.debug("matchOne loop");
      var p = pattern[pi];
      var f = file[fi];
      this.debug(pattern, p, f);
      if (p === false) {
        return false;
      }
      if (p === GLOBSTAR) {
        this.debug("GLOBSTAR", [pattern, p, f]);
        // ** consumes zero or more file segments, but never dot-files
        // (unless options.dot) and never "." or ".." segments.
        var fr = fi;
        var pr = pi + 1;
        if (pr === pl) {
          this.debug("** at the end");
          for (; fi < fl; fi++) {
            if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".")
              return false;
          }
          return true;
        }
        while (fr < fl) {
          var swallowee = file[fr];
          this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
            this.debug("globstar found match!", fr, fl, swallowee);
            return true;
          } else {
            if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
              this.debug("dot detected!", file, fr, pattern, pr);
              break;
            }
            this.debug("globstar swallow a segment, and continue");
            fr++;
          }
        }
        if (partial) {
          this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
          if (fr === fl) {
            return true;
          }
        }
        return false;
      }
      let hit;
      if (typeof p === "string") {
        hit = f === p;
        this.debug("string match", p, f, hit);
      } else {
        hit = p.test(f);
        this.debug("pattern match", p, f, hit);
      }
      if (!hit)
        return false;
    }
    if (fi === fl && pi === pl) {
      // exhausted both: match
      return true;
    } else if (fi === fl) {
      // ran out of file first: only a match in partial mode
      return partial;
    } else if (pi === pl) {
      // ran out of pattern first: only ok for a trailing-slash remainder
      return fi === fl - 1 && file[fi] === "";
    } else {
      throw new Error("wtf?");
    }
  }
  // Brace-expand this pattern with this instance's options.
  braceExpand() {
    return braceExpand(this.pattern, this.options);
  }
  // Parse one path segment into "" / a literal string / GLOBSTAR / a RegExp
  // (with a cheap fast-path `test` attached for trivial glob shapes).
  parse(pattern) {
    assertValidPattern(pattern);
    const options = this.options;
    if (pattern === "**")
      return GLOBSTAR;
    if (pattern === "")
      return "";
    let m;
    let fastTest = null;
    if (m = pattern.match(starRE)) {
      fastTest = options.dot ? starTestDot : starTest;
    } else if (m = pattern.match(starDotExtRE)) {
      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m[1]);
    } else if (m = pattern.match(qmarksRE)) {
      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m);
    } else if (m = pattern.match(starDotStarRE)) {
      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
    } else if (m = pattern.match(dotStarRE)) {
      fastTest = dotStarTest;
    }
    const re = AST.fromGlob(pattern, this.options).toMMPattern();
    return fastTest ? Object.assign(re, { test: fastTest }) : re;
  }
  // Compile the whole pattern set into one RegExp (false when it can never
  // match). The result is cached on this.regexp.
  makeRe() {
    if (this.regexp || this.regexp === false)
      return this.regexp;
    const set = this.set;
    if (!set.length) {
      this.regexp = false;
      return this.regexp;
    }
    const options = this.options;
    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
    const flags = new Set(options.nocase ? ["i"] : []);
    let re = set.map((pattern) => {
      const pp = pattern.map((p) => {
        if (p instanceof RegExp) {
          for (const f of p.flags.split(""))
            flags.add(f);
        }
        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
      });
      pp.forEach((p, i) => {
        const next = pp[i + 1];
        const prev = pp[i - 1];
        if (p !== GLOBSTAR || prev === GLOBSTAR) {
          return;
        }
        if (prev === void 0) {
          if (next !== void 0 && next !== GLOBSTAR) {
            pp[i + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
          } else {
            pp[i] = twoStar;
          }
        } else if (next === void 0) {
          pp[i - 1] = prev + "(?:\\/|" + twoStar + ")?";
        } else if (next !== GLOBSTAR) {
          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
          pp[i + 1] = GLOBSTAR;
        }
      });
      return pp.filter((p) => p !== GLOBSTAR).join("/");
    }).join("|");
    const [open, close] = set.length > 1 ? ["(?:", ")"] : ["", ""];
    re = "^" + open + re + close + "$";
    if (this.negate)
      re = "^(?!" + re + ").+$";
    try {
      this.regexp = new RegExp(re, [...flags].join(""));
    } catch (ex) {
      // pattern compiled to an invalid regexp: treat as never-matching
      this.regexp = false;
    }
    return this.regexp;
  }
  // Split on "/" runs (preserving a leading "//" UNC marker on Windows).
  slashSplit(p) {
    if (this.preserveMultipleSlashes) {
      return p.split("/");
    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
      return ["", ...p.split(/\/+/)];
    } else {
      return p.split(/\/+/);
    }
  }
  // Test a path string against the compiled pattern set.
  match(f, partial = this.partial) {
    this.debug("match", f, this.pattern);
    if (this.comment) {
      return false;
    }
    if (this.empty) {
      return f === "";
    }
    if (f === "/" && partial) {
      return true;
    }
    const options = this.options;
    if (this.isWindows) {
      f = f.split("\\").join("/");
    }
    const ff = this.slashSplit(f);
    this.debug(this.pattern, "split", ff);
    const set = this.set;
    this.debug(this.pattern, "set", set);
    // basename used for matchBase: last non-empty segment
    let filename = ff[ff.length - 1];
    if (!filename) {
      for (let i = ff.length - 2; !filename && i >= 0; i--) {
        filename = ff[i];
      }
    }
    for (let i = 0; i < set.length; i++) {
      const pattern = set[i];
      let file = ff;
      if (options.matchBase && pattern.length === 1) {
        file = [filename];
      }
      const hit = this.matchOne(file, pattern, partial);
      if (hit) {
        if (options.flipNegate) {
          return true;
        }
        return !this.negate;
      }
    }
    if (options.flipNegate) {
      return false;
    }
    return this.negate;
  }
  static defaults(def) {
    return minimatch.defaults(def).Minimatch;
  }
};
// Attach the remaining public API onto the main minimatch function.
minimatch.AST = AST;
minimatch.Minimatch = Minimatch;
minimatch.escape = escape;
minimatch.unescape = unescape;
+
+// node_modules/it-glob/dist/src/index.js
/**
 * Yield paths under `dir` matching the glob `pattern` (it-glob).
 * Directories are walked recursively; a non-directory `dir` is yielded
 * itself when it matches. Paths are relative to options.cwd (or the
 * process cwd) unless options.absolute is true.
 */
async function* glob(dir, pattern, options = {}) {
  const absoluteDir = path2.resolve(dir);
  const relativeDir = path2.relative(options.cwd ?? process.cwd(), dir);
  const stats = await fs4.stat(absoluteDir);
  if (stats.isDirectory()) {
    yield* _glob(absoluteDir, "", pattern, options);
  } else if (minimatch(relativeDir, pattern, options)) {
    yield options.absolute === true ? absoluteDir : relativeDir;
  }
}
/**
 * Recursive worker for glob(): walk the directory `dir` (relative to
 * `base3`), yielding every entry whose relative path matches `pattern`.
 * Directories are suppressed from output when options.nodir is true but
 * are still descended into.
 */
async function* _glob(base3, dir, pattern, options) {
  const handle = await fs4.opendir(path2.join(base3, dir));
  for await (const entry of handle) {
    const relativeEntryPath = path2.join(dir, entry.name);
    const absoluteEntryPath = path2.join(base3, dir, entry.name);
    const isDirectory = entry.isDirectory();
    const matches = minimatch(relativeEntryPath, pattern, options) && !(isDirectory && options.nodir === true);
    if (matches) {
      yield options.absolute === true ? absoluteEntryPath : relativeEntryPath;
    }
    if (isDirectory) {
      yield* _glob(base3, relativeEntryPath, pattern, options);
    }
  }
}
+
+// node_modules/@helia/unixfs/dist/src/index.js
// @helia/unixfs entry point: a thin facade that forwards every UnixFS
// operation to the corresponding imported implementation function,
// binding it to this instance's blockstore.
var DefaultUnixFS = class {
  // Helia components; only `blockstore` is read by the methods below.
  components;
  constructor(components) {
    this.components = components;
  }
  // Import many files/directories; yields one entry per imported item.
  async *addAll(source, options = {}) {
    yield* addAll(source, this.components.blockstore, options);
  }
  async addBytes(bytes, options = {}) {
    return addBytes(bytes, this.components.blockstore, options);
  }
  async addByteStream(bytes, options = {}) {
    return addByteStream(bytes, this.components.blockstore, options);
  }
  async addFile(file, options = {}) {
    return addFile(file, this.components.blockstore, options);
  }
  async addDirectory(dir = {}, options = {}) {
    return addDirectory(dir, this.components.blockstore, options);
  }
  // Stream the file contents stored under `cid`.
  async *cat(cid, options = {}) {
    yield* cat(cid, this.components.blockstore, options);
  }
  async chmod(cid, mode, options = {}) {
    return chmod(cid, mode, this.components.blockstore, options);
  }
  async cp(source, target, name4, options = {}) {
    return cp(source, target, name4, this.components.blockstore, options);
  }
  // List the entries of the directory stored under `cid`.
  async *ls(cid, options = {}) {
    yield* ls(cid, this.components.blockstore, options);
  }
  async mkdir(cid, dirname, options = {}) {
    return mkdir(cid, dirname, this.components.blockstore, options);
  }
  async rm(cid, path6, options = {}) {
    return rm(cid, path6, this.components.blockstore, options);
  }
  async stat(cid, options = {}) {
    return stat(cid, this.components.blockstore, options);
  }
  async touch(cid, options = {}) {
    return touch(cid, this.components.blockstore, options);
  }
};
/**
 * Create a UnixFS API around a Helia node (any object exposing the
 * `blockstore` that DefaultUnixFS reads).
 * @param helia - object with a `blockstore` property
 * @returns a DefaultUnixFS instance
 */
function unixfs(helia) {
  return new DefaultUnixFS(helia);
}
+
+// node_modules/blockstore-fs/dist/src/index.js
+import fs5 from "node:fs/promises";
+import path4 from "node:path";
+import { promisify as promisify3 } from "node:util";
+
+// node_modules/blockstore-core/dist/src/errors.js
// blockstore-core error helpers re-exported as a namespace object; the
// bundler-generated __export wires each name up as a lazy getter.
var errors_exports = {};
__export(errors_exports, {
  abortedError: () => abortedError,
  closeFailedError: () => closeFailedError,
  deleteFailedError: () => deleteFailedError,
  getFailedError: () => getFailedError,
  hasFailedError: () => hasFailedError,
  notFoundError: () => notFoundError,
  openFailedError: () => openFailedError,
  putFailedError: () => putFailedError
});
var import_err_code16 = __toESM(require_err_code(), 1);
// Error factories for blockstore operations. Each one tags the supplied
// error (or a generic fallback when none is given) with a stable,
// machine-readable `.code` via the err-code package.
function openFailedError(err) {
  const cause = err == null ? new Error("Open failed") : err;
  return (0, import_err_code16.default)(cause, "ERR_OPEN_FAILED");
}
function closeFailedError(err) {
  const cause = err == null ? new Error("Close failed") : err;
  return (0, import_err_code16.default)(cause, "ERR_CLOSE_FAILED");
}
function putFailedError(err) {
  const cause = err == null ? new Error("Put failed") : err;
  return (0, import_err_code16.default)(cause, "ERR_PUT_FAILED");
}
function getFailedError(err) {
  const cause = err == null ? new Error("Get failed") : err;
  return (0, import_err_code16.default)(cause, "ERR_GET_FAILED");
}
function deleteFailedError(err) {
  const cause = err == null ? new Error("Delete failed") : err;
  return (0, import_err_code16.default)(cause, "ERR_DELETE_FAILED");
}
function hasFailedError(err) {
  const cause = err == null ? new Error("Has failed") : err;
  return (0, import_err_code16.default)(cause, "ERR_HAS_FAILED");
}
function notFoundError(err) {
  const cause = err == null ? new Error("Not Found") : err;
  return (0, import_err_code16.default)(cause, "ERR_NOT_FOUND");
}
function abortedError(err) {
  const cause = err == null ? new Error("Aborted") : err;
  return (0, import_err_code16.default)(cause, "ERR_ABORTED");
}
+
+// node_modules/blockstore-core/node_modules/@libp2p/logger/dist/src/index.js
var import_debug2 = __toESM(require_src2(), 1);
// Register libp2p's custom `debug` log formatters on the shared instance:
// %b — bytes rendered as base58btc
import_debug2.default.formatters.b = (v) => {
  return v == null ? "undefined" : base58btc2.baseEncode(v);
};
// %t — bytes rendered as base32
import_debug2.default.formatters.t = (v) => {
  return v == null ? "undefined" : base322.baseEncode(v);
};
// %m — bytes rendered as base64
import_debug2.default.formatters.m = (v) => {
  return v == null ? "undefined" : base64.baseEncode(v);
};
// %p, %c, %k, %a — all rendered via toString(); upstream these carry peer
// ids, CIDs, keys and multiaddrs respectively (presumably — nothing here
// distinguishes them beyond the format letter).
import_debug2.default.formatters.p = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug2.default.formatters.c = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug2.default.formatters.k = (v) => {
  return v == null ? "undefined" : v.toString();
};
import_debug2.default.formatters.a = (v) => {
  return v == null ? "undefined" : v.toString();
};
/**
 * Build a no-op logger that mimics the `debug` logger interface:
 * calling it (and its `log`) does nothing, `enabled` is false,
 * `extend()` returns the same disabled logger, and `destroy()`
 * reports success.
 */
function createDisabledLogger2(namespace) {
  const disabled = () => {
  };
  return Object.assign(disabled, {
    enabled: false,
    color: "",
    diff: 0,
    log: () => {
    },
    namespace,
    destroy: () => true,
    extend: () => disabled
  });
}
// Create a namespaced debug logger with `:error` and `:trace` sub-loggers.
// The trace logger stays a no-op unless `<name>:trace` is enabled AND some
// ":trace" namespace appears in debug's active name list, so trace output
// is strictly opt-in.
function logger2(name4) {
  let trace = createDisabledLogger2(`${name4}:trace`);
  if (import_debug2.default.enabled(`${name4}:trace`) && import_debug2.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null) {
    trace = (0, import_debug2.default)(`${name4}:trace`);
  }
  return Object.assign((0, import_debug2.default)(name4), {
    error: (0, import_debug2.default)(`${name4}:error`),
    trace
  });
}
+
+// node_modules/blockstore-core/dist/src/tiered.js
// Module-level logger for blockstore-core's tiered blockstore.
var log11 = logger2("blockstore:core:tiered");

// node_modules/blockstore-core/dist/src/index.js
// Public `Errors` namespace: aggregate of the error helpers above.
var Errors = {
  ...errors_exports
};

// node_modules/blockstore-fs/dist/src/index.js
// fast-write-atomic: callback-style atomic file writes (temp file + rename).
var import_fast_write_atomic = __toESM(require_fast_write_atomic(), 1);
+
+// node_modules/blockstore-fs/dist/src/sharding.js
+import path3 from "node:path";
/**
 * Sharding strategy for the filesystem blockstore: a block's file name is
 * its base-encoded multihash plus an extension, placed in a subdirectory
 * named after the last `prefixLength` characters of that encoding
 * ("next-to-last" sharding).
 */
var NextToLast = class {
  extension;
  prefixLength;
  base;
  constructor(init = {}) {
    this.extension = init.extension ?? ".data";
    this.prefixLength = init.prefixLength ?? 2;
    this.base = init.base ?? base32upper2;
  }
  /** Map a CID to its shard directory and file name. */
  encode(cid) {
    const encoded = this.base.encoder.encode(cid.multihash.bytes);
    const dir = encoded.substring(encoded.length - this.prefixLength);
    const file = `${encoded}${this.extension}`;
    return { dir, file };
  }
  /** Recover the CID from a stored file path. */
  decode(str) {
    const baseName = path3.basename(str);
    const encoded = baseName.endsWith(this.extension) ? baseName.substring(0, baseName.length - this.extension.length) : baseName;
    return CID2.decode(this.base.decoder.decode(encoded));
  }
};
+
+// node_modules/blockstore-fs/dist/src/index.js
// Promisified fast-write-atomic (the package exposes a callback API).
var writeAtomic = promisify3(import_fast_write_atomic.default);
/**
 * Atomically write `contents` to `file` via write-temp-then-rename.
 *
 * If the rename step fails with EPERM (seen when another process wrote the
 * same file concurrently — presumably Windows-specific; confirm upstream),
 * verify the destination exists and is writable and treat the write as a
 * success, since the block content is already in place. Any other error
 * propagates.
 */
async function writeFile(file, contents) {
  try {
    await writeAtomic(file, contents);
  } catch (err) {
    if (err.code === "EPERM" && err.syscall === "rename") {
      await fs5.access(file, fs5.constants.F_OK | fs5.constants.W_OK);
      return;
    }
    throw err;
  }
}
+// Filesystem-backed blockstore (bundled from blockstore-fs): each block is
+// persisted as one file under `path`, with the sharding strategy (default
+// NextToLast) mapping a CID to a { dir, file } pair.
+var FsBlockstore = class {
+  path;
+  createIfMissing;
+  errorIfExists;
+  putManyConcurrency;
+  getManyConcurrency;
+  deleteManyConcurrency;
+  shardingStrategy;
+  constructor(location, init = {}) {
+    this.path = path4.resolve(location);
+    this.createIfMissing = init.createIfMissing ?? true;
+    this.errorIfExists = init.errorIfExists ?? false;
+    this.deleteManyConcurrency = init.deleteManyConcurrency ?? 50;
+    this.getManyConcurrency = init.getManyConcurrency ?? 50;
+    this.putManyConcurrency = init.putManyConcurrency ?? 50;
+    this.shardingStrategy = init.shardingStrategy ?? new NextToLast();
+  }
+  // Verify the store directory exists and is writable, creating it when
+  // `createIfMissing` allows; throws openFailedError otherwise.
+  async open() {
+    try {
+      await fs5.access(this.path, fs5.constants.F_OK | fs5.constants.W_OK);
+      if (this.errorIfExists) {
+        throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} already exists`));
+      }
+    } catch (err) {
+      if (err.code === "ENOENT") {
+        if (this.createIfMissing) {
+          await fs5.mkdir(this.path, { recursive: true });
+          return;
+        } else {
+          throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} does not exist`));
+        }
+      }
+      throw err;
+    }
+  }
+  // Nothing to release; present to satisfy the blockstore interface.
+  async close() {
+    await Promise.resolve();
+  }
+  // Store `val` under `key`, creating the shard directory on demand.
+  // Returns the key; wraps any failure in putFailedError.
+  async put(key, val) {
+    const { dir, file } = this.shardingStrategy.encode(key);
+    try {
+      if (dir != null && dir !== "") {
+        await fs5.mkdir(path4.join(this.path, dir), {
+          recursive: true
+        });
+      }
+      await writeFile(path4.join(this.path, dir, file), val);
+      return key;
+    } catch (err) {
+      throw Errors.putFailedError(err);
+    }
+  }
+  // Store many { cid, block } pairs with bounded concurrency, yielding CIDs.
+  async *putMany(source) {
+    yield* parallelBatch(src_default3(source, ({ cid, block }) => {
+      return async () => {
+        await this.put(cid, block);
+        return cid;
+      };
+    }), this.putManyConcurrency);
+  }
+  // Read the block stored under `key`; wraps any failure in notFoundError.
+  async get(key) {
+    const { dir, file } = this.shardingStrategy.encode(key);
+    try {
+      return await fs5.readFile(path4.join(this.path, dir, file));
+    } catch (err) {
+      throw Errors.notFoundError(err);
+    }
+  }
+  // Fetch many keys with bounded concurrency, yielding { cid, block } pairs.
+  async *getMany(source) {
+    yield* parallelBatch(src_default3(source, (key) => {
+      return async () => {
+        return {
+          cid: key,
+          block: await this.get(key)
+        };
+      };
+    }), this.getManyConcurrency);
+  }
+  // Remove the block stored under `key`; a missing file is not an error.
+  async delete(key) {
+    const { dir, file } = this.shardingStrategy.encode(key);
+    try {
+      await fs5.unlink(path4.join(this.path, dir, file));
+    } catch (err) {
+      if (err.code === "ENOENT") {
+        return;
+      }
+      throw Errors.deleteFailedError(err);
+    }
+  }
+  // Delete many keys with bounded concurrency, yielding each deleted key.
+  async *deleteMany(source) {
+    yield* parallelBatch(src_default3(source, (key) => {
+      return async () => {
+        await this.delete(key);
+        return key;
+      };
+    }), this.deleteManyConcurrency);
+  }
+  /**
+   * Check for the existence of the given key
+   */
+  async has(key) {
+    const { dir, file } = this.shardingStrategy.encode(key);
+    try {
+      await fs5.access(path4.join(this.path, dir, file));
+    } catch (err) {
+      return false;
+    }
+    return true;
+  }
+  // Walk every stored block file (matched by the sharding extension) and
+  // yield { cid, block } pairs; files deleted mid-walk (ENOENT) are skipped.
+  async *getAll() {
+    const pattern = `**/*${this.shardingStrategy.extension}`.split(path4.sep).join("/");
+    const files = glob(this.path, pattern, {
+      absolute: true
+    });
+    for await (const file of files) {
+      try {
+        const buf2 = await fs5.readFile(file);
+        const pair = {
+          cid: this.shardingStrategy.decode(file),
+          block: buf2
+        };
+        yield pair;
+      } catch (err) {
+        if (err.code !== "ENOENT") {
+          throw err;
+        }
+      }
+    }
+  }
+};
+
+// src/objectManager.js
+import { createReadStream, createWriteStream } from "node:fs";
+import { mkdir as mkdir2, rm as rm2 } from "node:fs/promises";
+import os from "node:os";
+import path5 from "node:path";
+import { Readable } from "node:stream";
+import { v4 as uuidv4 } from "uuid";
+// Manages objects in a Filebase S3-compatible bucket: upload (including
+// packing multi-file input into a CAR), head/get, download (S3 or gateway),
+// list, delete and copy. Generated bundle of src/objectManager.js -- fix
+// defects in the source file, not here.
+var ObjectManager = class {
+  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
+  #DEFAULT_REGION = "us-east-1";
+  #DEFAULT_MAX_CONCURRENT_UPLOADS = 4;
+  #client;
+  #credentials;
+  #defaultBucket;
+  #gatewayConfiguration;
+  #maxConcurrentUploads;
+  /**
+   * @typedef {Object} objectManagerOptions Optional settings for the constructor.
+   * @property {string} [bucket] Default bucket to use.
+   * @property {objectDownloadOptions} [gateway] Default gateway to use.
+   * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads.
+   */
+  /**
+   * @typedef {Object} objectDownloadOptions Optional settings for downloading objects
+   * @property {string} endpoint Default gateway to use.
+   * @property {string} [token] Token for the default gateway.
+   * @property {number} [timeout=60000] Timeout for the default gateway
+   */
+  /**
+   * @summary Creates a new instance of the constructor.
+   * @param {string} clientKey - The access key ID for authentication.
+   * @param {string} clientSecret - The secret access key for authentication.
+   * @param {objectManagerOptions} options - Optional settings for the constructor.
+   * @tutorial quickstart-object
+   * @example
+   * import { ObjectManager } from "@filebase/sdk";
+   * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
+   *   bucket: "my-default-bucket",
+   *   maxConcurrentUploads: 4,
+   *   gateway: {
+   *     endpoint: "https://my-default-gateway.mydomain.com",
+   *     token: SUPER_SECRET_GATEWAY_TOKEN
+   *   }
+   * });
+   */
+  constructor(clientKey, clientSecret, options) {
+    var _a, _b, _c;
+    // NODE_ENV=test allows pointing the client at a local S3 emulator.
+    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = {
+      credentials: {
+        accessKeyId: clientKey,
+        secretAccessKey: clientSecret
+      },
+      endpoint: clientEndpoint,
+      region: this.#DEFAULT_REGION,
+      forcePathStyle: true
+    };
+    this.#defaultBucket = options == null ? void 0 : options.bucket;
+    this.#maxConcurrentUploads = (options == null ? void 0 : options.maxConcurrentUploads) || this.#DEFAULT_MAX_CONCURRENT_UPLOADS;
+    // NOTE(review): #credentials is stored but not read anywhere in this
+    // class -- confirm against src/objectManager.js whether it can be dropped.
+    this.#credentials = {
+      key: clientKey,
+      secret: clientSecret
+    };
+    this.#client = new S3Client2(clientConfiguration);
+    this.#gatewayConfiguration = {
+      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
+      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
+      timeout: (_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout
+    };
+  }
+  /**
+   * @typedef {Object} objectOptions
+   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
+   */
+  /**
+   * @typedef {Object} objectHeadResult
+   * @property {string} cid The CID of the uploaded object
+   * @property {function} download Convenience function to download the object via S3 or the selected gateway
+   * @property {array} [entries] If a directory then returns an array of the containing objects
+   * @property {string} entries.cid The CID of the uploaded object
+   * @property {string} entries.path The path of the object
+   */
+  /**
+   * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload.
+   * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file
+   * and an optional entries object when uploading a CAR file.
+   *
+   * @summary Uploads a file or a CAR file to the specified bucket.
+   * @param {string} key - The key or path of the file in the bucket.
+   * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded.
+   *    If an array of files is provided, each file should have a 'path' property specifying the path of the file
+   *    and a 'content' property specifying the content of the file.  The SDK will then construct a CAR file locally
+   *    and use that as the content of the object to be uploaded.
+   * @param {Object} [metadata] Optional metadata for pin object
+   * @param {objectOptions} [options] - The options for uploading the object.
+   * @returns {Promise}
+   * @example
+   * // Upload Object
+   * await objectManager.upload("my-object", Buffer.from("Hello World!"));
+   * // Upload Object with Metadata
+   * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), {
+   *   "application": "my-filebase-app"
+   * });
+   * // Upload Directory
+   * await objectManager.upload("my-first-directory", [
+   *  {
+   *   path: "/testObjects/1.txt",
+   *   content: Buffer.from("upload test object", "utf-8"),
+   *  },
+   *  {
+   *   path: "/testObjects/deep/1.txt",
+   *   content: Buffer.from("upload deep test object", "utf-8"),
+   *  },
+   *  {
+   *   path: "/topLevel.txt",
+   *   content: Buffer.from("upload top level test object", "utf-8"),
+   *  },
+   * ]);
+   */
+  async upload(key, source, metadata, options) {
+    // Unique id that names both the temporary blockstore dir and the UnixFS
+    // root directory all entries are nested under.
+    const uploadUUID = uuidv4();
+    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket, uploadOptions = {
+      client: this.#client,
+      params: {
+        Bucket: bucket,
+        Key: key,
+        Body: source,
+        Metadata: metadata || {}
+      },
+      queueSize: this.#maxConcurrentUploads,
+      partSize: 26843546
+      // 25.6 MiB multipart chunk; at S3's 10,000-part cap this allows objects up to ~250 GiB
+    };
+    let parsedEntries = {};
+    if (Array.isArray(source)) {
+      // Directory upload: build a CAR file locally via a temporary
+      // FsBlockstore, then upload the CAR with the import=car marker.
+      uploadOptions.params.Metadata = {
+        ...uploadOptions.params.Metadata,
+        import: "car"
+      };
+      let temporaryCarFilePath, temporaryBlockstoreDir;
+      try {
+        temporaryBlockstoreDir = path5.resolve(
+          os.tmpdir(),
+          "filebase-sdk",
+          "uploads",
+          uploadUUID
+        );
+        temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
+        await mkdir2(temporaryBlockstoreDir, { recursive: true });
+        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir);
+        const heliaFs = unixfs({
+          blockstore: temporaryBlockstore
+        });
+        // Nest every entry under /<uploadUUID> so the UnixFS root directory
+        // (looked up below as parsedEntries[uploadUUID]) covers all files.
+        // NOTE(review): this mutates the caller-supplied source entries.
+        for (let sourceEntry of source) {
+          sourceEntry.path = sourceEntry.path[0] === "/" ? `/${uploadUUID}${sourceEntry.path}` : `/${uploadUUID}/${sourceEntry.path}`;
+        }
+        for await (const entry of heliaFs.addAll(source)) {
+          parsedEntries[entry.path] = entry;
+        }
+        const rootEntry = parsedEntries[uploadUUID];
+        const carExporter = car({ blockstore: temporaryBlockstore }), { writer, out } = CarWriter2.create([rootEntry.cid]);
+        const output = createWriteStream(temporaryCarFilePath);
+        Readable.from(out).pipe(output);
+        await carExporter.export(rootEntry.cid, writer);
+        // NOTE(review): the pipe to `output` is not awaited (e.g. via
+        // stream.finished), so the CAR file may not be fully flushed when it
+        // is re-read here -- confirm and fix in src/objectManager.js.
+        uploadOptions.params.Body = createReadStream(temporaryCarFilePath);
+        const parallelUploads3 = new Upload(uploadOptions);
+        await parallelUploads3.done();
+        await temporaryBlockstore.close();
+      } finally {
+        // Always remove the temporary blockstore/CAR scratch directory.
+        if (typeof temporaryBlockstoreDir !== "undefined") {
+          await rm2(temporaryBlockstoreDir, { recursive: true, force: true });
+        }
+      }
+    } else {
+      const parallelUploads3 = new Upload(uploadOptions);
+      await parallelUploads3.done();
+    }
+    // Read back the object's metadata to obtain the CID assigned by Filebase.
+    // NOTE(review): HeadObjectCommand takes no Body parameter; the
+    // `Body: source` member below is ignored and should be removed in
+    // src/objectManager.js.
+    const command = new HeadObjectCommand({
+      Bucket: bucket,
+      Key: key,
+      Body: source
+    }), headResult = await this.#client.send(command), responseCid = headResult.Metadata.cid;
+    if (Object.keys(parsedEntries).length === 0) {
+      return {
+        cid: responseCid,
+        download: () => {
+          return this.#routeDownload(responseCid, key, options);
+        }
+      };
+    }
+    return {
+      cid: responseCid,
+      download: () => {
+        return this.#routeDownload(responseCid, key, options);
+      },
+      entries: parsedEntries
+    };
+  }
+  // Route a download through the configured gateway when one is set,
+  // otherwise fall back to fetching the object via the S3 API.
+  async #routeDownload(cid, key, options) {
+    return typeof this.#gatewayConfiguration.endpoint !== "undefined" ? downloadFromGateway(cid, this.#gatewayConfiguration) : this.download(key, options);
+  }
+  /**
+   * @summary Gets an objects info and metadata using the S3 API.
+   * @param {string} key - The key of the object to be inspected.
+   * @param {objectOptions} [options] - The options for inspecting the object.
+   * @returns {Promise} HeadObject response augmented with a download()
+   *    convenience function, or false when the object does not exist.
+   */
+  async get(key, options) {
+    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket;
+    try {
+      const command = new HeadObjectCommand({
+        Bucket: bucket,
+        Key: key
+      }), response = await this.#client.send(command);
+      response.download = () => {
+        return this.#routeDownload(response.Metadata.cid, key, options);
+      };
+      return response;
+    } catch (err) {
+      // A missing key is reported as `false` rather than thrown.
+      if (err.name === "NotFound") {
+        return false;
+      }
+      throw err;
+    }
+  }
+  /**
+   * @summary Downloads an object from the specified bucket using the provided key.
+   * @param {string} key - The key of the object to be downloaded.
+   * @param {objectOptions} [options] - The options for downloading the object..
+   * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream.
+   * @example
+   * // Download object with name of `download-object-example`
+   * await objectManager.download(`download-object-example`);
+   */
+  async download(key, options) {
+    // With a gateway configured, resolve the key to its CID first (via get)
+    // and let the returned download() route through the gateway.
+    if (typeof this.#gatewayConfiguration.endpoint === "string") {
+      const objectToFetch = await this.get(key, options);
+      return objectToFetch.download();
+    } else {
+      const command = new GetObjectCommand({
+        Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
+        Key: key
+      }), response = await this.#client.send(command);
+      return response.Body;
+    }
+  }
+  /**
+   * @typedef {Object} listObjectsResult
+   * @property {boolean} IsTruncated Indicates if more results exist on the server
+   * @property {string} NextContinuationToken ContinuationToken used to paginate list requests
+   * @property {Array} Contents List of Keys stored in the S3 Bucket
+   * @property {string} Contents.Key Key of the Object
+   * @property {string} Contents.LastModified Date Last Modified of the Object
+   * @property {string} Contents.CID CID of the Object
+   * @property {string} Contents.ETag ETag of the Object
+   * @property {number} Contents.Size Size in Bytes of the Object
+   * @property {string} Contents.StorageClass Class of Storage of the Object
+   * @property {function} Contents.download Convenience function to download the item using the S3 gateway
+   */
+  /**
+   * @typedef {Object} listObjectOptions
+   * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used.
+   * @property {string} [ContinuationToken=null] Continues listing from this objects name.
+   * @property {string} [Delimiter=null] Character used to group keys
+   * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000.
+   */
+  /**
+   * Retrieves a list of objects from a specified bucket.
+   *
+   * @param {listObjectOptions} options - The options for listing objects.
+   * @returns {Promise} - A promise that resolves to an array of objects.
+   * @throws {Error} When MaxKeys exceeds the 100000 limit.
+   * @example
+   * // List objects in bucket with a limit of 1000
+   * await objectManager.list({
+   *   MaxKeys: 1000
+   * });
+   */
+  async list(options = {
+    Bucket: this.#defaultBucket,
+    ContinuationToken: null,
+    Delimiter: null,
+    MaxKeys: 1e3
+  }) {
+    if ((options == null ? void 0 : options.MaxKeys) && options.MaxKeys > 1e5) {
+      throw new Error(`MaxKeys Maximum value is 100000`);
+    }
+    // commandOptions is spread last so Bucket/MaxKeys defaults win over
+    // whatever the caller passed for those two fields.
+    const bucket = (options == null ? void 0 : options.Bucket) || this.#defaultBucket, limit = (options == null ? void 0 : options.MaxKeys) || 1e3, commandOptions = {
+      Bucket: bucket,
+      MaxKeys: limit
+    }, command = new ListObjectsV2Command({
+      ...options,
+      ...commandOptions
+    });
+    const { Contents, IsTruncated, NextContinuationToken } = await this.#client.send(command);
+    return { Contents, IsTruncated, NextContinuationToken };
+  }
+  /**
+   * @summary Deletes an object from the specified bucket using the provided key.
+   * @param {string} key - The key of the object to be deleted.
+   * @param {objectOptions} [options] - The options for deleting the file.
+   * @returns {Promise} - A Promise that resolves with the result of the delete operation.
+   * @example
+   * // Delete object with name of `delete-object-example`
+   * await objectManager.delete(`delete-object-example`);
+   */
+  async delete(key, options) {
+    const command = new DeleteObjectCommand({
+      Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
+      Key: key
+    });
+    await this.#client.send(command);
+    return true;
+  }
+  /**
+   * @typedef {Object} copyObjectOptions
+   * @property {string} [sourceBucket] The source bucket from where the object is to be copied.
+   * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey.
+   */
+  /**
+   * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey.
+   *
+   * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket.
+   * @param {string} sourceKey - The key of the object to be copied from the sourceBucket.
+   * @param {string} destinationBucket - The bucket where the object will be copied to.
+   * @param {copyObjectOptions} [options] - Additional options for the copy operation.
+   *
+   * @returns {Promise} - A Promise that resolves with the result of the copy operation.
+   * @example
+   * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
+   * // TIP: Set bucket on constructor and it will be used as the default source for copying objects.
+   * await objectManager.copy(`copy-object-test`, `copy-object-dest`, {
+   *   sourceBucket: `copy-object-src`
+   * });
+   */
+  async copy(sourceKey, destinationBucket, options = {
+    sourceBucket: this.#defaultBucket,
+    destinationKey: void 0
+  }) {
+    const copySource = `${(options == null ? void 0 : options.sourceBucket) || this.#defaultBucket}/${sourceKey}`, command = new CopyObjectCommand({
+      CopySource: copySource,
+      Bucket: destinationBucket,
+      Key: (options == null ? void 0 : options.destinationKey) || sourceKey
+    });
+    await this.#client.send(command);
+    return true;
+  }
+};
+var objectManager_default = ObjectManager;
+
+// src/pinManager.js
+import axios4 from "axios";
+// Client for the Filebase IPFS Pinning Service API (list/create/replace/
+// get/delete pins, plus gateway downloads). Generated bundle of
+// src/pinManager.js -- fix defects in the source file, not here.
+var PinManager = class {
+  #DEFAULT_ENDPOINT = "https://api.filebase.io";
+  #DEFAULT_TIMEOUT = 6e4;
+  #client;
+  #credentials;
+  #gatewayConfiguration;
+  #defaultBucket;
+  /**
+   * @typedef {Object} pinManagerOptions Optional settings for the constructor.
+   * @property {string} [bucket] Default bucket to use.
+   * @property {pinDownloadOptions} [gateway] Default gateway to use.
+   */
+  /**
+   * @typedef {Object} pinDownloadOptions Optional settings for downloading pins
+   * @property {string} endpoint Default gateway to use.
+   * @property {string} [token] Token for the default gateway.
+   * @property {number} [timeout=60000] Timeout for the default gateway
+   */
+  /**
+   * @summary Creates a new instance of the constructor.
+   * @param {string} clientKey - The access key ID for authentication.
+   * @param {string} clientSecret - The secret access key for authentication.
+   * @param {pinManagerOptions} [options] - Optional settings for the constructor.
+   * @tutorial quickstart-pin
+   * @example
+   * import { PinManager } from "@filebase/sdk";
+   * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
+   *   bucket: "my-default-bucket",
+   *   gateway: {
+   *     endpoint: "https://my-default-gateway.mydomain.com",
+   *     token: SUPER_SECRET_GATEWAY_TOKEN
+   *   }
+   * });
+   */
+  constructor(clientKey, clientSecret, options) {
+    var _a, _b, _c;
+    this.#defaultBucket = options == null ? void 0 : options.bucket;
+    // NODE_ENV=test allows pointing the client at a local API emulator.
+    const PSAClientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`;
+    // Key/secret are kept to mint per-request bearer tokens (see #getEncodedToken).
+    this.#credentials = {
+      key: clientKey,
+      secret: clientSecret
+    };
+    this.#client = axios4.create({
+      baseURL,
+      timeout: this.#DEFAULT_TIMEOUT
+    });
+    this.#gatewayConfiguration = {
+      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
+      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
+      timeout: ((_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout) || this.#DEFAULT_TIMEOUT
+    };
+  }
+  /**
+   * @typedef {Object} pinStatus
+   * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal
+   * @property {string} status Status a pin object can have at a pinning service. ("queued","pinning","pinned","failed")
+   * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination
+   * @property {Object} pin Pin object
+   * @property {string} pin.cid Content Identifier (CID) pinned recursively
+   * @property {string} pin.name Name for pinned data; can be used for lookups later
+   * @property {Array} pin.origins Optional list of multiaddrs known to provide the data
+   * @property {Object} pin.meta Optional metadata for pin object
+   * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data
+   * @property {object} [info] Optional info for PinStatus response
+   * @property {function} download Convenience function to download pin
+   */
+  /**
+   * @typedef {Object} pinOptions
+   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
+   */
+  /**
+   * @typedef {Object} listPinOptions
+   * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts
+   * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match)
+   * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies
+   * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only)
+   * @property {string} [before] Return results created (queued) before provided timestamp
+   * @property {string} [after] Return results created (queued) after provided timestamp
+   * @property {number} [limit] Max records to return
+   * @property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport
+   */
+  /**
+   * @typedef {Object} listPinResults
+   * @property {number} count Total number of pin objects that exist for passed query filters
+   * @property {Array} Array of PinStatus results
+   */
+  /**
+   * @summary List the pins in a given bucket
+   * @param {listPinOptions} [listOptions]
+   * @param {pinOptions} [options]
+   * @returns {Promise}
+   * @example
+   * // List pins in bucket with a limit of 1000
+   * await pinManager.list({
+   *   limit: 1000
+   * });
+   */
+  async list(listOptions, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponse = await this.#client.request({
+        method: "GET",
+        params: listOptions,
+        headers: { Authorization: `Bearer ${encodedToken}` }
+      });
+      // Attach a download() helper to every returned pinStatus.
+      for (let pinStatus of getResponse.data.results) {
+        pinStatus.download = () => {
+          return this.download(pinStatus.pin.cid);
+        };
+      }
+      return getResponse.data;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @summary Create a pin in the selected bucket
+   * @param {string} key Key or path of the file in the bucket
+   * @param {string} cid Content Identifier (CID) to be pinned recursively
+   * @param {Object} [metadata] Optional metadata for pin object
+   * @param {pinOptions} [options] Options for pinning the object
+   * @returns {Promise}
+   * @example
+   * // Create Pin with Metadata
+   * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
+   *   "application": "my-custom-app-on-filebase"
+   * });
+   */
+  async create(key, cid, metadata, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatus = await this.#client.request({
+        method: "POST",
+        data: {
+          cid,
+          name: key,
+          meta: metadata
+        },
+        headers: { Authorization: `Bearer ${encodedToken}` }
+      });
+      pinStatus.data.download = () => {
+        return this.download(pinStatus.data.pin.cid);
+      };
+      return pinStatus.data;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @typedef {Object} replacePinOptions
+   * @augments pinOptions
+   * @property {Object} [metadata] Optional metadata to set on pin during replacement
+   * @property {string} [name] Optional name for pin to set during replacement
+   */
+  /**
+   * @summary Replace a pinned object in the selected bucket
+   * @param {string} requestid Unique ID for the pinned object
+   * @param {string} cid Content Identifier (CID) to be pinned recursively
+   * @param {replacePinOptions} [options] Options for pinning the object
+   * @returns {Promise}
+   * @example
+   * // Replace Pin with Metadata
+   * await pinManager.replace("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
+   *   metadata: { "revision": Date.now() }
+   * });
+   */
+  async replace(requestid, cid, options) {
+    try {
+      let replaceData = {
+        cid,
+        meta: (options == null ? void 0 : options.metadata) || {}
+      };
+      if (options == null ? void 0 : options.name) {
+        replaceData.name = options.name;
+      }
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatusResult = await this.#client.request({
+        method: "POST",
+        url: `/${requestid}`,
+        data: replaceData,
+        validateStatus: (status) => {
+          return status === 200;
+        },
+        headers: { Authorization: `Bearer ${encodedToken}` }
+      });
+      const pinStatus = pinStatusResult.data;
+      pinStatus.download = () => {
+        return this.download(pinStatus.pin.cid);
+      };
+      return pinStatus;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @summary Download a pin from the selected IPFS gateway
+   * @param {string} cid
+   * @param {pinDownloadOptions} [options]
+   * @returns {Promise}
+   * @example
+   * // Download Pin by CID
+   * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF");
+   */
+  async download(cid, options) {
+    // NOTE(review): Object.assign mutates this.#gatewayConfiguration in
+    // place, so per-call options permanently overwrite the instance defaults.
+    // Should be Object.assign({}, this.#gatewayConfiguration, options) --
+    // fix in src/pinManager.js.
+    const downloadOptions = Object.assign(this.#gatewayConfiguration, options);
+    return downloadFromGateway(cid, downloadOptions);
+  }
+  /**
+   * @summary Get details about a pinned object
+   * @param {string} requestid Globally unique identifier of the pin request
+   * @param {pinOptions} [options] Options for getting the pin
+   * @returns {Promise} pinStatus for the request, or false when not found
+   * @example
+   * // Get Pin Info by RequestId
+   * await pinManager.get("qr4231214");
+   */
+  async get(requestid, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponseResult = await this.#client.request({
+        method: "GET",
+        url: `/${requestid}`,
+        headers: { Authorization: `Bearer ${encodedToken}` },
+        validateStatus: (status) => {
+          return status === 200 || status === 404;
+        }
+      });
+      // 404 is reported as `false` rather than thrown.
+      if (getResponseResult.status === 404) {
+        return false;
+      }
+      const pinStatus = getResponseResult.data;
+      pinStatus.download = () => {
+        return this.download(pinStatus.pin.cid);
+      };
+      return pinStatus;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  /**
+   * @summary Delete a pinned object from the selected bucket
+   * @param requestid Globally unique identifier of the pin request
+   * @param {pinOptions} [options] Options for deleting the pin
+   * @returns {Promise}
+   * @example
+   * // Delete Pin by RequestId
+   * await pinManager.delete("qr4231213");
+   */
+  async delete(requestid, options) {
+    try {
+      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket);
+      await this.#client.request({
+        method: "DELETE",
+        url: `/${requestid}`,
+        headers: { Authorization: `Bearer ${encodedToken}` },
+        validateStatus: (status) => {
+          return status === 202;
+        }
+      });
+      return true;
+    } catch (err) {
+      apiErrorHandler(err);
+    }
+  }
+  // Build the Filebase bearer token: base64("key:secret:bucket"), falling
+  // back to the instance's default bucket when none is given.
+  #getEncodedToken(bucket) {
+    bucket = bucket || this.#defaultBucket;
+    return Buffer.from(
+      `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`
+    ).toString("base64");
+  }
+};
+var pinManager_default = PinManager;
+export {
+  bucketManager_default as BucketManager,
+  gatewayManager_default as GatewayManager,
+  nameManager_default as NameManager,
+  objectManager_default as ObjectManager,
+  pinManager_default as PinManager
+};
diff --git a/package.json b/package.json
index 0cae379..41e297c 100644
--- a/package.json
+++ b/package.json
@@ -7,22 +7,22 @@
     "url": "git+https://github.com/filebase/filebase-sdk.git"
   },
   "license": "MIT",
-  "type": "module",
+  "main": "./dist/index.js",
+  "module": "./dist/index.mjs",
+  "types": "./dist/index.d.ts",
   "exports": {
     ".": {
+      "types": "./dist/index.d.ts",
       "import": "./dist/index.mjs",
-      "require": "./dist/index.cjs"
+      "require": "./dist/index.js"
     }
   },
-  "files": [
-    "dist"
-  ],
   "engines": {
     "node": ">=16.0.0",
     "npm": ">=8.0.0"
   },
   "scripts": {
-    "build": "unbuild",
+    "build": "tsup src/index.js --format cjs,esm --dts --clean",
     "test": "node --test",
     "doc": "jsdoc -c jsdoc.json"
   },
@@ -40,8 +40,8 @@
     "clean-jsdoc-theme": "4.2.17",
     "jsdoc": "4.0.2",
     "prettier": "3.1.0",
-    "typescript": "5.3.3",
-    "unbuild": "2.0.0"
+    "tsup": "^8.0.1",
+    "typescript": "5.3.3"
   },
   "dependencies": {
     "@aws-sdk/client-s3": "3.478.0",
diff --git a/tsup.config.js b/tsup.config.js
new file mode 100644
index 0000000..2aa3a3b
--- /dev/null
+++ b/tsup.config.js
@@ -0,0 +1,11 @@
+import { defineConfig } from 'tsup'
+
+// tsup build configuration: bundles src/index.js into dist/ as both CJS and
+// ESM outputs with generated .d.ts declarations.
+export default defineConfig({
+  entry: ['src/index.js'],
+  splitting: false,
+  sourcemap: false,
+  // Inline these ESM-only IPFS/Helia packages so the CJS build can load them.
+  noExternal: ['@ipld/car', '@helia/car', '@helia/unixfs', 'blockstore-fs'],
+  dts: true,
+  format: ['cjs', 'esm'],
+  clean: true,
+})
diff --git a/yarn.lock b/yarn.lock
index 074e4c1..771fb84 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2,22 +2,14 @@
 # yarn lockfile v1
 
 
-"@ampproject/remapping@^2.2.0":
-  version "2.2.1"
-  resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630"
-  integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==
-  dependencies:
-    "@jridgewell/gen-mapping" "^0.3.0"
-    "@jridgewell/trace-mapping" "^0.3.9"
-
 "@assemblyscript/loader@^0.9.4":
   version "0.9.4"
-  resolved "https://registry.yarnpkg.com/@assemblyscript/loader/-/loader-0.9.4.tgz#a483c54c1253656bb33babd464e3154a173e1577"
+  resolved "https://registry.npmjs.org/@assemblyscript/loader/-/loader-0.9.4.tgz"
   integrity sha512-HazVq9zwTVwGmqdwYzu7WyQ6FQVZ7SwET0KKQuKm55jD0IfUpZgN0OPIiZG3zV1iSrVYcN0bdwLRXI/VNCYsUA==
 
 "@aws-crypto/crc32@3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-3.0.0.tgz#07300eca214409c33e3ff769cd5697b57fdd38fa"
+  resolved "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-3.0.0.tgz"
   integrity sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==
   dependencies:
     "@aws-crypto/util" "^3.0.0"
@@ -26,7 +18,7 @@
 
 "@aws-crypto/crc32c@3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/crc32c/-/crc32c-3.0.0.tgz#016c92da559ef638a84a245eecb75c3e97cb664f"
+  resolved "https://registry.npmjs.org/@aws-crypto/crc32c/-/crc32c-3.0.0.tgz"
   integrity sha512-ENNPPManmnVJ4BTXlOjAgD7URidbAznURqD0KvfREyc4o20DPYdEldU1f5cQ7Jbj0CJJSPaMIk/9ZshdB3210w==
   dependencies:
     "@aws-crypto/util" "^3.0.0"
@@ -35,14 +27,14 @@
 
 "@aws-crypto/ie11-detection@^3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz#640ae66b4ec3395cee6a8e94ebcd9f80c24cd688"
+  resolved "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz"
   integrity sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==
   dependencies:
     tslib "^1.11.1"
 
 "@aws-crypto/sha1-browser@3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/sha1-browser/-/sha1-browser-3.0.0.tgz#f9083c00782b24714f528b1a1fef2174002266a3"
+  resolved "https://registry.npmjs.org/@aws-crypto/sha1-browser/-/sha1-browser-3.0.0.tgz"
   integrity sha512-NJth5c997GLHs6nOYTzFKTbYdMNA6/1XlKVgnZoaZcQ7z7UJlOgj2JdbHE8tiYLS3fzXNCguct77SPGat2raSw==
   dependencies:
     "@aws-crypto/ie11-detection" "^3.0.0"
@@ -55,7 +47,7 @@
 
 "@aws-crypto/sha256-browser@3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz#05f160138ab893f1c6ba5be57cfd108f05827766"
+  resolved "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz"
   integrity sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==
   dependencies:
     "@aws-crypto/ie11-detection" "^3.0.0"
@@ -67,9 +59,9 @@
     "@aws-sdk/util-utf8-browser" "^3.0.0"
     tslib "^1.11.1"
 
-"@aws-crypto/sha256-js@3.0.0", "@aws-crypto/sha256-js@^3.0.0":
+"@aws-crypto/sha256-js@^3.0.0", "@aws-crypto/sha256-js@3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz#f06b84d550d25521e60d2a0e2a90139341e007c2"
+  resolved "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz"
   integrity sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==
   dependencies:
     "@aws-crypto/util" "^3.0.0"
@@ -78,23 +70,23 @@
 
 "@aws-crypto/supports-web-crypto@^3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz#5d1bf825afa8072af2717c3e455f35cda0103ec2"
+  resolved "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz"
   integrity sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==
   dependencies:
     tslib "^1.11.1"
 
 "@aws-crypto/util@^3.0.0":
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-3.0.0.tgz#1c7ca90c29293f0883468ad48117937f0fe5bfb0"
+  resolved "https://registry.npmjs.org/@aws-crypto/util/-/util-3.0.0.tgz"
   integrity sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==
   dependencies:
     "@aws-sdk/types" "^3.222.0"
     "@aws-sdk/util-utf8-browser" "^3.0.0"
     tslib "^1.11.1"
 
-"@aws-sdk/client-s3@3.478.0":
+"@aws-sdk/client-s3@^3.0.0", "@aws-sdk/client-s3@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.478.0.tgz#072a3dbdc94d3d20db0f2e4cb641567949c46e54"
+  resolved "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.478.0.tgz"
   integrity sha512-OUpbCCnK71lQQ07BohJOx9ZER0rPqRAGOVIIVhNEkeN0uYFLzB7/o5a7+FEPUQXEd5rZRZgbxN5xEmnNW/0Waw==
   dependencies:
     "@aws-crypto/sha1-browser" "3.0.0"
@@ -158,7 +150,7 @@
 
 "@aws-sdk/client-sso@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.478.0.tgz#6e9bf99898b4601c792c5b66e4871c6b69f20652"
+  resolved "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.478.0.tgz"
   integrity sha512-Jxy9cE1JMkPR0PklCpq3cORHnZq/Z4klhSTNGgZNeBWovMa+plor52kyh8iUNHKl3XEJvTbHM7V+dvrr/x0P1g==
   dependencies:
     "@aws-crypto/sha256-browser" "3.0.0"
@@ -201,7 +193,7 @@
 
 "@aws-sdk/client-sts@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.478.0.tgz#8d7425803a3d7b8af3eba37f56e14a0259eb0557"
+  resolved "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.478.0.tgz"
   integrity sha512-D+QID0dYzmn9dcxgKP3/nMndUqiQbDLsqI0Zf2pG4MW5gPhVNKlDGIV3Ztz8SkMjzGJExNOLW2L569o8jshJVw==
   dependencies:
     "@aws-crypto/sha256-browser" "3.0.0"
@@ -247,7 +239,7 @@
 
 "@aws-sdk/core@3.477.0":
   version "3.477.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/core/-/core-3.477.0.tgz#2fa8ae4e19b8082b019a56efbce859c93df4e205"
+  resolved "https://registry.npmjs.org/@aws-sdk/core/-/core-3.477.0.tgz"
   integrity sha512-o0434EH+d1BxHZvgG7z8vph2SYefciQ5RnJw2MgvETGnthgqsnI4nnNJLSw0FVeqCeS18n6vRtzqlGYR2YPCNg==
   dependencies:
     "@smithy/core" "^1.2.0"
@@ -259,7 +251,7 @@
 
 "@aws-sdk/credential-provider-env@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.468.0.tgz#4196d717d3f5485af863bd1fd84374ea3dcd6210"
+  resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.468.0.tgz"
   integrity sha512-k/1WHd3KZn0EQYjadooj53FC0z24/e4dUZhbSKTULgmxyO62pwh9v3Brvw4WRa/8o2wTffU/jo54tf4vGuP/ZA==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -269,7 +261,7 @@
 
 "@aws-sdk/credential-provider-ini@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.478.0.tgz#452195c28b7138e26b5220ce9dfcc8cf9e9cca7d"
+  resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.478.0.tgz"
   integrity sha512-SsrYEYUvTG9ZoPC+zB19AnVoOKID+QIEHJDIi1GCZXW5kTVyr1saTVm4orG2TjYvbHQMddsWtHOvGYXZWAYMbw==
   dependencies:
     "@aws-sdk/credential-provider-env" "3.468.0"
@@ -285,7 +277,7 @@
 
 "@aws-sdk/credential-provider-node@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.478.0.tgz#43e8c7bfeb32c561c642bb2f4bffcbe0f483bc29"
+  resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.478.0.tgz"
   integrity sha512-nwDutJYeHiIZCQDgKIUrsgwAWTil0mNe+cbd+j8fi+wwxkWUzip+F0+z02molJ8WrUUKNRhqB1V5aVx7IranuA==
   dependencies:
     "@aws-sdk/credential-provider-env" "3.468.0"
@@ -302,7 +294,7 @@
 
 "@aws-sdk/credential-provider-process@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.468.0.tgz#770ed72db036c5d011445e5abf4a4bcc4424c486"
+  resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.468.0.tgz"
   integrity sha512-OYSn1A/UsyPJ7Z8Q2cNhTf55O36shPmSsvOfND04nSfu1nPaR+VUvvsP7v+brhGpwC/GAKTIdGAo4blH31BS6A==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -313,7 +305,7 @@
 
 "@aws-sdk/credential-provider-sso@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.478.0.tgz#90e6aa0db7e4d20a42fc0d842cfae8f2f10b9483"
+  resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.478.0.tgz"
   integrity sha512-LsDShG51X/q+s5ZFN7kHVqrd8ZHdyEyHqdhoocmRvvw2Dif50M0AqQfvCrW1ndj5CNzXO4x/eH8EK5ZOVlS6Sg==
   dependencies:
     "@aws-sdk/client-sso" "3.478.0"
@@ -326,7 +318,7 @@
 
 "@aws-sdk/credential-provider-web-identity@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.468.0.tgz#5befcb593d99a84e16af9e9f285f0d59ed42771f"
+  resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.468.0.tgz"
   integrity sha512-rexymPmXjtkwCPfhnUq3EjO1rSkf39R4Jz9CqiM7OsqK2qlT5Y/V3gnMKn0ZMXsYaQOMfM3cT5xly5R+OKDHlw==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -336,7 +328,7 @@
 
 "@aws-sdk/lib-storage@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/lib-storage/-/lib-storage-3.478.0.tgz#68a8e2b61e900ef0289fa0b14d1ea6ee8b9e6f29"
+  resolved "https://registry.npmjs.org/@aws-sdk/lib-storage/-/lib-storage-3.478.0.tgz"
   integrity sha512-R1D+hX8Lea9mBZqj2A4hWkSNGCTv5pCsX/+fmfnFtmKL4gPn2MZ1GtnI6Rnk0zyTrQL94SpVc6CcEvhWsm5oiQ==
   dependencies:
     "@smithy/abort-controller" "^2.0.1"
@@ -349,7 +341,7 @@
 
 "@aws-sdk/middleware-bucket-endpoint@3.470.0":
   version "3.470.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.470.0.tgz#76a6dde27e791ec8fad798dd5d53789b876498c3"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.470.0.tgz"
   integrity sha512-vLXXNWtsRmEIwzJ9HUQfIuTNAsEzvCv0Icsnkvt2BiBZXnmHdp2vIC3e3+kfy1D7dVQloXqMmnfcLu/BUMu2Jw==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -362,7 +354,7 @@
 
 "@aws-sdk/middleware-expect-continue@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.468.0.tgz#664f7f1238e7bfb633cd44753f8cfb1a62ac624a"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.468.0.tgz"
   integrity sha512-/wmLjmfgeulxhhmnxX3X3N933TvGsYckVIFjAtDSpLjqkbwzEcNiLq7AdmNJ4BfxG0MCMgcht561DCCD19x8Bg==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -372,7 +364,7 @@
 
 "@aws-sdk/middleware-flexible-checksums@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.468.0.tgz#96e26042e61724a4981edb3ba3fd2af280df57b6"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.468.0.tgz"
   integrity sha512-LQwL/N5MCj3Y5keLLewHTqeAXUIMsHFZyxDXRm/uxrOon9ufLKDvGvzAmfwn1/CuSUo66ZfT8VPSA4BsC90RtA==
   dependencies:
     "@aws-crypto/crc32" "3.0.0"
@@ -386,7 +378,7 @@
 
 "@aws-sdk/middleware-host-header@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.468.0.tgz#6da7b19032e9afccea54fbf8aa10cccd2f817bcf"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.468.0.tgz"
   integrity sha512-gwQ+/QhX+lhof304r6zbZ/V5l5cjhGRxLL3CjH1uJPMcOAbw9wUlMdl+ibr8UwBZ5elfKFGiB1cdW/0uMchw0w==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -396,7 +388,7 @@
 
 "@aws-sdk/middleware-location-constraint@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.468.0.tgz#cc9ebcdabed96414fc91f4a39b3b7c08e6374187"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.468.0.tgz"
   integrity sha512-0gBX/lDynQr4YIhM9h1dVnkVWqrg+34iOCVIUq8jHxzUzgZWglGkG9lHGGg0r1xkLTmegeoo1OKH8wrQ6n33Cg==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -405,7 +397,7 @@
 
 "@aws-sdk/middleware-logger@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.468.0.tgz#a1883fb7ad8e156444d30689de4ab897357ef1d8"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.468.0.tgz"
   integrity sha512-X5XHKV7DHRXI3f29SAhJPe/OxWRFgDWDMMCALfzhmJfCi6Jfh0M14cJKoC+nl+dk9lB+36+jKjhjETZaL2bPlA==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -414,7 +406,7 @@
 
 "@aws-sdk/middleware-recursion-detection@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.468.0.tgz#85b05636a5c2638bf9e15c8b6be17654757e1bf4"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.468.0.tgz"
   integrity sha512-vch9IQib2Ng9ucSyRW2eKNQXHUPb5jUPCLA5otTW/8nGjcOU37LxQG4WrxO7uaJ9Oe8hjHO+hViE3P0KISUhtA==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -424,7 +416,7 @@
 
 "@aws-sdk/middleware-sdk-s3@3.474.0":
   version "3.474.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.474.0.tgz#7af224ba8c85f0190a153f4c4e2b48e549f1f34e"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.474.0.tgz"
   integrity sha512-62aAo/8u5daIabeJ+gseYeHeShe9eYH6mH+kfWmLsHybXCCv1EaD/ZkdXWNhL0HZ3bUI1z1SF1p8jjTAWALnwA==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -439,7 +431,7 @@
 
 "@aws-sdk/middleware-signing@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.468.0.tgz#d1b5a92c395f55063cfa72ee95e4921b16f4c515"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.468.0.tgz"
   integrity sha512-s+7fSB1gdnnTj5O0aCCarX3z5Vppop8kazbNSZADdkfHIDWCN80IH4ZNjY3OWqaAz0HmR4LNNrovdR304ojb4Q==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -452,7 +444,7 @@
 
 "@aws-sdk/middleware-ssec@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-ssec/-/middleware-ssec-3.468.0.tgz#8fe4ccfd6f0689b77b230ce17e44438d1ce1b419"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.468.0.tgz"
   integrity sha512-y1qLW24wRkOGBTK5d6eJXf6d8HYo4rzT4a1mNDN1rd18NSffwQ6Yke5qeUiIaxa0y/l+FvvNYErbhYtij2rJoQ==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -461,7 +453,7 @@
 
 "@aws-sdk/middleware-user-agent@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.478.0.tgz#5b98d02ccaa8db880fb59096e5389960c4262fbe"
+  resolved "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.478.0.tgz"
   integrity sha512-Rec+nAPIzzwxgHPW+xqY6tooJGFOytpYg/xSRv8/IXl3xKGhmpMGs6gDWzmMBv/qy5nKTvLph/csNWJ98GWXCw==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -472,7 +464,7 @@
 
 "@aws-sdk/region-config-resolver@3.470.0":
   version "3.470.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.470.0.tgz#74e5c5f7a5633ad8c482503bf940a9330bd1cd09"
+  resolved "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.470.0.tgz"
   integrity sha512-C1o1J06iIw8cyAAOvHqT4Bbqf+PgQ/RDlSyjt2gFfP2OovDpc2o2S90dE8f8iZdSGpg70N5MikT1DBhW9NbhtQ==
   dependencies:
     "@smithy/node-config-provider" "^2.1.8"
@@ -483,7 +475,7 @@
 
 "@aws-sdk/signature-v4-multi-region@3.474.0":
   version "3.474.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.474.0.tgz#192f10924899c2ccf181932b4b5f59d6b01d79d3"
+  resolved "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.474.0.tgz"
   integrity sha512-93OWRQgTJZASXLrlUNX7mmXknNkYxFYldRLARmYQccONmnIqgYQW0lQj8BFwqkHJTzSMik3/UsU0SHKwZ9ynYA==
   dependencies:
     "@aws-sdk/middleware-sdk-s3" "3.474.0"
@@ -495,7 +487,7 @@
 
 "@aws-sdk/token-providers@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.478.0.tgz#145a35a82940190ff7a3db2fbc6b374258d0739f"
+  resolved "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.478.0.tgz"
   integrity sha512-7b5tj1y/wGHZIZ+ckjOUKgKrMuCJMF/G1UKZKIqqdekeEsjcThbvoxAMeY0FEowu2ODVk/ggOmpBFxcu0iYd6A==
   dependencies:
     "@aws-crypto/sha256-browser" "3.0.0"
@@ -536,9 +528,9 @@
     "@smithy/util-utf8" "^2.0.2"
     tslib "^2.5.0"
 
-"@aws-sdk/types@3.468.0", "@aws-sdk/types@^3.222.0":
+"@aws-sdk/types@^3.222.0", "@aws-sdk/types@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.468.0.tgz#f97b34fc92a800d1d8b866f47693ae8f3d46517b"
+  resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz"
   integrity sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -546,14 +538,14 @@
 
 "@aws-sdk/util-arn-parser@3.465.0":
   version "3.465.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/util-arn-parser/-/util-arn-parser-3.465.0.tgz#2896f6b06f69770378586853c97a0f283cbb2e20"
+  resolved "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.465.0.tgz"
   integrity sha512-zOJ82vzDJFqBX9yZBlNeHHrul/kpx/DCoxzW5UBbZeb26kfV53QhMSoEmY8/lEbBqlqargJ/sgRC845GFhHNQw==
   dependencies:
     tslib "^2.5.0"
 
 "@aws-sdk/util-endpoints@3.478.0":
   version "3.478.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.478.0.tgz#d39c5667f22a7cbd2af93cdd6d883d25ef84e374"
+  resolved "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.478.0.tgz"
   integrity sha512-u9Mcg3euGJGs5clPt9mBuhBjHiEKiD0PnfvArhfq9i+dcY5mbCq/i1Dezp3iv1fZH9xxQt7hPXDfSpt1yUSM6g==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -562,14 +554,14 @@
 
 "@aws-sdk/util-locate-window@^3.0.0":
   version "3.465.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.465.0.tgz#0471428fb5eb749d4b72c427f5726f7b61fb90eb"
+  resolved "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.465.0.tgz"
   integrity sha512-f+QNcWGswredzC1ExNAB/QzODlxwaTdXkNT5cvke2RLX8SFU5pYk6h4uCtWC0vWPELzOfMfloBrJefBzlarhsw==
   dependencies:
     tslib "^2.5.0"
 
 "@aws-sdk/util-user-agent-browser@3.468.0":
   version "3.468.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.468.0.tgz#095caecb3fd75104ee38ae81ed78821de0f58e28"
+  resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.468.0.tgz"
   integrity sha512-OJyhWWsDEizR3L+dCgMXSUmaCywkiZ7HSbnQytbeKGwokIhD69HTiJcibF/sgcM5gk4k3Mq3puUhGnEZ46GIig==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -579,7 +571,7 @@
 
 "@aws-sdk/util-user-agent-node@3.470.0":
   version "3.470.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.470.0.tgz#b78605f336859d6c3b5f573cff931ce41f83a27d"
+  resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.470.0.tgz"
   integrity sha512-QxsZ9iVHcBB/XRdYvwfM5AMvNp58HfqkIrH88mY0cmxuvtlIGDfWjczdDrZMJk9y0vIq+cuoCHsGXHu7PyiEAQ==
   dependencies:
     "@aws-sdk/types" "3.468.0"
@@ -589,333 +581,44 @@
 
 "@aws-sdk/util-utf8-browser@^3.0.0":
   version "3.259.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff"
+  resolved "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz"
   integrity sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==
   dependencies:
     tslib "^2.3.1"
 
 "@aws-sdk/xml-builder@3.472.0":
   version "3.472.0"
-  resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.472.0.tgz#fe804e26517779868f7093e361dce4816be546d6"
+  resolved "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.472.0.tgz"
   integrity sha512-PwjVxz1hr9up8QkddabuScPZ/d5aDHgvHYgK4acHYzltXL4wngfvimi5ZqXTzVWF2QANxHmWnHUr45QJX71oJQ==
   dependencies:
     "@smithy/types" "^2.7.0"
     tslib "^2.5.0"
 
-"@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5":
-  version "7.23.5"
-  resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244"
-  integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==
-  dependencies:
-    "@babel/highlight" "^7.23.4"
-    chalk "^2.4.2"
-
-"@babel/compat-data@^7.23.5":
-  version "7.23.5"
-  resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98"
-  integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw==
-
-"@babel/core@^7.22.9":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.6.tgz#8be77cd77c55baadcc1eae1c33df90ab6d2151d4"
-  integrity sha512-FxpRyGjrMJXh7X3wGLGhNDCRiwpWEF74sKjTLDJSG5Kyvow3QZaG0Adbqzi9ZrVjTWpsX+2cxWXD71NMg93kdw==
-  dependencies:
-    "@ampproject/remapping" "^2.2.0"
-    "@babel/code-frame" "^7.23.5"
-    "@babel/generator" "^7.23.6"
-    "@babel/helper-compilation-targets" "^7.23.6"
-    "@babel/helper-module-transforms" "^7.23.3"
-    "@babel/helpers" "^7.23.6"
-    "@babel/parser" "^7.23.6"
-    "@babel/template" "^7.22.15"
-    "@babel/traverse" "^7.23.6"
-    "@babel/types" "^7.23.6"
-    convert-source-map "^2.0.0"
-    debug "^4.1.0"
-    gensync "^1.0.0-beta.2"
-    json5 "^2.2.3"
-    semver "^6.3.1"
-
-"@babel/generator@^7.23.6":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.6.tgz#9e1fca4811c77a10580d17d26b57b036133f3c2e"
-  integrity sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw==
-  dependencies:
-    "@babel/types" "^7.23.6"
-    "@jridgewell/gen-mapping" "^0.3.2"
-    "@jridgewell/trace-mapping" "^0.3.17"
-    jsesc "^2.5.1"
-
-"@babel/helper-compilation-targets@^7.23.6":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz#4d79069b16cbcf1461289eccfbbd81501ae39991"
-  integrity sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==
-  dependencies:
-    "@babel/compat-data" "^7.23.5"
-    "@babel/helper-validator-option" "^7.23.5"
-    browserslist "^4.22.2"
-    lru-cache "^5.1.1"
-    semver "^6.3.1"
-
-"@babel/helper-environment-visitor@^7.22.20":
-  version "7.22.20"
-  resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167"
-  integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==
-
-"@babel/helper-function-name@^7.23.0":
-  version "7.23.0"
-  resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759"
-  integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==
-  dependencies:
-    "@babel/template" "^7.22.15"
-    "@babel/types" "^7.23.0"
-
-"@babel/helper-hoist-variables@^7.22.5":
-  version "7.22.5"
-  resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb"
-  integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==
-  dependencies:
-    "@babel/types" "^7.22.5"
-
-"@babel/helper-module-imports@^7.22.15":
-  version "7.22.15"
-  resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0"
-  integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==
-  dependencies:
-    "@babel/types" "^7.22.15"
-
-"@babel/helper-module-transforms@^7.23.3":
-  version "7.23.3"
-  resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1"
-  integrity sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ==
-  dependencies:
-    "@babel/helper-environment-visitor" "^7.22.20"
-    "@babel/helper-module-imports" "^7.22.15"
-    "@babel/helper-simple-access" "^7.22.5"
-    "@babel/helper-split-export-declaration" "^7.22.6"
-    "@babel/helper-validator-identifier" "^7.22.20"
-
-"@babel/helper-simple-access@^7.22.5":
-  version "7.22.5"
-  resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de"
-  integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==
-  dependencies:
-    "@babel/types" "^7.22.5"
-
-"@babel/helper-split-export-declaration@^7.22.6":
-  version "7.22.6"
-  resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c"
-  integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==
-  dependencies:
-    "@babel/types" "^7.22.5"
-
-"@babel/helper-string-parser@^7.23.4":
-  version "7.23.4"
-  resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83"
-  integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==
-
-"@babel/helper-validator-identifier@^7.22.20":
-  version "7.22.20"
-  resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0"
-  integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==
-
-"@babel/helper-validator-option@^7.23.5":
-  version "7.23.5"
-  resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307"
-  integrity sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==
-
-"@babel/helpers@^7.23.6":
+"@babel/parser@^7.20.15":
   version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.6.tgz#d03af2ee5fb34691eec0cda90f5ecbb4d4da145a"
-  integrity sha512-wCfsbN4nBidDRhpDhvcKlzHWCTlgJYUUdSJfzXb2NuBssDSIjc3xcb+znA7l+zYsFljAcGM0aFkN40cR3lXiGA==
-  dependencies:
-    "@babel/template" "^7.22.15"
-    "@babel/traverse" "^7.23.6"
-    "@babel/types" "^7.23.6"
-
-"@babel/highlight@^7.23.4":
-  version "7.23.4"
-  resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b"
-  integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==
-  dependencies:
-    "@babel/helper-validator-identifier" "^7.22.20"
-    chalk "^2.4.2"
-    js-tokens "^4.0.0"
-
-"@babel/parser@^7.20.15", "@babel/parser@^7.22.15", "@babel/parser@^7.23.6":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b"
+  resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.23.6.tgz"
   integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==
 
-"@babel/standalone@^7.22.9":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/standalone/-/standalone-7.23.6.tgz#b90a1739f05699de1f0fa10aa1a27ee262774d20"
-  integrity sha512-+AzS6BZwZdSosrgS/TiGDYLxtlefARKClWgJ4ql//XfmV9KbPWbkEekvbvDRJ8a6qog8E9j3CziHLz5dbIEMyw==
-
-"@babel/template@^7.22.15":
-  version "7.22.15"
-  resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38"
-  integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==
-  dependencies:
-    "@babel/code-frame" "^7.22.13"
-    "@babel/parser" "^7.22.15"
-    "@babel/types" "^7.22.15"
-
-"@babel/traverse@^7.23.6":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.6.tgz#b53526a2367a0dd6edc423637f3d2d0f2521abc5"
-  integrity sha512-czastdK1e8YByZqezMPFiZ8ahwVMh/ESl9vPgvgdB9AmFMGP5jfpFax74AQgl5zj4XHzqeYAg2l8PuUeRS1MgQ==
-  dependencies:
-    "@babel/code-frame" "^7.23.5"
-    "@babel/generator" "^7.23.6"
-    "@babel/helper-environment-visitor" "^7.22.20"
-    "@babel/helper-function-name" "^7.23.0"
-    "@babel/helper-hoist-variables" "^7.22.5"
-    "@babel/helper-split-export-declaration" "^7.22.6"
-    "@babel/parser" "^7.23.6"
-    "@babel/types" "^7.23.6"
-    debug "^4.3.1"
-    globals "^11.1.0"
-
-"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.6":
-  version "7.23.6"
-  resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.6.tgz#be33fdb151e1f5a56877d704492c240fc71c7ccd"
-  integrity sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==
-  dependencies:
-    "@babel/helper-string-parser" "^7.23.4"
-    "@babel/helper-validator-identifier" "^7.22.20"
-    to-fast-properties "^2.0.0"
-
 "@chainsafe/is-ip@^2.0.1":
   version "2.0.2"
-  resolved "https://registry.yarnpkg.com/@chainsafe/is-ip/-/is-ip-2.0.2.tgz#7311e7403f11d8c5cfa48111f56fcecaac37c9f6"
+  resolved "https://registry.npmjs.org/@chainsafe/is-ip/-/is-ip-2.0.2.tgz"
   integrity sha512-ndGqEMG1W5WkGagaqOZHpPU172AGdxr+LD15sv3WIUvT5oCFUrG1Y0CW/v2Egwj4JXEvSibaIIIqImsm98y1nA==
 
 "@chainsafe/netmask@^2.0.0":
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@chainsafe/netmask/-/netmask-2.0.0.tgz#0d4a75f47919f65011da4327a3845c9661f1038a"
+  resolved "https://registry.npmjs.org/@chainsafe/netmask/-/netmask-2.0.0.tgz"
   integrity sha512-I3Z+6SWUoaljh3TBzCnCxjlUyN8tA+NAk5L6m9IxvCf1BENQTePzPMis97CoN/iMW1St3WN+AWCCRp+TTBRiDg==
   dependencies:
     "@chainsafe/is-ip" "^2.0.1"
 
-"@esbuild/aix-ppc64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.19.10.tgz#fb3922a0183d27446de00cf60d4f7baaadf98d84"
-  integrity sha512-Q+mk96KJ+FZ30h9fsJl+67IjNJm3x2eX+GBWGmocAKgzp27cowCOOqSdscX80s0SpdFXZnIv/+1xD1EctFx96Q==
-
-"@esbuild/android-arm64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.19.10.tgz#ef31015416dd79398082409b77aaaa2ade4d531a"
-  integrity sha512-1X4CClKhDgC3by7k8aOWZeBXQX8dHT5QAMCAQDArCLaYfkppoARvh0fit3X2Qs+MXDngKcHv6XXyQCpY0hkK1Q==
-
-"@esbuild/android-arm@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.19.10.tgz#1c23c7e75473aae9fb323be5d9db225142f47f52"
-  integrity sha512-7W0bK7qfkw1fc2viBfrtAEkDKHatYfHzr/jKAHNr9BvkYDXPcC6bodtm8AyLJNNuqClLNaeTLuwURt4PRT9d7w==
-
-"@esbuild/android-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.19.10.tgz#df6a4e6d6eb8da5595cfce16d4e3f6bc24464707"
-  integrity sha512-O/nO/g+/7NlitUxETkUv/IvADKuZXyH4BHf/g/7laqKC4i/7whLpB0gvpPc2zpF0q9Q6FXS3TS75QHac9MvVWw==
-
-"@esbuild/darwin-arm64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.19.10.tgz#8462a55db07c1b2fad61c8244ce04469ef1043be"
-  integrity sha512-YSRRs2zOpwypck+6GL3wGXx2gNP7DXzetmo5pHXLrY/VIMsS59yKfjPizQ4lLt5vEI80M41gjm2BxrGZ5U+VMA==
-
-"@esbuild/darwin-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.19.10.tgz#d1de20bfd41bb75b955ba86a6b1004539e8218c1"
-  integrity sha512-alfGtT+IEICKtNE54hbvPg13xGBe4GkVxyGWtzr+yHO7HIiRJppPDhOKq3zstTcVf8msXb/t4eavW3jCDpMSmA==
-
-"@esbuild/freebsd-arm64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.10.tgz#16904879e34c53a2e039d1284695d2db3e664d57"
-  integrity sha512-dMtk1wc7FSH8CCkE854GyGuNKCewlh+7heYP/sclpOG6Cectzk14qdUIY5CrKDbkA/OczXq9WesqnPl09mj5dg==
-
-"@esbuild/freebsd-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.19.10.tgz#8ad9e5ca9786ca3f1ef1411bfd10b08dcd9d4cef"
-  integrity sha512-G5UPPspryHu1T3uX8WiOEUa6q6OlQh6gNl4CO4Iw5PS+Kg5bVggVFehzXBJY6X6RSOMS8iXDv2330VzaObm4Ag==
-
-"@esbuild/linux-arm64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.19.10.tgz#d82cf2c590faece82d28bbf1cfbe36f22ae25bd2"
-  integrity sha512-QxaouHWZ+2KWEj7cGJmvTIHVALfhpGxo3WLmlYfJ+dA5fJB6lDEIg+oe/0//FuyVHuS3l79/wyBxbHr0NgtxJQ==
-
-"@esbuild/linux-arm@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.19.10.tgz#477b8e7c7bcd34369717b04dd9ee6972c84f4029"
-  integrity sha512-j6gUW5aAaPgD416Hk9FHxn27On28H4eVI9rJ4az7oCGTFW48+LcgNDBN+9f8rKZz7EEowo889CPKyeaD0iw9Kg==
-
-"@esbuild/linux-ia32@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.19.10.tgz#d55ff822cf5b0252a57112f86857ff23be6cab0e"
-  integrity sha512-4ub1YwXxYjj9h1UIZs2hYbnTZBtenPw5NfXCRgEkGb0b6OJ2gpkMvDqRDYIDRjRdWSe/TBiZltm3Y3Q8SN1xNg==
-
-"@esbuild/linux-loong64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.19.10.tgz#a9ad057d7e48d6c9f62ff50f6f208e331c4543c7"
-  integrity sha512-lo3I9k+mbEKoxtoIbM0yC/MZ1i2wM0cIeOejlVdZ3D86LAcFXFRdeuZmh91QJvUTW51bOK5W2BznGNIl4+mDaA==
-
-"@esbuild/linux-mips64el@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.19.10.tgz#b011a96924773d60ebab396fbd7a08de66668179"
-  integrity sha512-J4gH3zhHNbdZN0Bcr1QUGVNkHTdpijgx5VMxeetSk6ntdt+vR1DqGmHxQYHRmNb77tP6GVvD+K0NyO4xjd7y4A==
-
-"@esbuild/linux-ppc64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.19.10.tgz#5d8b59929c029811e473f2544790ea11d588d4dd"
-  integrity sha512-tgT/7u+QhV6ge8wFMzaklOY7KqiyitgT1AUHMApau32ZlvTB/+efeCtMk4eXS+uEymYK249JsoiklZN64xt6oQ==
-
-"@esbuild/linux-riscv64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.19.10.tgz#292b06978375b271bd8bc0a554e0822957508d22"
-  integrity sha512-0f/spw0PfBMZBNqtKe5FLzBDGo0SKZKvMl5PHYQr3+eiSscfJ96XEknCe+JoOayybWUFQbcJTrk946i3j9uYZA==
-
-"@esbuild/linux-s390x@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.19.10.tgz#d30af63530f8d4fa96930374c9dd0d62bf59e069"
-  integrity sha512-pZFe0OeskMHzHa9U38g+z8Yx5FNCLFtUnJtQMpwhS+r4S566aK2ci3t4NCP4tjt6d5j5uo4h7tExZMjeKoehAA==
-
-"@esbuild/linux-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.19.10.tgz#898c72eeb74d9f2fb43acf316125b475548b75ce"
-  integrity sha512-SpYNEqg/6pZYoc+1zLCjVOYvxfZVZj6w0KROZ3Fje/QrM3nfvT2llI+wmKSrWuX6wmZeTapbarvuNNK/qepSgA==
-
-"@esbuild/netbsd-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.19.10.tgz#fd473a5ae261b43eab6dad4dbd5a3155906e6c91"
-  integrity sha512-ACbZ0vXy9zksNArWlk2c38NdKg25+L9pr/mVaj9SUq6lHZu/35nx2xnQVRGLrC1KKQqJKRIB0q8GspiHI3J80Q==
-
-"@esbuild/openbsd-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.19.10.tgz#96eb8992e526717b5272321eaad3e21f3a608e46"
-  integrity sha512-PxcgvjdSjtgPMiPQrM3pwSaG4kGphP+bLSb+cihuP0LYdZv1epbAIecHVl5sD3npkfYBZ0ZnOjR878I7MdJDFg==
-
-"@esbuild/sunos-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.19.10.tgz#c16ee1c167f903eaaa6acf7372bee42d5a89c9bc"
-  integrity sha512-ZkIOtrRL8SEJjr+VHjmW0znkPs+oJXhlJbNwfI37rvgeMtk3sxOQevXPXjmAPZPigVTncvFqLMd+uV0IBSEzqA==
-
-"@esbuild/win32-arm64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.19.10.tgz#7e417d1971dbc7e469b4eceb6a5d1d667b5e3dcc"
-  integrity sha512-+Sa4oTDbpBfGpl3Hn3XiUe4f8TU2JF7aX8cOfqFYMMjXp6ma6NJDztl5FDG8Ezx0OjwGikIHw+iA54YLDNNVfw==
-
-"@esbuild/win32-ia32@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.19.10.tgz#2b52dfec6cd061ecb36171c13bae554888b439e5"
-  integrity sha512-EOGVLK1oWMBXgfttJdPHDTiivYSjX6jDNaATeNOaCOFEVcfMjtbx7WVQwPSE1eIfCp/CaSF2nSrDtzc4I9f8TQ==
-
-"@esbuild/win32-x64@0.19.10":
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.19.10.tgz#bd123a74f243d2f3a1f046447bb9b363ee25d072"
-  integrity sha512-whqLG6Sc70AbU73fFYvuYzaE4MNMBIlR1Y/IrUeOXFrWHxBEjjbZaQ3IXIQS8wJdAzue2GwYZCjOrgrU1oUHoA==
+"@esbuild/darwin-arm64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz"
+  integrity sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==
 
 "@helia/car@1.0.4":
   version "1.0.4"
-  resolved "https://registry.yarnpkg.com/@helia/car/-/car-1.0.4.tgz#31f881faa0fdd1cf2aba4429bd4c76331dffe4d1"
+  resolved "https://registry.npmjs.org/@helia/car/-/car-1.0.4.tgz"
   integrity sha512-HcHMC/eTRCFt3jrLkMvqKD7Og92PpNoBZTMQ9R+dOvtyIlZ2iG4zYRm2DipPdDi+OvXCqaPNg0oJxDx5fBnjRw==
   dependencies:
     "@helia/interface" "^2.0.0"
@@ -932,7 +635,7 @@
 
 "@helia/interface@^2.0.0":
   version "2.1.0"
-  resolved "https://registry.yarnpkg.com/@helia/interface/-/interface-2.1.0.tgz#ad70cf975971c332751e89fe50818a9d0fb74c79"
+  resolved "https://registry.npmjs.org/@helia/interface/-/interface-2.1.0.tgz"
   integrity sha512-Z7PwuDIR0BODfSMzYcdzgdTYLsshCawAoPvGuuazvBddWSD9y82/QBmsWp6CTkyM/ziEaWbz5wERmRS+wejDLg==
   dependencies:
     "@libp2p/interface" "^0.1.1"
@@ -945,7 +648,7 @@
 
 "@helia/unixfs@1.4.3":
   version "1.4.3"
-  resolved "https://registry.yarnpkg.com/@helia/unixfs/-/unixfs-1.4.3.tgz#5f0d5de880d300eb32d4861ded7567945de1c4ba"
+  resolved "https://registry.npmjs.org/@helia/unixfs/-/unixfs-1.4.3.tgz"
   integrity sha512-jS0En8fGhb01XH+nnxo3kQsmc1lwBEdlttAZFvTo7HCjBGPNFuaYdwTqF9S1wMVWV2fWqj7eS2zBZZa0MDsi1Q==
   dependencies:
     "@helia/interface" "^2.0.0"
@@ -966,9 +669,19 @@
     progress-events "^1.0.0"
     sparse-array "^1.3.2"
 
-"@ipld/car@5.2.4", "@ipld/car@^5.1.1":
+"@ipld/car@^5.1.1":
+  version "5.2.6"
+  resolved "https://registry.npmjs.org/@ipld/car/-/car-5.2.6.tgz"
+  integrity sha512-ZiIYan7UFLLQsR90GpKOrZ0t6/6owrevJI7dCG8McNj0zUO4vGzsPumpKRBP4pdBgek4oXt4TbFOwxqTPEh5mA==
+  dependencies:
+    "@ipld/dag-cbor" "^9.0.7"
+    cborg "^4.0.5"
+    multiformats "^13.0.0"
+    varint "^6.0.0"
+
+"@ipld/car@5.2.4":
   version "5.2.4"
-  resolved "https://registry.yarnpkg.com/@ipld/car/-/car-5.2.4.tgz#5770ad01075162e10cfc646142806758befbf3f5"
+  resolved "https://registry.npmjs.org/@ipld/car/-/car-5.2.4.tgz"
   integrity sha512-YoVXE/o5HLXKi/Oqh9Nhcn423sdn9brRFKnbUid68/1D332/XINcoyCTvBluFcCw/9IeiTx+sEAV+onXZ/A4eA==
   dependencies:
     "@ipld/dag-cbor" "^9.0.0"
@@ -976,24 +689,36 @@
     multiformats "^12.1.0"
     varint "^6.0.0"
 
-"@ipld/dag-cbor@^9.0.0":
-  version "9.0.6"
-  resolved "https://registry.yarnpkg.com/@ipld/dag-cbor/-/dag-cbor-9.0.6.tgz#cfc63d7a5f65ad1bfdd628ba01d025d9b759e3b7"
-  integrity sha512-3kNab5xMppgWw6DVYx2BzmFq8t7I56AGWfp5kaU1fIPkwHVpBRglJJTYsGtbVluCi/s/q97HZM3bC+aDW4sxbQ==
+"@ipld/dag-cbor@^9.0.0", "@ipld/dag-cbor@^9.0.7":
+  version "9.1.0"
+  resolved "https://registry.npmjs.org/@ipld/dag-cbor/-/dag-cbor-9.1.0.tgz"
+  integrity sha512-7pMKjBaapEh+1Nk/1umPPhQGT6znb5E71lke2ekxlcuVZLLrPPdDSy0UAMwWgj3a28cjir/ZJ6CQH2DEs3DUOQ==
   dependencies:
     cborg "^4.0.0"
-    multiformats "^12.0.1"
+    multiformats "^13.0.0"
 
 "@ipld/dag-pb@^4.0.0":
   version "4.0.6"
-  resolved "https://registry.yarnpkg.com/@ipld/dag-pb/-/dag-pb-4.0.6.tgz#8bd8b7b6424982fb239baec38beb08f31126a21e"
+  resolved "https://registry.npmjs.org/@ipld/dag-pb/-/dag-pb-4.0.6.tgz"
   integrity sha512-wOij3jfDKZsb9yjhQeHp+TQy0pu1vmUkGv324xciFFZ7xGbDfAGTQW03lSA5aJ/7HBBNYgjEE0nvHmNW1Qjfag==
   dependencies:
     multiformats "^12.0.1"
 
+"@isaacs/cliui@^8.0.2":
+  version "8.0.2"
+  resolved "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz"
+  integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==
+  dependencies:
+    string-width "^5.1.2"
+    string-width-cjs "npm:string-width@^4.2.0"
+    strip-ansi "^7.0.1"
+    strip-ansi-cjs "npm:strip-ansi@^6.0.1"
+    wrap-ansi "^8.1.0"
+    wrap-ansi-cjs "npm:wrap-ansi@^7.0.0"
+
 "@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2":
   version "0.3.3"
-  resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098"
+  resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz"
   integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==
   dependencies:
     "@jridgewell/set-array" "^1.0.1"
@@ -1002,30 +727,30 @@
 
 "@jridgewell/resolve-uri@^3.1.0":
   version "3.1.1"
-  resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721"
+  resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz"
   integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==
 
 "@jridgewell/set-array@^1.0.1":
   version "1.1.2"
-  resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72"
+  resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz"
   integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==
 
 "@jridgewell/source-map@^0.3.3":
   version "0.3.5"
-  resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.5.tgz#a3bb4d5c6825aab0d281268f47f6ad5853431e91"
+  resolved "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.5.tgz"
   integrity sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ==
   dependencies:
     "@jridgewell/gen-mapping" "^0.3.0"
     "@jridgewell/trace-mapping" "^0.3.9"
 
-"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14", "@jridgewell/sourcemap-codec@^1.4.15":
+"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14":
   version "1.4.15"
-  resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32"
+  resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz"
   integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==
 
-"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9":
+"@jridgewell/trace-mapping@^0.3.9":
   version "0.3.20"
-  resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f"
+  resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz"
   integrity sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==
   dependencies:
     "@jridgewell/resolve-uri" "^3.1.0"
@@ -1033,14 +758,14 @@
 
 "@jsdoc/salty@^0.2.1", "@jsdoc/salty@^0.2.4":
   version "0.2.7"
-  resolved "https://registry.yarnpkg.com/@jsdoc/salty/-/salty-0.2.7.tgz#98ddce519fd95d7bee605a658fabf6e8cbf7556d"
+  resolved "https://registry.npmjs.org/@jsdoc/salty/-/salty-0.2.7.tgz"
   integrity sha512-mh8LbS9d4Jq84KLw8pzho7XC2q2/IJGiJss3xwRoLD1A+EE16SjN4PfaG4jRCzKegTFLlN0Zd8SdUPE6XdoPFg==
   dependencies:
     lodash "^4.17.21"
 
 "@libp2p/interface@^0.1.1", "@libp2p/interface@^0.1.2", "@libp2p/interface@^0.1.6":
   version "0.1.6"
-  resolved "https://registry.yarnpkg.com/@libp2p/interface/-/interface-0.1.6.tgz#1328cf6086f02c499183489ccb143fe9c159e871"
+  resolved "https://registry.npmjs.org/@libp2p/interface/-/interface-0.1.6.tgz"
   integrity sha512-Lzc5cS/hXuoXhuAbVIxJIHLCYmfPcbU0vVgrpMoiP1Qb2Q3ETU4A46GB8s8mWXgSU6tr9RcqerUqzFYD6+OAag==
   dependencies:
     "@multiformats/multiaddr" "^12.1.5"
@@ -1052,9 +777,20 @@
     race-signal "^1.0.0"
     uint8arraylist "^2.4.3"
 
-"@libp2p/interface@^1.0.0", "@libp2p/interface@^1.0.2":
+"@libp2p/interface@^1.0.0":
+  version "1.0.2"
+  resolved "https://registry.npmjs.org/@libp2p/interface/-/interface-1.0.2.tgz"
+  integrity sha512-z/3Yyg+7cVyzRXwzdrDkJd7YmNaLE9iZjQaixo5luI/n9uk5OFFjb9ulAsNqpq8V1xylCo2DXIC7f94KClwzVw==
+  dependencies:
+    "@multiformats/multiaddr" "^12.1.10"
+    it-pushable "^3.2.1"
+    it-stream-types "^2.0.1"
+    multiformats "^12.1.3"
+    uint8arraylist "^2.4.3"
+
+"@libp2p/interface@^1.0.2":
   version "1.0.2"
-  resolved "https://registry.yarnpkg.com/@libp2p/interface/-/interface-1.0.2.tgz#91c3352026e5a74753a520ba68dd81c90514df6e"
+  resolved "https://registry.npmjs.org/@libp2p/interface/-/interface-1.0.2.tgz"
   integrity sha512-z/3Yyg+7cVyzRXwzdrDkJd7YmNaLE9iZjQaixo5luI/n9uk5OFFjb9ulAsNqpq8V1xylCo2DXIC7f94KClwzVw==
   dependencies:
     "@multiformats/multiaddr" "^12.1.10"
@@ -1065,12 +801,12 @@
 
 "@libp2p/interfaces@^3.3.1":
   version "3.3.2"
-  resolved "https://registry.yarnpkg.com/@libp2p/interfaces/-/interfaces-3.3.2.tgz#5d8079be845b0960939b5b18880e785a4714465a"
+  resolved "https://registry.npmjs.org/@libp2p/interfaces/-/interfaces-3.3.2.tgz"
   integrity sha512-p/M7plbrxLzuQchvNwww1Was7ZeGE2NaOFulMaZBYIihU8z3fhaV+a033OqnC/0NTX/yhfdNOG7znhYq3XoR/g==
 
 "@libp2p/logger@^3.0.1", "@libp2p/logger@^3.0.2":
   version "3.1.0"
-  resolved "https://registry.yarnpkg.com/@libp2p/logger/-/logger-3.1.0.tgz#ac9adb08f344934e191d7049ce876ac0111449ce"
+  resolved "https://registry.npmjs.org/@libp2p/logger/-/logger-3.1.0.tgz"
   integrity sha512-qJbJBAhxHVsRBtQSOIkSLi0lskUSFjzE+zm0QvoyxzZKSz+mX41mZLbnofPIVOVauoDQ40dXpe7WDUOq8AbiQQ==
   dependencies:
     "@libp2p/interface" "^0.1.6"
@@ -1081,7 +817,7 @@
 
 "@libp2p/logger@^4.0.1":
   version "4.0.2"
-  resolved "https://registry.yarnpkg.com/@libp2p/logger/-/logger-4.0.2.tgz#20470cc5c0e3311505e20ea8ff9ad764bda4e8fa"
+  resolved "https://registry.npmjs.org/@libp2p/logger/-/logger-4.0.2.tgz"
   integrity sha512-J9UMtMU9BKXNp+3c5kcI7HyWOPYg2B2E6sn1gEQckiSexTaz0wKJSlgTZ89f9F8bkC3AaC8ybXYuHbFQhwpTIg==
   dependencies:
     "@libp2p/interface" "^1.0.2"
@@ -1092,7 +828,7 @@
 
 "@multiformats/multiaddr@^12.1.0", "@multiformats/multiaddr@^12.1.10", "@multiformats/multiaddr@^12.1.5":
   version "12.1.11"
-  resolved "https://registry.yarnpkg.com/@multiformats/multiaddr/-/multiaddr-12.1.11.tgz#53d857ef61aa56996792c28163a4c320111e7ccb"
+  resolved "https://registry.npmjs.org/@multiformats/multiaddr/-/multiaddr-12.1.11.tgz"
   integrity sha512-CWG9kETEGTTMdr1T+/JEuMwFld3r3fHNP8LkLoUcLvHRy6yr8sWdotVGEDNEdDO/vrKhuD7bQBws3xMSMMyylg==
   dependencies:
     "@chainsafe/is-ip" "^2.0.1"
@@ -1105,7 +841,7 @@
 
 "@multiformats/murmur3@^2.0.0", "@multiformats/murmur3@^2.1.2":
   version "2.1.7"
-  resolved "https://registry.yarnpkg.com/@multiformats/murmur3/-/murmur3-2.1.7.tgz#931223742be287f99aac03498068b5cd24ad31a9"
+  resolved "https://registry.npmjs.org/@multiformats/murmur3/-/murmur3-2.1.7.tgz"
   integrity sha512-Yf0UpAaONjed+8PTt5NM/GG4Z4Ai4m1qfT7bqevjnkwRQ12K+0jxtRomirz+VJx4PokpA2St1ZSD1iMkZTqPRQ==
   dependencies:
     multiformats "^12.0.1"
@@ -1113,83 +849,38 @@
 
 "@nodelib/fs.scandir@2.1.5":
   version "2.1.5"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5"
+  resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz"
   integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==
   dependencies:
     "@nodelib/fs.stat" "2.0.5"
     run-parallel "^1.1.9"
 
-"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2":
+"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5":
   version "2.0.5"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b"
+  resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz"
   integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
 
 "@nodelib/fs.walk@^1.2.3":
   version "1.2.8"
-  resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a"
+  resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz"
   integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==
   dependencies:
     "@nodelib/fs.scandir" "2.1.5"
     fastq "^1.6.0"
 
-"@rollup/plugin-alias@^5.0.0":
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/@rollup/plugin-alias/-/plugin-alias-5.1.0.tgz#99a94accc4ff9a3483be5baeedd5d7da3b597e93"
-  integrity sha512-lpA3RZ9PdIG7qqhEfv79tBffNaoDuukFDrmhLqg9ifv99u/ehn+lOg30x2zmhf8AQqQUZaMk/B9fZraQ6/acDQ==
-  dependencies:
-    slash "^4.0.0"
+"@pkgjs/parseargs@^0.11.0":
+  version "0.11.0"
+  resolved "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz"
+  integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==
 
-"@rollup/plugin-commonjs@^25.0.4":
-  version "25.0.7"
-  resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-25.0.7.tgz#145cec7589ad952171aeb6a585bbeabd0fd3b4cf"
-  integrity sha512-nEvcR+LRjEjsaSsc4x3XZfCCvZIaSMenZu/OiwOKGN2UhQpAYI7ru7czFvyWbErlpoGjnSX3D5Ch5FcMA3kRWQ==
-  dependencies:
-    "@rollup/pluginutils" "^5.0.1"
-    commondir "^1.0.1"
-    estree-walker "^2.0.2"
-    glob "^8.0.3"
-    is-reference "1.2.1"
-    magic-string "^0.30.3"
-
-"@rollup/plugin-json@^6.0.0":
-  version "6.1.0"
-  resolved "https://registry.yarnpkg.com/@rollup/plugin-json/-/plugin-json-6.1.0.tgz#fbe784e29682e9bb6dee28ea75a1a83702e7b805"
-  integrity sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==
-  dependencies:
-    "@rollup/pluginutils" "^5.1.0"
-
-"@rollup/plugin-node-resolve@^15.2.1":
-  version "15.2.3"
-  resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.2.3.tgz#e5e0b059bd85ca57489492f295ce88c2d4b0daf9"
-  integrity sha512-j/lym8nf5E21LwBT4Df1VD6hRO2L2iwUeUmP7litikRsVp1H6NWx20NEp0Y7su+7XGc476GnXXc4kFeZNGmaSQ==
-  dependencies:
-    "@rollup/pluginutils" "^5.0.1"
-    "@types/resolve" "1.20.2"
-    deepmerge "^4.2.2"
-    is-builtin-module "^3.2.1"
-    is-module "^1.0.0"
-    resolve "^1.22.1"
-
-"@rollup/plugin-replace@^5.0.2":
-  version "5.0.5"
-  resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-5.0.5.tgz#33d5653dce6d03cb24ef98bef7f6d25b57faefdf"
-  integrity sha512-rYO4fOi8lMaTg/z5Jb+hKnrHHVn8j2lwkqwyS4kTRhKyWOLf2wST2sWXr4WzWiTcoHTp2sTjqUbqIj2E39slKQ==
-  dependencies:
-    "@rollup/pluginutils" "^5.0.1"
-    magic-string "^0.30.3"
-
-"@rollup/pluginutils@^5.0.1", "@rollup/pluginutils@^5.0.3", "@rollup/pluginutils@^5.1.0":
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.1.0.tgz#7e53eddc8c7f483a4ad0b94afb1f7f5fd3c771e0"
-  integrity sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g==
-  dependencies:
-    "@types/estree" "^1.0.0"
-    estree-walker "^2.0.2"
-    picomatch "^2.3.1"
+"@rollup/rollup-darwin-arm64@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.9.6.tgz"
+  integrity sha512-CqNNAyhRkTbo8VVZ5R85X73H3R5NX9ONnKbXuHisGWC0qRbTTxnF1U4V9NafzJbgGM0sHZpdO83pLPzq8uOZFw==
 
 "@smithy/abort-controller@^2.0.1", "@smithy/abort-controller@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-2.0.15.tgz#fcec9193da8b86eef1eedc3e71139a99c061db32"
+  resolved "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-2.0.15.tgz"
   integrity sha512-JkS36PIS3/UCbq/MaozzV7jECeL+BTt4R75bwY8i+4RASys4xOyUS1HsRyUNSqUXFP4QyCz5aNnh3ltuaxv+pw==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1197,7 +888,7 @@
 
 "@smithy/chunked-blob-reader-native@^2.0.1":
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-2.0.1.tgz#0599eaed8c2cd15c7ab43a1838cef1258ff27133"
+  resolved "https://registry.npmjs.org/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-2.0.1.tgz"
   integrity sha512-N2oCZRglhWKm7iMBu7S6wDzXirjAofi7tAd26cxmgibRYOBS4D3hGfmkwCpHdASZzwZDD8rluh0Rcqw1JeZDRw==
   dependencies:
     "@smithy/util-base64" "^2.0.1"
@@ -1205,14 +896,14 @@
 
 "@smithy/chunked-blob-reader@^2.0.0":
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader/-/chunked-blob-reader-2.0.0.tgz#c44fe2c780eaf77f9e5381d982ac99a880cce51b"
+  resolved "https://registry.npmjs.org/@smithy/chunked-blob-reader/-/chunked-blob-reader-2.0.0.tgz"
   integrity sha512-k+J4GHJsMSAIQPChGBrjEmGS+WbPonCXesoqP9fynIqjn7rdOThdH8FAeCmokP9mxTYKQAKoHCLPzNlm6gh7Wg==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/config-resolver@^2.0.21":
   version "2.0.21"
-  resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-2.0.21.tgz#97cb1c71f3c8c453fb01169545f98414b3414d7f"
+  resolved "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-2.0.21.tgz"
   integrity sha512-rlLIGT+BeqjnA6C2FWumPRJS1UW07iU5ZxDHtFuyam4W65gIaOFMjkB90ofKCIh+0mLVQrQFrl/VLtQT/6FWTA==
   dependencies:
     "@smithy/node-config-provider" "^2.1.8"
@@ -1223,7 +914,7 @@
 
 "@smithy/core@^1.2.0":
   version "1.2.1"
-  resolved "https://registry.yarnpkg.com/@smithy/core/-/core-1.2.1.tgz#2ee9b9894f3b2ce5eac4010ea1a85874b446f203"
+  resolved "https://registry.npmjs.org/@smithy/core/-/core-1.2.1.tgz"
   integrity sha512-f6cwmMuHo7RIw/c184NBd2rGeGvGIX6p55HSrG5jfR3qkNYo80PHRfhzkJMq1+mv1ZjI5p8NhenWMMkIRJR4tw==
   dependencies:
     "@smithy/middleware-endpoint" "^2.2.3"
@@ -1237,7 +928,7 @@
 
 "@smithy/credential-provider-imds@^2.0.0", "@smithy/credential-provider-imds@^2.1.4":
   version "2.1.4"
-  resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-2.1.4.tgz#126adf69eac333f23f8683edbfabdc2b3b2deb15"
+  resolved "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-2.1.4.tgz"
   integrity sha512-cwPJN1fa1YOQzhBlTXRavABEYRRchci1X79QRwzaNLySnIMJfztyv1Zkst0iZPLMnpn8+CnHu3wOHS11J5Dr3A==
   dependencies:
     "@smithy/node-config-provider" "^2.1.8"
@@ -1248,7 +939,7 @@
 
 "@smithy/eventstream-codec@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-2.0.15.tgz#733e638fd38e7e264bc0429dbda139bab950bd25"
+  resolved "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-2.0.15.tgz"
   integrity sha512-crjvz3j1gGPwA0us6cwS7+5gAn35CTmqu/oIxVbYJo2Qm/sGAye6zGJnMDk3BKhWZw5kcU1G4MxciTkuBpOZPg==
   dependencies:
     "@aws-crypto/crc32" "3.0.0"
@@ -1258,7 +949,7 @@
 
 "@smithy/eventstream-serde-browser@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-2.0.15.tgz#f62c891e6f8ad59f552a92d8aa14eb6b4541d418"
+  resolved "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-2.0.15.tgz"
   integrity sha512-WiFG5N9j3jmS5P0z5Xev6dO0c3lf7EJYC2Ncb0xDnWFvShwXNn741AF71ABr5EcZw8F4rQma0362MMjAwJeZog==
   dependencies:
     "@smithy/eventstream-serde-universal" "^2.0.15"
@@ -1267,7 +958,7 @@
 
 "@smithy/eventstream-serde-config-resolver@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-2.0.15.tgz#50e98c59aeb31a0702bad5dfab4009a15fc8b3bf"
+  resolved "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-2.0.15.tgz"
   integrity sha512-o65d2LRjgCbWYH+VVNlWXtmsI231SO99ZTOL4UuIPa6WTjbSHWtlXvUcJG9libhEKWmEV9DIUiH2IqyPWi7ubA==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1275,7 +966,7 @@
 
 "@smithy/eventstream-serde-node@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.0.15.tgz#8be1bd024048adcff4ccbb723c55fc42ce582d33"
+  resolved "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.0.15.tgz"
   integrity sha512-9OOXiIhHq1VeOG6xdHkn2ZayfMYM3vzdUTV3zhcCnt+tMqA3BJK3XXTJFRR2BV28rtRM778DzqbBTf+hqwQPTg==
   dependencies:
     "@smithy/eventstream-serde-universal" "^2.0.15"
@@ -1284,7 +975,7 @@
 
 "@smithy/eventstream-serde-universal@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-2.0.15.tgz#85cdff39abc630cb18b4d333913b7120651771ca"
+  resolved "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-2.0.15.tgz"
   integrity sha512-dP8AQp/pXlWBjvL0TaPBJC3rM0GoYv7O0Uim8d/7UKZ2Wo13bFI3/BhQfY/1DeiP1m23iCHFNFtOQxfQNBB8rQ==
   dependencies:
     "@smithy/eventstream-codec" "^2.0.15"
@@ -1293,7 +984,7 @@
 
 "@smithy/fetch-http-handler@^2.3.1":
   version "2.3.1"
-  resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-2.3.1.tgz#aa055db5bf4d78acec97abe6ef24283fa2c18430"
+  resolved "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-2.3.1.tgz"
   integrity sha512-6MNk16fqb8EwcYY8O8WxB3ArFkLZ2XppsSNo1h7SQcFdDDwIumiJeO6wRzm7iB68xvsOQzsdQKbdtTieS3hfSQ==
   dependencies:
     "@smithy/protocol-http" "^3.0.11"
@@ -1304,7 +995,7 @@
 
 "@smithy/hash-blob-browser@^2.0.16":
   version "2.0.16"
-  resolved "https://registry.yarnpkg.com/@smithy/hash-blob-browser/-/hash-blob-browser-2.0.16.tgz#6cd3686e79f3c8d96a129076073bf20d06293152"
+  resolved "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-2.0.16.tgz"
   integrity sha512-cSYRi05LA7DZDwjB1HL0BP8B56eUNNeLglVH147QTXFyuXJq/7erAIiLRfsyXB8+GfFHkSS5BHbc76a7k/AYPA==
   dependencies:
     "@smithy/chunked-blob-reader" "^2.0.0"
@@ -1314,7 +1005,7 @@
 
 "@smithy/hash-node@^2.0.17":
   version "2.0.17"
-  resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-2.0.17.tgz#9ce5e3f137143e3658759d31a16e068ef94a14fc"
+  resolved "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-2.0.17.tgz"
   integrity sha512-Il6WuBcI1nD+e2DM7tTADMf01wEPGK8PAhz4D+YmDUVaoBqlA+CaH2uDJhiySifmuKBZj748IfygXty81znKhw==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1324,7 +1015,7 @@
 
 "@smithy/hash-stream-node@^2.0.17":
   version "2.0.17"
-  resolved "https://registry.yarnpkg.com/@smithy/hash-stream-node/-/hash-stream-node-2.0.17.tgz#90375ed9c1a586118433c925a61d39b5555bf284"
+  resolved "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-2.0.17.tgz"
   integrity sha512-ey8DtnATzp1mOXgS7rqMwSmAki6iJA+jgNucKcxRkhMB1rrICfHg+rhmIF50iLPDHUhTcS5pBMOrLzzpZftvNQ==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1333,7 +1024,7 @@
 
 "@smithy/invalid-dependency@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-2.0.15.tgz#7653490047bf0ab6042fb812adfbcce857aa2d06"
+  resolved "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-2.0.15.tgz"
   integrity sha512-dlEKBFFwVfzA5QroHlBS94NpgYjXhwN/bFfun+7w3rgxNvVy79SK0w05iGc7UAeC5t+D7gBxrzdnD6hreZnDVQ==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1341,14 +1032,14 @@
 
 "@smithy/is-array-buffer@^2.0.0":
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz#8fa9b8040651e7ba0b2f6106e636a91354ff7d34"
+  resolved "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz"
   integrity sha512-z3PjFjMyZNI98JFRJi/U0nGoLWMSJlDjAW4QUX2WNZLas5C0CmVV6LJ01JI0k90l7FvpmixjWxPFmENSClQ7ug==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/md5-js@^2.0.17":
   version "2.0.17"
-  resolved "https://registry.yarnpkg.com/@smithy/md5-js/-/md5-js-2.0.17.tgz#784c02da6cee539f5af0e45b1eaf9beb10ed8ad6"
+  resolved "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-2.0.17.tgz"
   integrity sha512-jmISTCnEkOnm2oCNx/rMkvBT/eQh3aA6nktevkzbmn/VYqYEuc5Z2n5sTTqsciMSO01Lvf56wG1A4twDqovYeQ==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1357,7 +1048,7 @@
 
 "@smithy/middleware-content-length@^2.0.17":
   version "2.0.17"
-  resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-2.0.17.tgz#13479173a15d1cd4224e3e21071a27c66a74b653"
+  resolved "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-2.0.17.tgz"
   integrity sha512-OyadvMcKC7lFXTNBa8/foEv7jOaqshQZkjWS9coEXPRZnNnihU/Ls+8ZuJwGNCOrN2WxXZFmDWhegbnM4vak8w==
   dependencies:
     "@smithy/protocol-http" "^3.0.11"
@@ -1366,7 +1057,7 @@
 
 "@smithy/middleware-endpoint@^2.2.3":
   version "2.2.3"
-  resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-2.2.3.tgz#4069ab6e8d1b485bc0d2384b30f7b37096111ec2"
+  resolved "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-2.2.3.tgz"
   integrity sha512-nYfxuq0S/xoAjdLbyn1ixeVB6cyH9wYCMtbbOCpcCRYR5u2mMtqUtVjjPAZ/DIdlK3qe0tpB0Q76szFGNuz+kQ==
   dependencies:
     "@smithy/middleware-serde" "^2.0.15"
@@ -1379,7 +1070,7 @@
 
 "@smithy/middleware-retry@^2.0.24", "@smithy/middleware-retry@^2.0.25":
   version "2.0.25"
-  resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-2.0.25.tgz#ae948f4a1ad5968bc315f07df930a55c09ffe040"
+  resolved "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-2.0.25.tgz"
   integrity sha512-FXhafCPvx/9L9OgHJ3cdo/pD1f7ngC7DKsjDV2J7k6LO/Yl69POoBLk4sI1OZPUGc4dfxriENlTma9Nj1hI+IQ==
   dependencies:
     "@smithy/node-config-provider" "^2.1.8"
@@ -1394,7 +1085,7 @@
 
 "@smithy/middleware-serde@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-2.0.15.tgz#9deac4daad1f2a60d5c4e7097658f9ae2eb0a33f"
+  resolved "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-2.0.15.tgz"
   integrity sha512-FOZRFk/zN4AT4wzGuBY+39XWe+ZnCFd0gZtyw3f9Okn2CJPixl9GyWe98TIaljeZdqWkgrzGyPre20AcW2UMHQ==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1402,7 +1093,7 @@
 
 "@smithy/middleware-stack@^2.0.9":
   version "2.0.9"
-  resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-2.0.9.tgz#60e51697c74258fac087bc739d940f524921a15f"
+  resolved "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-2.0.9.tgz"
   integrity sha512-bCB5dUtGQ5wh7QNL2ELxmDc6g7ih7jWU3Kx6MYH1h4mZbv9xL3WyhKHojRltThCB1arLPyTUFDi+x6fB/oabtA==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1410,7 +1101,7 @@
 
 "@smithy/node-config-provider@^2.1.8":
   version "2.1.8"
-  resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-2.1.8.tgz#8cab8f1172c8cd1146e7997292786909abcae763"
+  resolved "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-2.1.8.tgz"
   integrity sha512-+w26OKakaBUGp+UG+dxYZtFb5fs3tgHg3/QrRrmUZj+rl3cIuw840vFUXX35cVPTUCQIiTqmz7CpVF7+hdINdQ==
   dependencies:
     "@smithy/property-provider" "^2.0.16"
@@ -1420,7 +1111,7 @@
 
 "@smithy/node-http-handler@^2.2.1":
   version "2.2.1"
-  resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-2.2.1.tgz#23f6540e565edcae8c558a854fffde3d003451c0"
+  resolved "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-2.2.1.tgz"
   integrity sha512-8iAKQrC8+VFHPAT8pg4/j6hlsTQh+NKOWlctJBrYtQa4ExcxX7aSg3vdQ2XLoYwJotFUurg/NLqFCmZaPRrogw==
   dependencies:
     "@smithy/abort-controller" "^2.0.15"
@@ -1431,7 +1122,7 @@
 
 "@smithy/property-provider@^2.0.0", "@smithy/property-provider@^2.0.16":
   version "2.0.16"
-  resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-2.0.16.tgz#0c15ea8a3e8c8e7012bf5877c79ce754f7d2c06e"
+  resolved "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-2.0.16.tgz"
   integrity sha512-28Ky0LlOqtEjwg5CdHmwwaDRHcTWfPRzkT6HrhwOSRS2RryAvuDfJrZpM+BMcrdeCyEg1mbcgIMoqTla+rdL8Q==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1439,7 +1130,7 @@
 
 "@smithy/protocol-http@^3.0.11":
   version "3.0.11"
-  resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-3.0.11.tgz#a9ea712fe7cc3375378ac68d9168a7b6cd0b6f65"
+  resolved "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-3.0.11.tgz"
   integrity sha512-3ziB8fHuXIRamV/akp/sqiWmNPR6X+9SB8Xxnozzj+Nq7hSpyKdFHd1FLpBkgfGFUTzzcBJQlDZPSyxzmdcx5A==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1447,7 +1138,7 @@
 
 "@smithy/querystring-builder@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-2.0.15.tgz#aa8c889bcaef274b8345be4ddabae3bfedf2cf33"
+  resolved "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-2.0.15.tgz"
   integrity sha512-e1q85aT6HutvouOdN+dMsN0jcdshp50PSCvxDvo6aIM57LqeXimjfONUEgfqQ4IFpYWAtVixptyIRE5frMp/2A==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1456,7 +1147,7 @@
 
 "@smithy/querystring-parser@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-2.0.15.tgz#46c8806a145f46636e4aee2a5d79e7ba68161a4c"
+  resolved "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-2.0.15.tgz"
   integrity sha512-jbBvoK3cc81Cj1c1TH1qMYxNQKHrYQ2DoTntN9FBbtUWcGhc+T4FP6kCKYwRLXyU4AajwGIZstvNAmIEgUUNTQ==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1464,14 +1155,14 @@
 
 "@smithy/service-error-classification@^2.0.8":
   version "2.0.8"
-  resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-2.0.8.tgz#c9e421312a2def84da025c5efe6de06679c5be95"
+  resolved "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-2.0.8.tgz"
   integrity sha512-jCw9+005im8tsfYvwwSc4TTvd29kXRFkH9peQBg5R/4DD03ieGm6v6Hpv9nIAh98GwgYg1KrztcINC1s4o7/hg==
   dependencies:
     "@smithy/types" "^2.7.0"
 
 "@smithy/shared-ini-file-loader@^2.0.6", "@smithy/shared-ini-file-loader@^2.2.7":
   version "2.2.7"
-  resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-2.2.7.tgz#4a3bd469703d02c3cc8e36dcba2238c06efa12cb"
+  resolved "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-2.2.7.tgz"
   integrity sha512-0Qt5CuiogIuvQIfK+be7oVHcPsayLgfLJGkPlbgdbl0lD28nUKu4p11L+UG3SAEsqc9UsazO+nErPXw7+IgDpQ==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1479,7 +1170,7 @@
 
 "@smithy/signature-v4@^2.0.0":
   version "2.0.18"
-  resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-2.0.18.tgz#53b78b238edaa84cc8d61faf67d2b3c926cdd698"
+  resolved "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-2.0.18.tgz"
   integrity sha512-SJRAj9jT/l9ocm8D0GojMbnA1sp7I4JeStOQ4lEXI8A5eHE73vbjlzlqIFB7cLvIgau0oUl4cGVpF9IGCrvjlw==
   dependencies:
     "@smithy/eventstream-codec" "^2.0.15"
@@ -1493,7 +1184,7 @@
 
 "@smithy/smithy-client@^2.1.18", "@smithy/smithy-client@^2.2.0":
   version "2.2.0"
-  resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-2.2.0.tgz#9865981a2f49dd0e9ef68193aebcd5654cdd7f00"
+  resolved "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-2.2.0.tgz"
   integrity sha512-C/bkNue5H5Obgl83SnlBt4v6VM68CqIjIELh3vAabud87xFYznLNKtj6Qb69Z+QOnLp9T+We++sEem/f2AHE+Q==
   dependencies:
     "@smithy/middleware-endpoint" "^2.2.3"
@@ -1505,14 +1196,14 @@
 
 "@smithy/types@^2.7.0":
   version "2.7.0"
-  resolved "https://registry.yarnpkg.com/@smithy/types/-/types-2.7.0.tgz#6ed9ba5bff7c4d28c980cff967e6d8456840a4f3"
+  resolved "https://registry.npmjs.org/@smithy/types/-/types-2.7.0.tgz"
   integrity sha512-1OIFyhK+vOkMbu4aN2HZz/MomREkrAC/HqY5mlJMUJfGrPRwijJDTeiN8Rnj9zUaB8ogXAfIOtZrrgqZ4w7Wnw==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/url-parser@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-2.0.15.tgz#878d9b61f9eac8834cb611cf1a8a0e5d9a48038c"
+  resolved "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-2.0.15.tgz"
   integrity sha512-sADUncUj9rNbOTrdDGm4EXlUs0eQ9dyEo+V74PJoULY4jSQxS+9gwEgsPYyiu8PUOv16JC/MpHonOgqP/IEDZA==
   dependencies:
     "@smithy/querystring-parser" "^2.0.15"
@@ -1521,7 +1212,7 @@
 
 "@smithy/util-base64@^2.0.1":
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-2.0.1.tgz#57f782dafc187eddea7c8a1ff2a7c188ed1a02c4"
+  resolved "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-2.0.1.tgz"
   integrity sha512-DlI6XFYDMsIVN+GH9JtcRp3j02JEVuWIn/QOZisVzpIAprdsxGveFed0bjbMRCqmIFe8uetn5rxzNrBtIGrPIQ==
   dependencies:
     "@smithy/util-buffer-from" "^2.0.0"
@@ -1529,21 +1220,21 @@
 
 "@smithy/util-body-length-browser@^2.0.1":
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-2.0.1.tgz#424485cc81c640d18c17c683e0e6edb57e8e2ab9"
+  resolved "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-2.0.1.tgz"
   integrity sha512-NXYp3ttgUlwkaug4bjBzJ5+yIbUbUx8VsSLuHZROQpoik+gRkIBeEG9MPVYfvPNpuXb/puqodeeUXcKFe7BLOQ==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/util-body-length-node@^2.1.0":
   version "2.1.0"
-  resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-2.1.0.tgz#313a5f7c5017947baf5fa018bfc22628904bbcfa"
+  resolved "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-2.1.0.tgz"
   integrity sha512-/li0/kj/y3fQ3vyzn36NTLGmUwAICb7Jbe/CsWCktW363gh1MOcpEcSO3mJ344Gv2dqz8YJCLQpb6hju/0qOWw==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/util-buffer-from@^2.0.0":
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz#7eb75d72288b6b3001bc5f75b48b711513091deb"
+  resolved "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz"
   integrity sha512-/YNnLoHsR+4W4Vf2wL5lGv0ksg8Bmk3GEGxn2vEQt52AQaPSCuaO5PM5VM7lP1K9qHRKHwrPGktqVoAHKWHxzw==
   dependencies:
     "@smithy/is-array-buffer" "^2.0.0"
@@ -1551,14 +1242,14 @@
 
 "@smithy/util-config-provider@^2.0.0":
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-2.0.0.tgz#4dd6a793605559d94267312fd06d0f58784b4c38"
+  resolved "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-2.0.0.tgz"
   integrity sha512-xCQ6UapcIWKxXHEU4Mcs2s7LcFQRiU3XEluM2WcCjjBtQkUN71Tb+ydGmJFPxMUrW/GWMgQEEGipLym4XG0jZg==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/util-defaults-mode-browser@^2.0.22":
   version "2.0.23"
-  resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-2.0.23.tgz#27bce42b7a55974ed4aff5b2b870256ce383613a"
+  resolved "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-2.0.23.tgz"
   integrity sha512-2u+7t7Wgz1jlfsf6il3pz6DIzyJHS3qrnNnmATICm00pQeqp2D4kUOYauOgKGIeKgVpwzzq8+hFQe749r3xR5w==
   dependencies:
     "@smithy/property-provider" "^2.0.16"
@@ -1569,7 +1260,7 @@
 
 "@smithy/util-defaults-mode-node@^2.0.29":
   version "2.0.30"
-  resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-2.0.30.tgz#12c4c737a9f8b151cfbd951348cdf94febd083dd"
+  resolved "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-2.0.30.tgz"
   integrity sha512-nmcmEyRlClNprp7mBnUzfmW6HrKQK+yvl+cyXCRUoQSxRvZuLDrztV+JD+zr3qV/oirEc4Q0QNIrrhTDCE6JeA==
   dependencies:
     "@smithy/config-resolver" "^2.0.21"
@@ -1582,7 +1273,7 @@
 
 "@smithy/util-endpoints@^1.0.7":
   version "1.0.7"
-  resolved "https://registry.yarnpkg.com/@smithy/util-endpoints/-/util-endpoints-1.0.7.tgz#5a258ac7838dea085660060b515cd2d19f19a4bc"
+  resolved "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-1.0.7.tgz"
   integrity sha512-Q2gEind3jxoLk6hdKWyESMU7LnXz8aamVwM+VeVjOYzYT1PalGlY/ETa48hv2YpV4+YV604y93YngyzzzQ4IIA==
   dependencies:
     "@smithy/node-config-provider" "^2.1.8"
@@ -1591,14 +1282,14 @@
 
 "@smithy/util-hex-encoding@^2.0.0":
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-2.0.0.tgz#0aa3515acd2b005c6d55675e377080a7c513b59e"
+  resolved "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.0.0.tgz"
   integrity sha512-c5xY+NUnFqG6d7HFh1IFfrm3mGl29lC+vF+geHv4ToiuJCBmIfzx6IeHLg+OgRdPFKDXIw6pvi+p3CsscaMcMA==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/util-middleware@^2.0.8":
   version "2.0.8"
-  resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-2.0.8.tgz#2ec1da1190d09b69512ce0248ebd5e819e3c8a92"
+  resolved "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-2.0.8.tgz"
   integrity sha512-qkvqQjM8fRGGA8P2ydWylMhenCDP8VlkPn8kiNuFEaFz9xnUKC2irfqsBSJrfrOB9Qt6pQsI58r3zvvumhFMkw==
   dependencies:
     "@smithy/types" "^2.7.0"
@@ -1606,7 +1297,7 @@
 
 "@smithy/util-retry@^2.0.8":
   version "2.0.8"
-  resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-2.0.8.tgz#61f8db11e4fe60975cb9fb2eada173f5024a06f3"
+  resolved "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-2.0.8.tgz"
   integrity sha512-cQTPnVaVFMjjS6cb44WV2yXtHVyXDC5icKyIbejMarJEApYeJWpBU3LINTxHqp/tyLI+MZOUdosr2mZ3sdziNg==
   dependencies:
     "@smithy/service-error-classification" "^2.0.8"
@@ -1615,7 +1306,7 @@
 
 "@smithy/util-stream@^2.0.23":
   version "2.0.23"
-  resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-2.0.23.tgz#468ad29913d091092317cfea2d8ac5b866326a07"
+  resolved "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-2.0.23.tgz"
   integrity sha512-OJMWq99LAZJUzUwTk+00plyxX3ESktBaGPhqNIEVab+53gLULiWN9B/8bRABLg0K6R6Xg4t80uRdhk3B/LZqMQ==
   dependencies:
     "@smithy/fetch-http-handler" "^2.3.1"
@@ -1629,14 +1320,14 @@
 
 "@smithy/util-uri-escape@^2.0.0":
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-2.0.0.tgz#19955b1a0f517a87ae77ac729e0e411963dfda95"
+  resolved "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-2.0.0.tgz"
   integrity sha512-ebkxsqinSdEooQduuk9CbKcI+wheijxEb3utGXkCoYQkJnwTnLbH1JXGimJtUkQwNQbsbuYwG2+aFVyZf5TLaw==
   dependencies:
     tslib "^2.5.0"
 
 "@smithy/util-utf8@^2.0.2":
   version "2.0.2"
-  resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.0.2.tgz#626b3e173ad137208e27ed329d6bea70f4a1a7f7"
+  resolved "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.0.2.tgz"
   integrity sha512-qOiVORSPm6Ce4/Yu6hbSgNHABLP2VMv8QOC3tTDNHHlWY19pPyc++fBTbZPtx6egPXi4HQxKDnMxVxpbtX2GoA==
   dependencies:
     "@smithy/util-buffer-from" "^2.0.0"
@@ -1644,31 +1335,26 @@
 
 "@smithy/util-waiter@^2.0.15":
   version "2.0.15"
-  resolved "https://registry.yarnpkg.com/@smithy/util-waiter/-/util-waiter-2.0.15.tgz#b02a42bf1b82f07973d1756a0ee10fafa1fbf58e"
+  resolved "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-2.0.15.tgz"
   integrity sha512-9Y+btzzB7MhLADW7xgD6SjvmoYaRkrb/9SCbNGmNdfO47v38rxb90IGXyDtAK0Shl9bMthTmLgjlfYc+vtz2Qw==
   dependencies:
     "@smithy/abort-controller" "^2.0.15"
     "@smithy/types" "^2.7.0"
     tslib "^2.5.0"
 
-"@trysound/sax@0.2.0":
-  version "0.2.0"
-  resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad"
-  integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==
-
-"@types/estree@*", "@types/estree@^1.0.0":
+"@types/estree@1.0.5":
   version "1.0.5"
-  resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4"
+  resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz"
   integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==
 
 "@types/linkify-it@*":
   version "3.0.5"
-  resolved "https://registry.yarnpkg.com/@types/linkify-it/-/linkify-it-3.0.5.tgz#1e78a3ac2428e6d7e6c05c1665c242023a4601d8"
+  resolved "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-3.0.5.tgz"
   integrity sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==
 
-"@types/markdown-it@^12.2.3":
+"@types/markdown-it@*", "@types/markdown-it@^12.2.3":
   version "12.2.3"
-  resolved "https://registry.yarnpkg.com/@types/markdown-it/-/markdown-it-12.2.3.tgz#0d6f6e5e413f8daaa26522904597be3d6cd93b51"
+  resolved "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-12.2.3.tgz"
   integrity sha512-GKMHFfv3458yYy+v/N8gjufHO6MSZKCOXpZc5GXIWWy8uldwfmPn98vp81gZ5f9SVw8YYBctgfJ22a2d7AOMeQ==
   dependencies:
     "@types/linkify-it" "*"
@@ -1676,74 +1362,85 @@
 
 "@types/mdurl@*":
   version "1.0.5"
-  resolved "https://registry.yarnpkg.com/@types/mdurl/-/mdurl-1.0.5.tgz#3e0d2db570e9fb6ccb2dc8fde0be1d79ac810d39"
+  resolved "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.5.tgz"
   integrity sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==
 
-"@types/resolve@1.20.2":
-  version "1.20.2"
-  resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975"
-  integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==
-
 "@vascosantos/moving-average@^1.1.0":
   version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vascosantos/moving-average/-/moving-average-1.1.0.tgz#8d5793b09b2d6021ba5e620c6a0f876c20db7eaa"
+  resolved "https://registry.npmjs.org/@vascosantos/moving-average/-/moving-average-1.1.0.tgz"
   integrity sha512-MVEJ4vWAPNbrGLjz7ITnHYg+YXZ6ijAqtH5/cHwSoCpbvuJ98aLXwFfPKAUfZpJMQR5uXB58UJajbY130IRF/w==
 
 abortable-iterator@^5.0.1:
   version "5.0.1"
-  resolved "https://registry.yarnpkg.com/abortable-iterator/-/abortable-iterator-5.0.1.tgz#5d93eba6fa8287a973a9ea090c64ca08b3777780"
+  resolved "https://registry.npmjs.org/abortable-iterator/-/abortable-iterator-5.0.1.tgz"
   integrity sha512-hlZ5Z8UwqrKsJcelVPEqDduZowJPBQJ9ZhBC2FXpja3lXy8X6MoI5uMzIgmrA8+3jcVnp8TF/tx+IBBqYJNUrg==
   dependencies:
     get-iterator "^2.0.0"
     it-stream-types "^2.0.1"
 
-acorn@^8.10.0:
-  version "8.11.3"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
-  integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==
-
 acorn@^8.8.2:
   version "8.11.2"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b"
+  resolved "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz"
   integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w==
 
-ansi-styles@^3.2.1:
-  version "3.2.1"
-  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
-  integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
+ansi-regex@^5.0.1:
+  version "5.0.1"
+  resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"
+  integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
+
+ansi-regex@^6.0.1:
+  version "6.0.1"
+  resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz"
+  integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==
+
+ansi-styles@^4.0.0:
+  version "4.3.0"
+  resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz"
+  integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
   dependencies:
-    color-convert "^1.9.0"
+    color-convert "^2.0.1"
+
+ansi-styles@^6.1.0:
+  version "6.2.1"
+  resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz"
+  integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==
+
+any-promise@^1.0.0:
+  version "1.3.0"
+  resolved "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz"
+  integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==
 
 any-signal@^4.1.1:
   version "4.1.1"
-  resolved "https://registry.yarnpkg.com/any-signal/-/any-signal-4.1.1.tgz#928416c355c66899e6b2a91cad4488f0324bae03"
+  resolved "https://registry.npmjs.org/any-signal/-/any-signal-4.1.1.tgz"
   integrity sha512-iADenERppdC+A2YKbOXXB2WUeABLaM6qnpZ70kZbPZ1cZMMJ7eF+3CaYm+/PhBizgkzlvssC7QuHS30oOiQYWA==
 
+anymatch@~3.1.2:
+  version "3.1.3"
+  resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz"
+  integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==
+  dependencies:
+    normalize-path "^3.0.0"
+    picomatch "^2.0.4"
+
 argparse@^2.0.1:
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
+  resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz"
   integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
 
+array-union@^2.1.0:
+  version "2.1.0"
+  resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz"
+  integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==
+
 asynckit@^0.4.0:
   version "0.4.0"
-  resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
+  resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz"
   integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==
 
-autoprefixer@^10.4.14:
-  version "10.4.16"
-  resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.16.tgz#fad1411024d8670880bdece3970aa72e3572feb8"
-  integrity sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==
-  dependencies:
-    browserslist "^4.21.10"
-    caniuse-lite "^1.0.30001538"
-    fraction.js "^4.3.6"
-    normalize-range "^0.1.2"
-    picocolors "^1.0.0"
-    postcss-value-parser "^4.2.0"
-
 axios@1.6.2:
   version "1.6.2"
-  resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2"
+  resolved "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz"
   integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==
   dependencies:
     follow-redirects "^1.15.0"
@@ -1752,17 +1449,22 @@ axios@1.6.2:
 
 balanced-match@^1.0.0:
   version "1.0.2"
-  resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
+  resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz"
   integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
 
 base64-js@^1.0.2, base64-js@^1.3.1:
   version "1.5.1"
-  resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
+  resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz"
   integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
 
+binary-extensions@^2.0.0:
+  version "2.2.0"
+  resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz"
+  integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
+
 bl@^5.0.0:
   version "5.1.0"
-  resolved "https://registry.yarnpkg.com/bl/-/bl-5.1.0.tgz#183715f678c7188ecef9fe475d90209400624273"
+  resolved "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz"
   integrity sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==
   dependencies:
     buffer "^6.0.3"
@@ -1771,7 +1473,7 @@ bl@^5.0.0:
 
 blockstore-core@^4.0.0:
   version "4.3.8"
-  resolved "https://registry.yarnpkg.com/blockstore-core/-/blockstore-core-4.3.8.tgz#2630e2e20d8e167c8e0c51bffcb25369829461f3"
+  resolved "https://registry.npmjs.org/blockstore-core/-/blockstore-core-4.3.8.tgz"
   integrity sha512-Agunhjw9w0I1OoJn012OpzJwBRm3Nf+v64N2FaZSsF3UGhoQAu4RePLuIBsZrPh4XRqT5Yg1rHoBYJGDhDmkWQ==
   dependencies:
     "@libp2p/logger" "^4.0.1"
@@ -1787,7 +1489,7 @@ blockstore-core@^4.0.0:
 
 blockstore-fs@1.1.8:
   version "1.1.8"
-  resolved "https://registry.yarnpkg.com/blockstore-fs/-/blockstore-fs-1.1.8.tgz#4851d7f49f974a29779bedc5df6754280311f1ad"
+  resolved "https://registry.npmjs.org/blockstore-fs/-/blockstore-fs-1.1.8.tgz"
   integrity sha512-1KD1+yEkxszZ3GWQdJbGgXAOs12LJC/Pit7JCPueJT/Pjt9GWtGZ4+8mgoaR3bjXVBgBIdhNlUxxw2NS787noA==
   dependencies:
     blockstore-core "^4.0.0"
@@ -1801,135 +1503,106 @@ blockstore-fs@1.1.8:
 
 bluebird@^3.7.2:
   version "3.7.2"
-  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
+  resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz"
   integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==
 
-boolbase@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
-  integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==
-
 bowser@^2.11.0:
   version "2.11.0"
-  resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f"
+  resolved "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz"
   integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==
 
 brace-expansion@^2.0.1:
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae"
+  resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz"
   integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==
   dependencies:
     balanced-match "^1.0.0"
 
-braces@^3.0.2:
+braces@^3.0.2, braces@~3.0.2:
   version "3.0.2"
-  resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
+  resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz"
   integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
   dependencies:
     fill-range "^7.0.1"
 
-browserslist@^4.0.0, browserslist@^4.21.10, browserslist@^4.21.4, browserslist@^4.22.2:
-  version "4.22.2"
-  resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b"
-  integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==
-  dependencies:
-    caniuse-lite "^1.0.30001565"
-    electron-to-chromium "^1.4.601"
-    node-releases "^2.0.14"
-    update-browserslist-db "^1.0.13"
-
 buffer-from@^1.0.0:
   version "1.1.2"
-  resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
+  resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz"
   integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
 
+buffer@^6.0.3:
+  version "6.0.3"
+  resolved "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz"
+  integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==
+  dependencies:
+    base64-js "^1.3.1"
+    ieee754 "^1.2.1"
+
 buffer@5.6.0:
   version "5.6.0"
-  resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.6.0.tgz#a31749dc7d81d84db08abf937b6b8c4033f62786"
+  resolved "https://registry.npmjs.org/buffer/-/buffer-5.6.0.tgz"
   integrity sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==
   dependencies:
     base64-js "^1.0.2"
     ieee754 "^1.1.4"
 
-buffer@^6.0.3:
-  version "6.0.3"
-  resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6"
-  integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==
+bundle-require@^4.0.0:
+  version "4.0.2"
+  resolved "https://registry.npmjs.org/bundle-require/-/bundle-require-4.0.2.tgz"
+  integrity sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==
   dependencies:
-    base64-js "^1.3.1"
-    ieee754 "^1.2.1"
+    load-tsconfig "^0.2.3"
 
-builtin-modules@^3.3.0:
-  version "3.3.0"
-  resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6"
-  integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==
+cac@^6.7.12:
+  version "6.7.14"
+  resolved "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz"
+  integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==
 
 camel-case@^4.1.2:
   version "4.1.2"
-  resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a"
+  resolved "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz"
   integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==
   dependencies:
     pascal-case "^3.1.2"
     tslib "^2.0.3"
 
-caniuse-api@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0"
-  integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==
-  dependencies:
-    browserslist "^4.0.0"
-    caniuse-lite "^1.0.0"
-    lodash.memoize "^4.1.2"
-    lodash.uniq "^4.5.0"
-
-caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001538, caniuse-lite@^1.0.30001565:
-  version "1.0.30001572"
-  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001572.tgz#1ccf7dc92d2ee2f92ed3a54e11b7b4a3041acfa0"
-  integrity sha512-1Pbh5FLmn5y4+QhNyJE9j3/7dK44dGB83/ZMjv/qJk86TvDbjk0LosiZo0i0WB0Vx607qMX9jYrn1VLHCkN4rw==
-
 catharsis@^0.9.0:
   version "0.9.0"
-  resolved "https://registry.yarnpkg.com/catharsis/-/catharsis-0.9.0.tgz#40382a168be0e6da308c277d3a2b3eb40c7d2121"
+  resolved "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz"
   integrity sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==
   dependencies:
     lodash "^4.17.15"
 
-cborg@^4.0.0, cborg@^4.0.3:
+cborg@^4.0.0, cborg@^4.0.3, cborg@^4.0.5:
   version "4.0.5"
-  resolved "https://registry.yarnpkg.com/cborg/-/cborg-4.0.5.tgz#20680c0e8d0521e5700b5d9a1d0a644207ca2878"
+  resolved "https://registry.npmjs.org/cborg/-/cborg-4.0.5.tgz"
   integrity sha512-q8TAjprr8pn9Fp53rOIGp/UFDdFY6os2Nq62YogPSIzczJD9M6g2b6igxMkpCiZZKJ0kn/KzDLDvG+EqBIEeCg==
 
-chalk@^2.4.2:
-  version "2.4.2"
-  resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
-  integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
-  dependencies:
-    ansi-styles "^3.2.1"
-    escape-string-regexp "^1.0.5"
-    supports-color "^5.3.0"
-
-chalk@^5.3.0:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385"
-  integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==
-
-citty@^0.1.2, citty@^0.1.5:
-  version "0.1.5"
-  resolved "https://registry.yarnpkg.com/citty/-/citty-0.1.5.tgz#fe37ceae5dc764af75eb2fece99d2bf527ea4e50"
-  integrity sha512-AS7n5NSc0OQVMV9v6wt3ByujNIrne0/cTjiC2MYqhvao57VNfiuVksTSr2p17nVOhEr2KtqiAkGwHcgMC/qUuQ==
-  dependencies:
-    consola "^3.2.3"
+chokidar@^3.5.1:
+  version "3.5.3"
+  resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz"
+  integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==
+  dependencies:
+    anymatch "~3.1.2"
+    braces "~3.0.2"
+    glob-parent "~5.1.2"
+    is-binary-path "~2.1.0"
+    is-glob "~4.0.1"
+    normalize-path "~3.0.0"
+    readdirp "~3.6.0"
+  optionalDependencies:
+    fsevents "~2.3.2"
 
 clean-css@~5.3.2:
   version "5.3.3"
-  resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.3.tgz#b330653cd3bd6b75009cc25c714cae7b93351ccd"
+  resolved "https://registry.npmjs.org/clean-css/-/clean-css-5.3.3.tgz"
   integrity sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg==
   dependencies:
     source-map "~0.6.0"
 
 clean-jsdoc-theme@4.2.17:
   version "4.2.17"
-  resolved "https://registry.yarnpkg.com/clean-jsdoc-theme/-/clean-jsdoc-theme-4.2.17.tgz#a815401d9d881076fddec0574f71ba732995dbe7"
+  resolved "https://registry.npmjs.org/clean-jsdoc-theme/-/clean-jsdoc-theme-4.2.17.tgz"
   integrity sha512-5SbJNXcQHUXd7N13g+3OpGFiBQdxz36xwEP3p1r1vbo/apLcDRtugaFdUZ56H6Rvlb68Q33EChoBkajSlnD11w==
   dependencies:
     "@jsdoc/salty" "^0.2.4"
@@ -1939,319 +1612,181 @@ clean-jsdoc-theme@4.2.17:
     lodash "^4.17.21"
     showdown "^2.1.0"
 
-color-convert@^1.9.0:
-  version "1.9.3"
-  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
-  integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
+color-convert@^2.0.1:
+  version "2.0.1"
+  resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz"
+  integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
   dependencies:
-    color-name "1.1.3"
-
-color-name@1.1.3:
-  version "1.1.3"
-  resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
-  integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
+    color-name "~1.1.4"
 
-colord@^2.9.1:
-  version "2.9.3"
-  resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43"
-  integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==
+color-name@~1.1.4:
+  version "1.1.4"
+  resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz"
+  integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
 
 combined-stream@^1.0.8:
   version "1.0.8"
-  resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
+  resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz"
   integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
   dependencies:
     delayed-stream "~1.0.0"
 
 commander@^10.0.0:
   version "10.0.1"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06"
+  resolved "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz"
   integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==
 
 commander@^2.20.0:
   version "2.20.3"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
+  resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz"
   integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
 
-commander@^7.2.0:
-  version "7.2.0"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7"
-  integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==
+commander@^4.0.0:
+  version "4.1.1"
+  resolved "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz"
+  integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==
 
 commander@^9.0.0:
   version "9.5.0"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-9.5.0.tgz#bc08d1eb5cedf7ccb797a96199d41c7bc3e60d30"
+  resolved "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz"
   integrity sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==
 
-commondir@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
-  integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==
-
-consola@^3.2.3:
-  version "3.2.3"
-  resolved "https://registry.yarnpkg.com/consola/-/consola-3.2.3.tgz#0741857aa88cfa0d6fd53f1cff0375136e98502f"
-  integrity sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ==
-
-convert-source-map@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a"
-  integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==
-
-css-declaration-sorter@^7.0.0:
-  version "7.1.1"
-  resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-7.1.1.tgz#9796bcc257b4647c39993bda8d431ce32b666f80"
-  integrity sha512-dZ3bVTEEc1vxr3Bek9vGwfB5Z6ESPULhcRvO472mfjVnj8jRcTnKO8/JTczlvxM10Myb+wBM++1MtdO76eWcaQ==
-
-css-select@^5.1.0:
-  version "5.1.0"
-  resolved "https://registry.yarnpkg.com/css-select/-/css-select-5.1.0.tgz#b8ebd6554c3637ccc76688804ad3f6a6fdaea8a6"
-  integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==
-  dependencies:
-    boolbase "^1.0.0"
-    css-what "^6.1.0"
-    domhandler "^5.0.2"
-    domutils "^3.0.1"
-    nth-check "^2.0.1"
-
-css-tree@^2.2.1:
-  version "2.3.1"
-  resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.3.1.tgz#10264ce1e5442e8572fc82fbe490644ff54b5c20"
-  integrity sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==
-  dependencies:
-    mdn-data "2.0.30"
-    source-map-js "^1.0.1"
-
-css-tree@~2.2.0:
-  version "2.2.1"
-  resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.2.1.tgz#36115d382d60afd271e377f9c5f67d02bd48c032"
-  integrity sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==
-  dependencies:
-    mdn-data "2.0.28"
-    source-map-js "^1.0.1"
-
-css-what@^6.1.0:
-  version "6.1.0"
-  resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4"
-  integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==
-
-cssesc@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee"
-  integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==
-
-cssnano-preset-default@^6.0.2:
-  version "6.0.2"
-  resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-6.0.2.tgz#0cc13e2da462b0d1632b1ebea4af78e7484592e8"
-  integrity sha512-VnZybFeZ63AiVqIUNlxqMxpj9VU8B5j0oKgP7WyVt/7mkyf97KsYkNzsPTV/RVmy54Pg7cBhOK4WATbdCB44gw==
-  dependencies:
-    css-declaration-sorter "^7.0.0"
-    cssnano-utils "^4.0.1"
-    postcss-calc "^9.0.1"
-    postcss-colormin "^6.0.1"
-    postcss-convert-values "^6.0.1"
-    postcss-discard-comments "^6.0.1"
-    postcss-discard-duplicates "^6.0.1"
-    postcss-discard-empty "^6.0.1"
-    postcss-discard-overridden "^6.0.1"
-    postcss-merge-longhand "^6.0.1"
-    postcss-merge-rules "^6.0.2"
-    postcss-minify-font-values "^6.0.1"
-    postcss-minify-gradients "^6.0.1"
-    postcss-minify-params "^6.0.1"
-    postcss-minify-selectors "^6.0.1"
-    postcss-normalize-charset "^6.0.1"
-    postcss-normalize-display-values "^6.0.1"
-    postcss-normalize-positions "^6.0.1"
-    postcss-normalize-repeat-style "^6.0.1"
-    postcss-normalize-string "^6.0.1"
-    postcss-normalize-timing-functions "^6.0.1"
-    postcss-normalize-unicode "^6.0.1"
-    postcss-normalize-url "^6.0.1"
-    postcss-normalize-whitespace "^6.0.1"
-    postcss-ordered-values "^6.0.1"
-    postcss-reduce-initial "^6.0.1"
-    postcss-reduce-transforms "^6.0.1"
-    postcss-svgo "^6.0.1"
-    postcss-unique-selectors "^6.0.1"
-
-cssnano-utils@^4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-4.0.1.tgz#fd18b42f95938bf55ab47967705355d6047bf1da"
-  integrity sha512-6qQuYDqsGoiXssZ3zct6dcMxiqfT6epy7x4R0TQJadd4LWO3sPR6JH6ZByOvVLoZ6EdwPGgd7+DR1EmX3tiXQQ==
-
-cssnano@^6.0.1:
-  version "6.0.2"
-  resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-6.0.2.tgz#7b49d60ce51e1dea3d569795f751ee49e97124c9"
-  integrity sha512-Tu9wv8UdN6CoiQnIVkCNvi+0rw/BwFWOJBlg2bVfEyKaadSuE3Gq/DD8tniVvggTJGwK88UjqZp7zL5sv6t1aA==
-  dependencies:
-    cssnano-preset-default "^6.0.2"
-    lilconfig "^3.0.0"
-
-csso@5.0.5:
-  version "5.0.5"
-  resolved "https://registry.yarnpkg.com/csso/-/csso-5.0.5.tgz#f9b7fe6cc6ac0b7d90781bb16d5e9874303e2ca6"
-  integrity sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==
+cross-spawn@^7.0.0, cross-spawn@^7.0.3:
+  version "7.0.3"
+  resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz"
+  integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
   dependencies:
-    css-tree "~2.2.0"
+    path-key "^3.1.0"
+    shebang-command "^2.0.0"
+    which "^2.0.1"
 
-debug@^4.1.0, debug@^4.3.1, debug@^4.3.4:
+debug@^4.3.1, debug@^4.3.4:
   version "4.3.4"
-  resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
+  resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz"
   integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
   dependencies:
     ms "2.1.2"
 
-deepmerge@^4.2.2:
-  version "4.3.1"
-  resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a"
-  integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==
-
-defu@^6.1.2, defu@^6.1.3:
-  version "6.1.3"
-  resolved "https://registry.yarnpkg.com/defu/-/defu-6.1.3.tgz#6d7f56bc61668e844f9f593ace66fd67ef1205fd"
-  integrity sha512-Vy2wmG3NTkmHNg/kzpuvHhkqeIx3ODWqasgCRbKtbXEN0G+HpEEv9BtJLp7ZG1CZloFaC41Ah3ZFbq7aqCqMeQ==
-
 delayed-stream@~1.0.0:
   version "1.0.0"
-  resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
+  resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz"
   integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
 
 dir-glob@^3.0.1:
   version "3.0.1"
-  resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f"
+  resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz"
   integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==
   dependencies:
     path-type "^4.0.0"
 
 dns-over-http-resolver@3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/dns-over-http-resolver/-/dns-over-http-resolver-3.0.0.tgz#2a8edcfb1c830cc3fff0cd37f01b824a55fa209a"
+  resolved "https://registry.npmjs.org/dns-over-http-resolver/-/dns-over-http-resolver-3.0.0.tgz"
   integrity sha512-5+BI+B7n8LKhNaEZBYErr+CBd9t5nYtjunByLhrLGtZ+i3TRgiU8yE87pCjEBu2KOwNsD9ljpSXEbZ4S8xih5g==
   dependencies:
     debug "^4.3.4"
     receptacle "^1.3.2"
 
-dom-serializer@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53"
-  integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==
-  dependencies:
-    domelementtype "^2.3.0"
-    domhandler "^5.0.2"
-    entities "^4.2.0"
-
-domelementtype@^2.3.0:
-  version "2.3.0"
-  resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d"
-  integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==
-
-domhandler@^5.0.2, domhandler@^5.0.3:
-  version "5.0.3"
-  resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31"
-  integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==
-  dependencies:
-    domelementtype "^2.3.0"
-
-domutils@^3.0.1:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.1.0.tgz#c47f551278d3dc4b0b1ab8cbb42d751a6f0d824e"
-  integrity sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==
-  dependencies:
-    dom-serializer "^2.0.0"
-    domelementtype "^2.3.0"
-    domhandler "^5.0.3"
-
 dot-case@^3.0.4:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751"
+  resolved "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz"
   integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==
   dependencies:
     no-case "^3.0.4"
     tslib "^2.0.3"
 
-electron-to-chromium@^1.4.601:
-  version "1.4.616"
-  resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.616.tgz#4bddbc2c76e1e9dbf449ecd5da3d8119826ea4fb"
-  integrity sha512-1n7zWYh8eS0L9Uy+GskE0lkBUNK83cXTVJI0pU3mGprFsbfSdAc15VTFbo+A+Bq4pwstmL30AVcEU3Fo463lNg==
+eastasianwidth@^0.2.0:
+  version "0.2.0"
+  resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz"
+  integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==
+
+emoji-regex@^8.0.0:
+  version "8.0.0"
+  resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"
+  integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==
 
-entities@^4.2.0, entities@^4.4.0:
+emoji-regex@^9.2.2:
+  version "9.2.2"
+  resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz"
+  integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==
+
+entities@^4.4.0:
   version "4.5.0"
-  resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48"
+  resolved "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz"
   integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==
 
 entities@~2.1.0:
   version "2.1.0"
-  resolved "https://registry.yarnpkg.com/entities/-/entities-2.1.0.tgz#992d3129cf7df6870b96c57858c249a120f8b8b5"
+  resolved "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz"
   integrity sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==
 
 err-code@^3.0.1:
   version "3.0.1"
-  resolved "https://registry.yarnpkg.com/err-code/-/err-code-3.0.1.tgz#a444c7b992705f2b120ee320b09972eef331c920"
+  resolved "https://registry.npmjs.org/err-code/-/err-code-3.0.1.tgz"
   integrity sha512-GiaH0KJUewYok+eeY05IIgjtAe4Yltygk9Wqp1V5yVWLdhf0hYZchRjNIT9bb0mSwRcIusT3cx7PJUf3zEIfUA==
 
-esbuild@^0.19.2, esbuild@^0.19.7:
-  version "0.19.10"
-  resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.19.10.tgz#55e83e4a6b702e3498b9f872d84bfb4ebcb6d16e"
-  integrity sha512-S1Y27QGt/snkNYrRcswgRFqZjaTG5a5xM3EQo97uNBnH505pdzSNe/HLBq1v0RO7iK/ngdbhJB6mDAp0OK+iUA==
+esbuild@^0.19.2, esbuild@>=0.17:
+  version "0.19.12"
+  resolved "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz"
+  integrity sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==
   optionalDependencies:
-    "@esbuild/aix-ppc64" "0.19.10"
-    "@esbuild/android-arm" "0.19.10"
-    "@esbuild/android-arm64" "0.19.10"
-    "@esbuild/android-x64" "0.19.10"
-    "@esbuild/darwin-arm64" "0.19.10"
-    "@esbuild/darwin-x64" "0.19.10"
-    "@esbuild/freebsd-arm64" "0.19.10"
-    "@esbuild/freebsd-x64" "0.19.10"
-    "@esbuild/linux-arm" "0.19.10"
-    "@esbuild/linux-arm64" "0.19.10"
-    "@esbuild/linux-ia32" "0.19.10"
-    "@esbuild/linux-loong64" "0.19.10"
-    "@esbuild/linux-mips64el" "0.19.10"
-    "@esbuild/linux-ppc64" "0.19.10"
-    "@esbuild/linux-riscv64" "0.19.10"
-    "@esbuild/linux-s390x" "0.19.10"
-    "@esbuild/linux-x64" "0.19.10"
-    "@esbuild/netbsd-x64" "0.19.10"
-    "@esbuild/openbsd-x64" "0.19.10"
-    "@esbuild/sunos-x64" "0.19.10"
-    "@esbuild/win32-arm64" "0.19.10"
-    "@esbuild/win32-ia32" "0.19.10"
-    "@esbuild/win32-x64" "0.19.10"
-
-escalade@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
-  integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
-
-escape-string-regexp@^1.0.5:
-  version "1.0.5"
-  resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
-  integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
+    "@esbuild/aix-ppc64" "0.19.12"
+    "@esbuild/android-arm" "0.19.12"
+    "@esbuild/android-arm64" "0.19.12"
+    "@esbuild/android-x64" "0.19.12"
+    "@esbuild/darwin-arm64" "0.19.12"
+    "@esbuild/darwin-x64" "0.19.12"
+    "@esbuild/freebsd-arm64" "0.19.12"
+    "@esbuild/freebsd-x64" "0.19.12"
+    "@esbuild/linux-arm" "0.19.12"
+    "@esbuild/linux-arm64" "0.19.12"
+    "@esbuild/linux-ia32" "0.19.12"
+    "@esbuild/linux-loong64" "0.19.12"
+    "@esbuild/linux-mips64el" "0.19.12"
+    "@esbuild/linux-ppc64" "0.19.12"
+    "@esbuild/linux-riscv64" "0.19.12"
+    "@esbuild/linux-s390x" "0.19.12"
+    "@esbuild/linux-x64" "0.19.12"
+    "@esbuild/netbsd-x64" "0.19.12"
+    "@esbuild/openbsd-x64" "0.19.12"
+    "@esbuild/sunos-x64" "0.19.12"
+    "@esbuild/win32-arm64" "0.19.12"
+    "@esbuild/win32-ia32" "0.19.12"
+    "@esbuild/win32-x64" "0.19.12"
 
 escape-string-regexp@^2.0.0:
   version "2.0.0"
-  resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344"
+  resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz"
   integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==
 
-estree-walker@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac"
-  integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==
-
 eventemitter3@^5.0.1:
   version "5.0.1"
-  resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4"
+  resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz"
   integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==
 
-events@3.3.0, events@^3.3.0:
+events@^3.3.0, events@3.3.0:
   version "3.3.0"
-  resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
+  resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz"
   integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
 
-fast-glob@^3.3.0:
+execa@^5.0.0:
+  version "5.1.1"
+  resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz"
+  integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==
+  dependencies:
+    cross-spawn "^7.0.3"
+    get-stream "^6.0.0"
+    human-signals "^2.1.0"
+    is-stream "^2.0.0"
+    merge-stream "^2.0.0"
+    npm-run-path "^4.0.1"
+    onetime "^5.1.2"
+    signal-exit "^3.0.3"
+    strip-final-newline "^2.0.0"
+
+fast-glob@^3.2.9:
   version "3.3.2"
-  resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.2.tgz#a904501e57cfdd2ffcded45e99a54fef55e46129"
+  resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz"
   integrity sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==
   dependencies:
     "@nodelib/fs.stat" "^2.0.2"
@@ -2262,159 +1797,122 @@ fast-glob@^3.3.0:
 
 fast-write-atomic@^0.2.0:
   version "0.2.1"
-  resolved "https://registry.yarnpkg.com/fast-write-atomic/-/fast-write-atomic-0.2.1.tgz#7ee8ef0ce3c1f531043c09ae8e5143361ab17ede"
+  resolved "https://registry.npmjs.org/fast-write-atomic/-/fast-write-atomic-0.2.1.tgz"
   integrity sha512-WvJe06IfNYlr+6cO3uQkdKdy3Cb1LlCJSF8zRs2eT8yuhdbSlR9nIt+TgQ92RUxiRrQm+/S7RARnMfCs5iuAjw==
 
 fast-xml-parser@4.2.5:
   version "4.2.5"
-  resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz#a6747a09296a6cb34f2ae634019bf1738f3b421f"
+  resolved "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz"
   integrity sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==
   dependencies:
     strnum "^1.0.5"
 
 fastq@^1.6.0:
-  version "1.16.0"
-  resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.16.0.tgz#83b9a9375692db77a822df081edb6a9cf6839320"
-  integrity sha512-ifCoaXsDrsdkWTtiNJX5uzHDsrck5TzfKKDcuFFTIrrc/BS076qgEIfoIy1VeZqViznfKiysPYTh/QeHtnIsYA==
+  version "1.17.0"
+  resolved "https://registry.npmjs.org/fastq/-/fastq-1.17.0.tgz"
+  integrity sha512-zGygtijUMT7jnk3h26kUms3BkSDp4IfIKjmnqI2tvx6nuBfiF1UqOxbnLfzdv+apBy+53oaImsKtMw/xYbW+1w==
   dependencies:
     reusify "^1.0.4"
 
 fill-range@^7.0.1:
   version "7.0.1"
-  resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
+  resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz"
   integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
   dependencies:
     to-regex-range "^5.0.1"
 
 follow-redirects@^1.15.0:
   version "1.15.3"
-  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a"
+  resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz"
   integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==
 
+foreground-child@^3.1.0:
+  version "3.1.1"
+  resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz"
+  integrity sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==
+  dependencies:
+    cross-spawn "^7.0.0"
+    signal-exit "^4.0.1"
+
 form-data@^4.0.0:
   version "4.0.0"
-  resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452"
+  resolved "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz"
   integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==
   dependencies:
     asynckit "^0.4.0"
     combined-stream "^1.0.8"
     mime-types "^2.1.12"
 
-fraction.js@^4.3.6:
-  version "4.3.7"
-  resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7"
-  integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==
-
 fs-extra@^10.1.0:
   version "10.1.0"
-  resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf"
+  resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz"
   integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==
   dependencies:
     graceful-fs "^4.2.0"
     jsonfile "^6.0.1"
     universalify "^2.0.0"
 
-fs-extra@^11.1.1:
-  version "11.2.0"
-  resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b"
-  integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==
-  dependencies:
-    graceful-fs "^4.2.0"
-    jsonfile "^6.0.1"
-    universalify "^2.0.0"
-
-fs.realpath@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
-  integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
-
 fsevents@~2.3.2:
   version "2.3.3"
-  resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6"
+  resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz"
   integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==
 
-function-bind@^1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c"
-  integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==
-
-gensync@^1.0.0-beta.2:
-  version "1.0.0-beta.2"
-  resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0"
-  integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==
-
 get-iterator@^2.0.0:
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/get-iterator/-/get-iterator-2.0.1.tgz#a904829f61bace789e0d64bd1a504c511a015c3f"
+  resolved "https://registry.npmjs.org/get-iterator/-/get-iterator-2.0.1.tgz"
   integrity sha512-7HuY/hebu4gryTDT7O/XY/fvY9wRByEGdK6QOa4of8npTcv0+NS6frFKABcf6S9EBAsveTuKTsZQQBFMMNILIg==
 
-glob-parent@^5.1.2:
+get-stream@^6.0.0:
+  version "6.0.1"
+  resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz"
+  integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==
+
+glob-parent@^5.1.2, glob-parent@~5.1.2:
   version "5.1.2"
-  resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
+  resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz"
   integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
   dependencies:
     is-glob "^4.0.1"
 
-glob@^8.0.3:
-  version "8.1.0"
-  resolved "https://registry.yarnpkg.com/glob/-/glob-8.1.0.tgz#d388f656593ef708ee3e34640fdfb99a9fd1c33e"
-  integrity sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==
+glob@^10.3.10:
+  version "10.3.10"
+  resolved "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz"
+  integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==
   dependencies:
-    fs.realpath "^1.0.0"
-    inflight "^1.0.4"
-    inherits "2"
-    minimatch "^5.0.1"
-    once "^1.3.0"
+    foreground-child "^3.1.0"
+    jackspeak "^2.3.5"
+    minimatch "^9.0.1"
+    minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
+    path-scurry "^1.10.1"
 
-globals@^11.1.0:
-  version "11.12.0"
-  resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
-  integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==
-
-globby@^13.2.2:
-  version "13.2.2"
-  resolved "https://registry.yarnpkg.com/globby/-/globby-13.2.2.tgz#63b90b1bf68619c2135475cbd4e71e66aa090592"
-  integrity sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==
+globby@^11.0.3:
+  version "11.1.0"
+  resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz"
+  integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==
   dependencies:
+    array-union "^2.1.0"
     dir-glob "^3.0.1"
-    fast-glob "^3.3.0"
-    ignore "^5.2.4"
+    fast-glob "^3.2.9"
+    ignore "^5.2.0"
     merge2 "^1.4.1"
-    slash "^4.0.0"
+    slash "^3.0.0"
 
 graceful-fs@^4.1.11, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0:
   version "4.2.11"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
+  resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz"
   integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
 
 hamt-sharding@^3.0.0, hamt-sharding@^3.0.2:
   version "3.0.2"
-  resolved "https://registry.yarnpkg.com/hamt-sharding/-/hamt-sharding-3.0.2.tgz#a3fba1e4e6b58469388a0e1458768c78c0cd95f6"
+  resolved "https://registry.npmjs.org/hamt-sharding/-/hamt-sharding-3.0.2.tgz"
   integrity sha512-f0DzBD2tSmLFdFsLAvOflIBqFPjerbA7BfmwO8mVho/5hXwgyyYhv+ijIzidQf/DpDX3bRjAQvhGoBFj+DBvPw==
   dependencies:
     sparse-array "^1.3.1"
     uint8arrays "^4.0.2"
 
-has-flag@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
-  integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
-
-hasown@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c"
-  integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==
-  dependencies:
-    function-bind "^1.1.2"
-
-hookable@^5.5.3:
-  version "5.5.3"
-  resolved "https://registry.yarnpkg.com/hookable/-/hookable-5.5.3.tgz#6cfc358984a1ef991e2518cb9ed4a778bbd3215d"
-  integrity sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==
-
 html-minifier-terser@^7.2.0:
   version "7.2.0"
-  resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-7.2.0.tgz#18752e23a2f0ed4b0f550f217bb41693e975b942"
+  resolved "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-7.2.0.tgz"
   integrity sha512-tXgn3QfqPIpGl9o+K5tpcj3/MN4SfLtsx2GWwBC3SSd0tXQGyF3gsSqad8loJgKZGM3ZxbYDd5yhiBIdWpmvLA==
   dependencies:
     camel-case "^4.1.2"
@@ -2425,32 +1923,29 @@ html-minifier-terser@^7.2.0:
     relateurl "^0.2.7"
     terser "^5.15.1"
 
+human-signals@^2.1.0:
+  version "2.1.0"
+  resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz"
+  integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==
+
 ieee754@^1.1.4, ieee754@^1.2.1:
   version "1.2.1"
-  resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
+  resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz"
   integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
 
-ignore@^5.2.4:
-  version "5.3.0"
-  resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78"
-  integrity sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==
+ignore@^5.2.0:
+  version "5.3.1"
+  resolved "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz"
+  integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==
 
-inflight@^1.0.4:
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
-  integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==
-  dependencies:
-    once "^1.3.0"
-    wrappy "1"
-
-inherits@2, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.4:
+inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.4:
   version "2.0.4"
-  resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+  resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz"
   integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
 
 interface-blockstore@^5.0.0:
   version "5.2.7"
-  resolved "https://registry.yarnpkg.com/interface-blockstore/-/interface-blockstore-5.2.7.tgz#ad27ad6bd86ddcb0b749abe3e772bb8680302e9b"
+  resolved "https://registry.npmjs.org/interface-blockstore/-/interface-blockstore-5.2.7.tgz"
   integrity sha512-B9UplmgUdQg15f/6xDJEbQYcjMm568cnqJsxSZYbDD0s6eQX5gKh58sd9H3aJEMosIy8T4vz9MwWWZuAOc3hQQ==
   dependencies:
     interface-store "^5.0.0"
@@ -2458,7 +1953,7 @@ interface-blockstore@^5.0.0:
 
 interface-datastore@^8.0.0, interface-datastore@^8.2.0:
   version "8.2.9"
-  resolved "https://registry.yarnpkg.com/interface-datastore/-/interface-datastore-8.2.9.tgz#caa6d6157b229abbc3df23fca6bc11ad391b839c"
+  resolved "https://registry.npmjs.org/interface-datastore/-/interface-datastore-8.2.9.tgz"
   integrity sha512-J/8PN8TnB5xxCRtgu9Vx3zExdOzcTU5/DBF2dlU41deX1GW6/SPpbJo5DRNSnvzfjmwJ7YhUOIFXyccUp8nuAA==
   dependencies:
     interface-store "^5.0.0"
@@ -2466,12 +1961,12 @@ interface-datastore@^8.0.0, interface-datastore@^8.2.0:
 
 interface-store@^5.0.0, interface-store@^5.0.1, interface-store@^5.1.0:
   version "5.1.5"
-  resolved "https://registry.yarnpkg.com/interface-store/-/interface-store-5.1.5.tgz#0c8c4001624d728789e43d01f977689705ac136f"
+  resolved "https://registry.npmjs.org/interface-store/-/interface-store-5.1.5.tgz"
   integrity sha512-X0KnJBk3o+YL13MxZBMwa88/b3Mdrpm0yPzkSTKDDVn9BSPH7UK6W+ZtIPO2bxKOQVmq7zqOwAnYnpfqWjb6/g==
 
 ipfs-bitswap@^19.0.0:
   version "19.0.2"
-  resolved "https://registry.yarnpkg.com/ipfs-bitswap/-/ipfs-bitswap-19.0.2.tgz#906bd9056e2483b8ae1a42b21f44ce9f50f3f11b"
+  resolved "https://registry.npmjs.org/ipfs-bitswap/-/ipfs-bitswap-19.0.2.tgz"
   integrity sha512-pm0EcnTAwMMkCmdXHw/a7uPXzQ4I/pxVFiQZ6Ebg/R64XxAky/bCOJRzmqsgqH0+prH2bXAOgzS0mOZdL+zFSw==
   dependencies:
     "@libp2p/interface" "^0.1.1"
@@ -2500,7 +1995,7 @@ ipfs-bitswap@^19.0.0:
 
 ipfs-unixfs-exporter@^13.1.0:
   version "13.2.2"
-  resolved "https://registry.yarnpkg.com/ipfs-unixfs-exporter/-/ipfs-unixfs-exporter-13.2.2.tgz#532ba28e7d716253ab0d937fb52b317b1fc4fd2e"
+  resolved "https://registry.npmjs.org/ipfs-unixfs-exporter/-/ipfs-unixfs-exporter-13.2.2.tgz"
   integrity sha512-poCxSte+SdQzuPc/Sm+gx/86VJu+IEsW6/Cfkq29yEUZDG8QuCvTkvuqAysKAYuN40aR9SjYqwYFRW/hsvspSw==
   dependencies:
     "@ipld/dag-cbor" "^9.0.0"
@@ -2523,7 +2018,7 @@ ipfs-unixfs-exporter@^13.1.0:
 
 ipfs-unixfs-importer@^15.1.0:
   version "15.2.1"
-  resolved "https://registry.yarnpkg.com/ipfs-unixfs-importer/-/ipfs-unixfs-importer-15.2.1.tgz#230c8d99c18f44588660d5bf061f990f772f161c"
+  resolved "https://registry.npmjs.org/ipfs-unixfs-importer/-/ipfs-unixfs-importer-15.2.1.tgz"
   integrity sha512-9ArBh7Xfz8gUSe8pq9c9ilBOXd1bbT3L+4xnI6w/usWLwnNT14p8WbFZjDD0MO1/PrD0PTUZuHNDS2l4EO+wPg==
   dependencies:
     "@ipld/dag-pb" "^4.0.0"
@@ -2545,110 +2040,106 @@ ipfs-unixfs-importer@^15.1.0:
 
 ipfs-unixfs@^11.0.0:
   version "11.1.0"
-  resolved "https://registry.yarnpkg.com/ipfs-unixfs/-/ipfs-unixfs-11.1.0.tgz#f8e3e0b52182b74e56327bd2952f5f9ac936f5fa"
+  resolved "https://registry.npmjs.org/ipfs-unixfs/-/ipfs-unixfs-11.1.0.tgz"
   integrity sha512-Lq37nKLJOpRFjx3rcg3y+ZwUxBX7jluKfIt5UPp6wb1L3dP0sj1yaLR0Yg2CdGYvHWyUpZD1iTnT8upL0ToDOw==
   dependencies:
     err-code "^3.0.1"
     protons-runtime "^5.0.0"
     uint8arraylist "^2.4.3"
 
-is-builtin-module@^3.2.1:
-  version "3.2.1"
-  resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169"
-  integrity sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==
-  dependencies:
-    builtin-modules "^3.3.0"
-
-is-core-module@^2.13.0:
-  version "2.13.1"
-  resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384"
-  integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==
+is-binary-path@~2.1.0:
+  version "2.1.0"
+  resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz"
+  integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
   dependencies:
-    hasown "^2.0.0"
+    binary-extensions "^2.0.0"
 
 is-extglob@^2.1.1:
   version "2.1.1"
-  resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
+  resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz"
   integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==
 
-is-glob@^4.0.1:
+is-fullwidth-code-point@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz"
+  integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
+
+is-glob@^4.0.1, is-glob@~4.0.1:
   version "4.0.3"
-  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
+  resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz"
   integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
   dependencies:
     is-extglob "^2.1.1"
 
-is-module@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591"
-  integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==
-
 is-number@^7.0.0:
   version "7.0.0"
-  resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
+  resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz"
   integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
 
 is-plain-obj@^2.1.0:
   version "2.1.0"
-  resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287"
+  resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz"
   integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==
 
-is-reference@1.2.1:
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-1.2.1.tgz#8b2dac0b371f4bc994fdeaba9eb542d03002d0b7"
-  integrity sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==
-  dependencies:
-    "@types/estree" "*"
+is-stream@^2.0.0:
+  version "2.0.1"
+  resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz"
+  integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
+
+isexe@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz"
+  integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
 
 it-all@^3.0.2:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/it-all/-/it-all-3.0.4.tgz#08f2e3eb3df04fa4525a343dcacfbdf91ffee162"
+  resolved "https://registry.npmjs.org/it-all/-/it-all-3.0.4.tgz"
   integrity sha512-UMiy0i9DqCHBdWvMbzdYvVGa5/w4t1cc4nchpbnjdLhklglv8mQeEYnii0gvKESJuL1zV32Cqdb33R6/GPfxpQ==
 
 it-batch@^3.0.0, it-batch@^3.0.2:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/it-batch/-/it-batch-3.0.4.tgz#98fd1fb69fc9d99e4a2cc0b46f9ed5976d8289b2"
+  resolved "https://registry.npmjs.org/it-batch/-/it-batch-3.0.4.tgz"
   integrity sha512-WRu2mqOYIs+T9k7+yxSK9VJdk0UE4R0jKQsWQcti5c6vhb1FhjC2+yCB5XBrctQ9edNfCMU/wVzdDj8qSwimbA==
 
 it-drain@^3.0.1:
   version "3.0.5"
-  resolved "https://registry.yarnpkg.com/it-drain/-/it-drain-3.0.5.tgz#d7aed18a16a12c157fa477653fb42c1b4f08491c"
+  resolved "https://registry.npmjs.org/it-drain/-/it-drain-3.0.5.tgz"
   integrity sha512-qYFe4SWdvs9oJGUY5bSjvmiLUMLzFEODNOQUdYdCIkuIgQF+AUB2INhM4yQ09buJ2rhHKDFxvTD/+yUq6qg0XA==
 
 it-filter@^3.0.0, it-filter@^3.0.2:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/it-filter/-/it-filter-3.0.4.tgz#f8af5919ca7fc72f718edb3e7c0d71581aa149c6"
+  resolved "https://registry.npmjs.org/it-filter/-/it-filter-3.0.4.tgz"
   integrity sha512-e0sz+st4sudK/zH6GZ/gRTRP8A/ADuJFCYDmRgMbZvR79y5+v4ZXav850bBZk5wL9zXaYZFxS1v/6Qi+Vjwh5g==
   dependencies:
     it-peekable "^3.0.0"
 
 it-first@^3.0.2:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/it-first/-/it-first-3.0.4.tgz#d68c8ae646ea402cd5e650c352da69988a310342"
+  resolved "https://registry.npmjs.org/it-first/-/it-first-3.0.4.tgz"
   integrity sha512-FtQl84iTNxN5EItP/JgL28V2rzNMkCzTUlNoj41eVdfix2z1DBuLnBqZ0hzYhGGa1rMpbQf0M7CQSA2adlrLJg==
 
 it-foreach@^2.0.2:
   version "2.0.6"
-  resolved "https://registry.yarnpkg.com/it-foreach/-/it-foreach-2.0.6.tgz#6e753be551168d746236ca214d19961729470eee"
+  resolved "https://registry.npmjs.org/it-foreach/-/it-foreach-2.0.6.tgz"
   integrity sha512-OVosBHJsdXpAyeFlCbe3IGZia+65UykyAznakNsKXK+b99dbhuu/mOnXxTadDEo1GWhKx+WA8RNanKkMf07zQw==
   dependencies:
     it-peekable "^3.0.0"
 
 it-glob@^2.0.1, it-glob@^2.0.4:
   version "2.0.6"
-  resolved "https://registry.yarnpkg.com/it-glob/-/it-glob-2.0.6.tgz#616a5b008aa456e82be9a29b8c7dd4dc8ef81acb"
+  resolved "https://registry.npmjs.org/it-glob/-/it-glob-2.0.6.tgz"
   integrity sha512-4C6ccz4nhqrq7yZMzBr3MsKhyL+rlnLXIPceyGG6ogl3Lx3eeWMv1RtlySJwFi6q+jVcPyTpeYt/xftwI2JEQQ==
   dependencies:
     minimatch "^9.0.0"
 
 it-last@^3.0.1, it-last@^3.0.2:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/it-last/-/it-last-3.0.4.tgz#2b107f8032329bd896d2555abd9fc23c304695e8"
+  resolved "https://registry.npmjs.org/it-last/-/it-last-3.0.4.tgz"
   integrity sha512-Ns+KTsQWhs0KCvfv5X3Ck3lpoYxHcp4zUp4d+AOdmC8cXXqDuoZqAjfWhgCbxJubXyIYWdfE2nRcfWqgvZHP8Q==
 
 it-length-prefixed@^9.0.0:
   version "9.0.3"
-  resolved "https://registry.yarnpkg.com/it-length-prefixed/-/it-length-prefixed-9.0.3.tgz#73af16f786cab60a0a9bfc2997e88eb26d3a72ca"
+  resolved "https://registry.npmjs.org/it-length-prefixed/-/it-length-prefixed-9.0.3.tgz"
   integrity sha512-YAu424ceYpXctxtjcLOqn7vJq082CaoP8J646ZusYISfQc3bpzQErgTUqMFj81V262KG2W9/YMBHsy6A/4yvmg==
   dependencies:
     err-code "^3.0.1"
@@ -2660,40 +2151,40 @@ it-length-prefixed@^9.0.0:
 
 it-map@^3.0.1, it-map@^3.0.2, it-map@^3.0.3:
   version "3.0.5"
-  resolved "https://registry.yarnpkg.com/it-map/-/it-map-3.0.5.tgz#30b1e1324cdb4aaadba29cd989485168d1dc4136"
+  resolved "https://registry.npmjs.org/it-map/-/it-map-3.0.5.tgz"
   integrity sha512-hB0TDXo/h4KSJJDSRLgAPmDroiXP6Fx1ck4Bzl3US9hHfZweTKsuiP0y4gXuTMcJlS6vj0bb+f70rhkD47ZA3w==
   dependencies:
     it-peekable "^3.0.0"
 
 it-merge@^3.0.0, it-merge@^3.0.1:
   version "3.0.3"
-  resolved "https://registry.yarnpkg.com/it-merge/-/it-merge-3.0.3.tgz#c7d407c8e0473accf7f9958ce2e0f60276002e84"
+  resolved "https://registry.npmjs.org/it-merge/-/it-merge-3.0.3.tgz"
   integrity sha512-FYVU15KC5pb/GQX1Ims+lee8d4pdqGVCpWr0lkNj8o4xuNo7jY71k6GuEiWdP+T7W1bJqewSxX5yoTy5yZpRVA==
   dependencies:
     it-pushable "^3.2.0"
 
 it-parallel-batch@^3.0.0, it-parallel-batch@^3.0.1:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/it-parallel-batch/-/it-parallel-batch-3.0.4.tgz#d009e2654b879b11f26fc0803fab4f4e5c1a0594"
+  resolved "https://registry.npmjs.org/it-parallel-batch/-/it-parallel-batch-3.0.4.tgz"
   integrity sha512-O1omh8ss8+UtXiMjE+8kM5C20DT0Ma4VtKVfrSHOJU0UHZ+iWBXarabzPYEp+WiuQmrv+klDPPlTZ9KaLN9xOA==
   dependencies:
     it-batch "^3.0.0"
 
 it-parallel@^3.0.0:
   version "3.0.6"
-  resolved "https://registry.yarnpkg.com/it-parallel/-/it-parallel-3.0.6.tgz#d8f9efa56dac5f960545b3a148d2ca171694d228"
+  resolved "https://registry.npmjs.org/it-parallel/-/it-parallel-3.0.6.tgz"
   integrity sha512-i7UM7I9LTkDJw3YIqXHFAPZX6CWYzGc+X3irdNrVExI4vPazrJdI7t5OqrSVN8CONXLAunCiqaSV/zZRbQR56A==
   dependencies:
     p-defer "^4.0.0"
 
 it-peekable@^3.0.0:
   version "3.0.3"
-  resolved "https://registry.yarnpkg.com/it-peekable/-/it-peekable-3.0.3.tgz#5f5741f34f3acd5735804f40d198652c54a3d8c1"
+  resolved "https://registry.npmjs.org/it-peekable/-/it-peekable-3.0.3.tgz"
   integrity sha512-Wx21JX/rMzTEl9flx3DGHuPV1KQFGOl8uoKfQtmZHgPQtGb89eQ6RyVd82h3HuP9Ghpt0WgBDlmmdWeHXqyx7w==
 
 it-pipe@^3.0.1:
   version "3.0.1"
-  resolved "https://registry.yarnpkg.com/it-pipe/-/it-pipe-3.0.1.tgz#b25720df82f4c558a8532602b5fbc37bbe4e7ba5"
+  resolved "https://registry.npmjs.org/it-pipe/-/it-pipe-3.0.1.tgz"
   integrity sha512-sIoNrQl1qSRg2seYSBH/3QxWhJFn9PKYvOf/bHdtCBF0bnghey44VyASsWzn5dAx0DCDDABq1hZIuzKmtBZmKA==
   dependencies:
     it-merge "^3.0.0"
@@ -2702,14 +2193,14 @@ it-pipe@^3.0.1:
 
 it-pushable@^3.0.0, it-pushable@^3.1.0, it-pushable@^3.1.2, it-pushable@^3.2.0, it-pushable@^3.2.1:
   version "3.2.3"
-  resolved "https://registry.yarnpkg.com/it-pushable/-/it-pushable-3.2.3.tgz#e2b80aed90cfbcd54b620c0a0785e546d4e5f334"
+  resolved "https://registry.npmjs.org/it-pushable/-/it-pushable-3.2.3.tgz"
   integrity sha512-gzYnXYK8Y5t5b/BnJUr7glfQLO4U5vyb05gPx/TyTw+4Bv1zM9gFk4YsOrnulWefMewlphCjKkakFvj1y99Tcg==
   dependencies:
     p-defer "^4.0.0"
 
 it-reader@^6.0.1:
   version "6.0.4"
-  resolved "https://registry.yarnpkg.com/it-reader/-/it-reader-6.0.4.tgz#439cb88225dcd15116be0ffde9e846a928c3871a"
+  resolved "https://registry.npmjs.org/it-reader/-/it-reader-6.0.4.tgz"
   integrity sha512-XCWifEcNFFjjBHtor4Sfaj8rcpt+FkY0L6WdhD578SCDhV4VUm7fCkF3dv5a+fTcfQqvN9BsxBTvWbYO6iCjTg==
   dependencies:
     it-stream-types "^2.0.1"
@@ -2717,34 +2208,38 @@ it-reader@^6.0.1:
 
 it-stream-types@^2.0.1:
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/it-stream-types/-/it-stream-types-2.0.1.tgz#69cb4d7e79e707b8257a8997e02751ccb6c3af32"
+  resolved "https://registry.npmjs.org/it-stream-types/-/it-stream-types-2.0.1.tgz"
   integrity sha512-6DmOs5r7ERDbvS4q8yLKENcj6Yecr7QQTqWApbZdfAUTEC947d+PEha7PCqhm//9oxaLYL7TWRekwhoXl2s6fg==
 
 it-take@^3.0.1:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/it-take/-/it-take-3.0.4.tgz#a1614d6ee03f1bee9af89255897de3e249e49d1d"
+  resolved "https://registry.npmjs.org/it-take/-/it-take-3.0.4.tgz"
   integrity sha512-RG8HDjAZlvkzz5Nav4xq6gK5zNT+Ff1UTIf+CrSJW8nIl6N1FpBH5e7clUshiCn+MmmMoSdIEpw4UaTolszxhA==
 
-jiti@^1.19.1, jiti@^1.19.3, jiti@^1.21.0:
-  version "1.21.0"
-  resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.0.tgz#7c97f8fe045724e136a397f7340475244156105d"
-  integrity sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==
+jackspeak@^2.3.5:
+  version "2.3.6"
+  resolved "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz"
+  integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==
+  dependencies:
+    "@isaacs/cliui" "^8.0.2"
+  optionalDependencies:
+    "@pkgjs/parseargs" "^0.11.0"
 
-js-tokens@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
-  integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
+joycon@^3.0.1:
+  version "3.1.1"
+  resolved "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz"
+  integrity sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==
 
 js2xmlparser@^4.0.2:
   version "4.0.2"
-  resolved "https://registry.yarnpkg.com/js2xmlparser/-/js2xmlparser-4.0.2.tgz#2a1fdf01e90585ef2ae872a01bc169c6a8d5e60a"
+  resolved "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.2.tgz"
   integrity sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA==
   dependencies:
     xmlcreate "^2.0.4"
 
-jsdoc@4.0.2:
+"jsdoc@>=3.x <=4.x", jsdoc@4.0.2:
   version "4.0.2"
-  resolved "https://registry.yarnpkg.com/jsdoc/-/jsdoc-4.0.2.tgz#a1273beba964cf433ddf7a70c23fd02c3c60296e"
+  resolved "https://registry.npmjs.org/jsdoc/-/jsdoc-4.0.2.tgz"
   integrity sha512-e8cIg2z62InH7azBBi3EsSEqrKx+nUtAS5bBcYTSpZFA+vhNPyhv8PTFZ0WsjOPDj04/dOLlm08EDcQJDqaGQg==
   dependencies:
     "@babel/parser" "^7.20.15"
@@ -2763,24 +2258,9 @@ jsdoc@4.0.2:
     strip-json-comments "^3.1.0"
     underscore "~1.13.2"
 
-jsesc@^2.5.1:
-  version "2.5.2"
-  resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4"
-  integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==
-
-json5@^2.2.3:
-  version "2.2.3"
-  resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283"
-  integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==
-
-jsonc-parser@^3.2.0:
-  version "3.2.0"
-  resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76"
-  integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==
-
 jsonfile@^6.0.1:
   version "6.1.0"
-  resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae"
+  resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz"
   integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==
   dependencies:
     universalify "^2.0.0"
@@ -2789,79 +2269,75 @@ jsonfile@^6.0.1:
 
 just-debounce-it@^3.0.1:
   version "3.2.0"
-  resolved "https://registry.yarnpkg.com/just-debounce-it/-/just-debounce-it-3.2.0.tgz#4352265f4af44188624ce9fdbc6bff4d49c63a80"
+  resolved "https://registry.npmjs.org/just-debounce-it/-/just-debounce-it-3.2.0.tgz"
   integrity sha512-WXzwLL0745uNuedrCsCs3rpmfD6DBaf7uuVwaq98/8dafURfgQaBsSpjiPp5+CW6Vjltwy9cOGI6qE71b3T8iQ==
 
 klaw-sync@^6.0.0:
   version "6.0.0"
-  resolved "https://registry.yarnpkg.com/klaw-sync/-/klaw-sync-6.0.0.tgz#1fd2cfd56ebb6250181114f0a581167099c2b28c"
+  resolved "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz"
   integrity sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==
   dependencies:
     graceful-fs "^4.1.11"
 
 klaw@^3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/klaw/-/klaw-3.0.0.tgz#b11bec9cf2492f06756d6e809ab73a2910259146"
+  resolved "https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz"
   integrity sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==
   dependencies:
     graceful-fs "^4.1.9"
 
 lilconfig@^3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.0.0.tgz#f8067feb033b5b74dab4602a5f5029420be749bc"
+  resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz"
   integrity sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==
 
+lines-and-columns@^1.1.6:
+  version "1.2.4"
+  resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz"
+  integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
+
 linkify-it@^3.0.1:
   version "3.0.3"
-  resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-3.0.3.tgz#a98baf44ce45a550efb4d49c769d07524cc2fa2e"
+  resolved "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz"
   integrity sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==
   dependencies:
     uc.micro "^1.0.1"
 
-lodash.memoize@^4.1.2:
-  version "4.1.2"
-  resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
-  integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==
+load-tsconfig@^0.2.3:
+  version "0.2.5"
+  resolved "https://registry.npmjs.org/load-tsconfig/-/load-tsconfig-0.2.5.tgz"
+  integrity sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==
 
-lodash.uniq@^4.5.0:
-  version "4.5.0"
-  resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
-  integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==
+lodash.sortby@^4.7.0:
+  version "4.7.0"
+  resolved "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz"
+  integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==
 
 lodash@^4.17.15, lodash@^4.17.21:
   version "4.17.21"
-  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
+  resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
   integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
 
 lower-case@^2.0.2:
   version "2.0.2"
-  resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28"
+  resolved "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz"
   integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==
   dependencies:
     tslib "^2.0.3"
 
-lru-cache@^5.1.1:
-  version "5.1.1"
-  resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920"
-  integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==
-  dependencies:
-    yallist "^3.0.2"
-
-magic-string@^0.30.3, magic-string@^0.30.4:
-  version "0.30.5"
-  resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.5.tgz#1994d980bd1c8835dc6e78db7cbd4ae4f24746f9"
-  integrity sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==
-  dependencies:
-    "@jridgewell/sourcemap-codec" "^1.4.15"
+"lru-cache@^9.1.1 || ^10.0.0":
+  version "10.2.0"
+  resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.0.tgz"
+  integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==
 
 markdown-it-anchor@^8.4.1:
   version "8.6.7"
-  resolved "https://registry.yarnpkg.com/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz#ee6926daf3ad1ed5e4e3968b1740eef1c6399634"
+  resolved "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz"
   integrity sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==
 
-markdown-it@^12.3.2:
+markdown-it@*, markdown-it@^12.3.2:
   version "12.3.2"
-  resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-12.3.2.tgz#bf92ac92283fe983fe4de8ff8abfb5ad72cd0c90"
+  resolved "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz"
   integrity sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==
   dependencies:
     argparse "^2.0.1"
@@ -2872,39 +2348,34 @@ markdown-it@^12.3.2:
 
 marked@^4.0.10:
   version "4.3.0"
-  resolved "https://registry.yarnpkg.com/marked/-/marked-4.3.0.tgz#796362821b019f734054582038b116481b456cf3"
+  resolved "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz"
   integrity sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==
 
-mdn-data@2.0.28:
-  version "2.0.28"
-  resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.28.tgz#5ec48e7bef120654539069e1ae4ddc81ca490eba"
-  integrity sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==
-
-mdn-data@2.0.30:
-  version "2.0.30"
-  resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.30.tgz#ce4df6f80af6cfbe218ecd5c552ba13c4dfa08cc"
-  integrity sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==
-
 mdurl@^1.0.1:
   version "1.0.1"
-  resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e"
+  resolved "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz"
   integrity sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==
 
 merge-options@^3.0.4:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/merge-options/-/merge-options-3.0.4.tgz#84709c2aa2a4b24c1981f66c179fe5565cc6dbb7"
+  resolved "https://registry.npmjs.org/merge-options/-/merge-options-3.0.4.tgz"
   integrity sha512-2Sug1+knBjkaMsMgf1ctR1Ujx+Ayku4EdJN4Z+C2+JzoeF7A3OZ9KM2GY0CpQS51NR61LTurMJrRKPhSs3ZRTQ==
   dependencies:
     is-plain-obj "^2.1.0"
 
+merge-stream@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz"
+  integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==
+
 merge2@^1.3.0, merge2@^1.4.1:
   version "1.4.1"
-  resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
+  resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz"
   integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
 
 micromatch@^4.0.4:
   version "4.0.5"
-  resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6"
+  resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz"
   integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==
   dependencies:
     braces "^3.0.2"
@@ -2912,102 +2383,75 @@ micromatch@^4.0.4:
 
 mime-db@1.52.0:
   version "1.52.0"
-  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
+  resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz"
   integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
 
 mime-types@^2.1.12:
   version "2.1.35"
-  resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
+  resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz"
   integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
   dependencies:
     mime-db "1.52.0"
 
-minimatch@^5.0.1:
-  version "5.1.6"
-  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96"
-  integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==
-  dependencies:
-    brace-expansion "^2.0.1"
+mimic-fn@^2.1.0:
+  version "2.1.0"
+  resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz"
+  integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
 
-minimatch@^9.0.0:
+minimatch@^9.0.0, minimatch@^9.0.1:
   version "9.0.3"
-  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825"
+  resolved "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz"
   integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==
   dependencies:
     brace-expansion "^2.0.1"
 
 minimist@^1.2.5:
   version "1.2.8"
-  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
+  resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz"
   integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
 
+"minipass@^5.0.0 || ^6.0.2 || ^7.0.0":
+  version "7.0.4"
+  resolved "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz"
+  integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==
+
 mkdirp@^1.0.4:
   version "1.0.4"
-  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
+  resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz"
   integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
 
-mkdist@^1.3.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/mkdist/-/mkdist-1.4.0.tgz#69d937fee4a9636a7eaa1137461ca716bbe59734"
-  integrity sha512-LzzdzWDx6cWWPd8saIoO+kT5jnbijfeDaE6jZfmCYEi3YL2aJSyF23/tCFee/mDuh/ek1UQeSYdLeSa6oesdiw==
-  dependencies:
-    autoprefixer "^10.4.14"
-    citty "^0.1.5"
-    cssnano "^6.0.1"
-    defu "^6.1.3"
-    esbuild "^0.19.7"
-    fs-extra "^11.1.1"
-    globby "^13.2.2"
-    jiti "^1.21.0"
-    mlly "^1.4.2"
-    mri "^1.2.0"
-    pathe "^1.1.1"
-    postcss "^8.4.26"
-    postcss-nested "^6.0.1"
-
-mlly@^1.2.0, mlly@^1.4.0, mlly@^1.4.2:
-  version "1.4.2"
-  resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.4.2.tgz#7cf406aa319ff6563d25da6b36610a93f2a8007e"
-  integrity sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg==
-  dependencies:
-    acorn "^8.10.0"
-    pathe "^1.1.1"
-    pkg-types "^1.0.3"
-    ufo "^1.3.0"
-
-mri@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/mri/-/mri-1.2.0.tgz#6721480fec2a11a4889861115a48b6cbe7cc8f0b"
-  integrity sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==
-
-ms@2.1.2:
+ms@^2.1.1, ms@2.1.2:
   version "2.1.2"
-  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
+  resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
 
-ms@^2.1.1:
-  version "2.1.3"
-  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
-  integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
-
 multiformats@^12.0.1, multiformats@^12.1.0, multiformats@^12.1.1, multiformats@^12.1.3:
   version "12.1.3"
-  resolved "https://registry.yarnpkg.com/multiformats/-/multiformats-12.1.3.tgz#cbf7a9861e11e74f8228b21376088cb43ba8754e"
+  resolved "https://registry.npmjs.org/multiformats/-/multiformats-12.1.3.tgz"
   integrity sha512-eajQ/ZH7qXZQR2AgtfpmSMizQzmyYVmCql7pdhldPuYQi4atACekbJaQplk6dWyIi10jCaFnd6pqvcEFXjbaJw==
 
+multiformats@^13.0.0:
+  version "13.0.1"
+  resolved "https://registry.npmjs.org/multiformats/-/multiformats-13.0.1.tgz"
+  integrity sha512-bt3R5iXe2O8xpp3wkmQhC73b/lC4S2ihU8Dndwcsysqbydqb8N+bpP116qMcClZ17g58iSIwtXUTcg2zT4sniA==
+
 murmurhash3js-revisited@^3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/murmurhash3js-revisited/-/murmurhash3js-revisited-3.0.0.tgz#6bd36e25de8f73394222adc6e41fa3fac08a5869"
+  resolved "https://registry.npmjs.org/murmurhash3js-revisited/-/murmurhash3js-revisited-3.0.0.tgz"
   integrity sha512-/sF3ee6zvScXMb1XFJ8gDsSnY+X8PbOyjIuBhtgis10W2Jx4ZjIhikUCIF9c4gpJxVnQIsPAFrSwTCuAjicP6g==
 
-nanoid@^3.3.7:
-  version "3.3.7"
-  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
-  integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
+mz@^2.7.0:
+  version "2.7.0"
+  resolved "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz"
+  integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==
+  dependencies:
+    any-promise "^1.0.0"
+    object-assign "^4.0.1"
+    thenify-all "^1.0.0"
 
 no-case@^3.0.4:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d"
+  resolved "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz"
   integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==
   dependencies:
     lower-case "^2.0.2"
@@ -3015,43 +2459,43 @@ no-case@^3.0.4:
 
 node-fetch@^2.6.1:
   version "2.7.0"
-  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
+  resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz"
   integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==
   dependencies:
     whatwg-url "^5.0.0"
 
-node-releases@^2.0.14:
-  version "2.0.14"
-  resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b"
-  integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==
-
-normalize-range@^0.1.2:
-  version "0.1.2"
-  resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942"
-  integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==
+normalize-path@^3.0.0, normalize-path@~3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz"
+  integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
 
-nth-check@^2.0.1:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d"
-  integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==
+npm-run-path@^4.0.1:
+  version "4.0.1"
+  resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz"
+  integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==
   dependencies:
-    boolbase "^1.0.0"
+    path-key "^3.0.0"
 
-once@^1.3.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
-  integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
+object-assign@^4.0.1:
+  version "4.1.1"
+  resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz"
+  integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
+
+onetime@^5.1.2:
+  version "5.1.2"
+  resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz"
+  integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==
   dependencies:
-    wrappy "1"
+    mimic-fn "^2.1.0"
 
 p-defer@^4.0.0:
   version "4.0.0"
-  resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-4.0.0.tgz#8082770aeeb10eb6b408abe91866738741ddd5d2"
+  resolved "https://registry.npmjs.org/p-defer/-/p-defer-4.0.0.tgz"
   integrity sha512-Vb3QRvQ0Y5XnF40ZUWW7JfLogicVh/EnA5gBIvKDJoYpeI82+1E3AlB9yOcKFS0AhHrWVnAQO39fbR0G99IVEQ==
 
 p-queue@^7.3.0, p-queue@^7.3.4:
   version "7.4.1"
-  resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-7.4.1.tgz#7f86f853048beca8272abdbb7cec1ed2afc0f265"
+  resolved "https://registry.npmjs.org/p-queue/-/p-queue-7.4.1.tgz"
   integrity sha512-vRpMXmIkYF2/1hLBKisKeVYJZ8S2tZ0zEAmIJgdVKP2nq0nh4qCdf8bgw+ZgKrkh71AOCaqzwbJJk1WtdcF3VA==
   dependencies:
     eventemitter3 "^5.0.1"
@@ -3059,12 +2503,12 @@ p-queue@^7.3.0, p-queue@^7.3.4:
 
 p-timeout@^5.0.2:
   version "5.1.0"
-  resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-5.1.0.tgz#b3c691cf4415138ce2d9cfe071dba11f0fee085b"
+  resolved "https://registry.npmjs.org/p-timeout/-/p-timeout-5.1.0.tgz"
   integrity sha512-auFDyzzzGZZZdHz3BtET9VEz0SE/uMEAx7uWfGPucfzEwwe/xH0iVeZibQmANYE/hp9T2+UUZT5m+BKyrDp3Ew==
 
 param-case@^3.0.4:
   version "3.0.4"
-  resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5"
+  resolved "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz"
   integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==
   dependencies:
     dot-case "^3.0.4"
@@ -3072,289 +2516,61 @@ param-case@^3.0.4:
 
 pascal-case@^3.1.2:
   version "3.1.2"
-  resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb"
+  resolved "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz"
   integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==
   dependencies:
     no-case "^3.0.4"
     tslib "^2.0.3"
 
-path-parse@^1.0.7:
-  version "1.0.7"
-  resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
-  integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
+path-key@^3.0.0, path-key@^3.1.0:
+  version "3.1.1"
+  resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz"
+  integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
+
+path-scurry@^1.10.1:
+  version "1.10.1"
+  resolved "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz"
+  integrity sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==
+  dependencies:
+    lru-cache "^9.1.1 || ^10.0.0"
+    minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
 
 path-type@^4.0.0:
   version "4.0.0"
-  resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
+  resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz"
   integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==
 
-pathe@^1.1.0, pathe@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.1.tgz#1dd31d382b974ba69809adc9a7a347e65d84829a"
-  integrity sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q==
-
-picocolors@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
-  integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
-
-picomatch@^2.3.1:
+picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1:
   version "2.3.1"
-  resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
+  resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz"
   integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
 
-pkg-types@^1.0.3:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/pkg-types/-/pkg-types-1.0.3.tgz#988b42ab19254c01614d13f4f65a2cfc7880f868"
-  integrity sha512-nN7pYi0AQqJnoLPC9eHFQ8AcyaixBUOwvqc5TDnIKCMEE6I0y8P7OKA7fPexsXGCGxQDl/cmrLAp26LhcwxZ4A==
-  dependencies:
-    jsonc-parser "^3.2.0"
-    mlly "^1.2.0"
-    pathe "^1.1.0"
-
-postcss-calc@^9.0.1:
-  version "9.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-9.0.1.tgz#a744fd592438a93d6de0f1434c572670361eb6c6"
-  integrity sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ==
-  dependencies:
-    postcss-selector-parser "^6.0.11"
-    postcss-value-parser "^4.2.0"
-
-postcss-colormin@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-6.0.1.tgz#3aa61d38a88dbdeeb7252fae67809a7ac547a129"
-  integrity sha512-Tb9aR2wCJCzKuNjIeMzVNd0nXjQy25HDgFmmaRsHnP0eP/k8uQWE4S8voX5S2coO5CeKrp+USFs1Ayv9Tpxx6w==
-  dependencies:
-    browserslist "^4.21.4"
-    caniuse-api "^3.0.0"
-    colord "^2.9.1"
-    postcss-value-parser "^4.2.0"
-
-postcss-convert-values@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-6.0.1.tgz#a1451cb7e53b67b3db95c37276c5decb997409f2"
-  integrity sha512-zTd4Vh0HxGkhg5aHtfCogcRHzGkvblfdWlQ53lIh1cJhYcGyIxh2hgtKoVh40AMktRERet+JKdB04nNG19kjmA==
-  dependencies:
-    browserslist "^4.21.4"
-    postcss-value-parser "^4.2.0"
-
-postcss-discard-comments@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-6.0.1.tgz#46176212bd9c3e5f48aa4b8b4868786726c41d36"
-  integrity sha512-f1KYNPtqYLUeZGCHQPKzzFtsHaRuECe6jLakf/RjSRqvF5XHLZnM2+fXLhb8Qh/HBFHs3M4cSLb1k3B899RYIg==
-
-postcss-discard-duplicates@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.1.tgz#112b1a95948e69b3484fdd43584dda6930977939"
-  integrity sha512-1hvUs76HLYR8zkScbwyJ8oJEugfPV+WchpnA+26fpJ7Smzs51CzGBHC32RS03psuX/2l0l0UKh2StzNxOrKCYg==
-
-postcss-discard-empty@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-6.0.1.tgz#b34cb45ec891246da4506b53e352390fdef126c4"
-  integrity sha512-yitcmKwmVWtNsrrRqGJ7/C0YRy53i0mjexBDQ9zYxDwTWVBgbU4+C9jIZLmQlTDT9zhml+u0OMFJh8+31krmOg==
-
-postcss-discard-overridden@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-6.0.1.tgz#c63c559237758d74bc505452393a64dda9b19ef4"
-  integrity sha512-qs0ehZMMZpSESbRkw1+inkf51kak6OOzNRaoLd/U7Fatp0aN2HQ1rxGOrJvYcRAN9VpX8kUF13R2ofn8OlvFVA==
-
-postcss-merge-longhand@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-6.0.1.tgz#5a1145868c615e643ca0996d9e9c3f09ad8de854"
-  integrity sha512-vmr/HZQzaPXc45FRvSctqFTF05UaDnTn5ABX+UtQPJznDWT/QaFbVc/pJ5C2YPxx2J2XcfmWowlKwtCDwiQ5hA==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-    stylehacks "^6.0.1"
-
-postcss-merge-rules@^6.0.2:
-  version "6.0.2"
-  resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-6.0.2.tgz#b0f0063a05d671b7093a86f0faa6d2c6695dc036"
-  integrity sha512-6lm8bl0UfriSfxI+F/cezrebqqP8w702UC6SjZlUlBYwuRVNbmgcJuQU7yePIvD4MNT53r/acQCUAyulrpgmeQ==
-  dependencies:
-    browserslist "^4.21.4"
-    caniuse-api "^3.0.0"
-    cssnano-utils "^4.0.1"
-    postcss-selector-parser "^6.0.5"
-
-postcss-minify-font-values@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-6.0.1.tgz#788eb930168be90225f3937f0b70aa19d8b532b2"
-  integrity sha512-tIwmF1zUPoN6xOtA/2FgVk1ZKrLcCvE0dpZLtzyyte0j9zUeB8RTbCqrHZGjJlxOvNWKMYtunLrrl7HPOiR46w==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-minify-gradients@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-6.0.1.tgz#4faf1880b483dc37016658aa186b42194ff9b5bc"
-  integrity sha512-M1RJWVjd6IOLPl1hYiOd5HQHgpp6cvJVLrieQYS9y07Yo8itAr6jaekzJphaJFR0tcg4kRewCk3kna9uHBxn/w==
-  dependencies:
-    colord "^2.9.1"
-    cssnano-utils "^4.0.1"
-    postcss-value-parser "^4.2.0"
-
-postcss-minify-params@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-6.0.1.tgz#79b83947bae2aa991df12646c7f463276abb0aef"
-  integrity sha512-eFvGWArqh4khPIgPDu6SZNcaLctx97nO7c59OXnRtGntAp5/VS4gjMhhW9qUFsK6mQ27pEZGt2kR+mPizI+Z9g==
-  dependencies:
-    browserslist "^4.21.4"
-    cssnano-utils "^4.0.1"
-    postcss-value-parser "^4.2.0"
-
-postcss-minify-selectors@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-6.0.1.tgz#7b2f05651a2f734da1fa50dea62cfc47e67d68f9"
-  integrity sha512-mfReq5wrS6vkunxvJp6GDuOk+Ak6JV7134gp8L+ANRnV9VwqzTvBtX6lpohooVU750AR0D3pVx2Zn6uCCwOAfQ==
-  dependencies:
-    postcss-selector-parser "^6.0.5"
-
-postcss-nested@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.0.1.tgz#f83dc9846ca16d2f4fa864f16e9d9f7d0961662c"
-  integrity sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ==
-  dependencies:
-    postcss-selector-parser "^6.0.11"
-
-postcss-normalize-charset@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-6.0.1.tgz#5f70e1eb8bbdbcfcbed060ef70f179e8fef57d0c"
-  integrity sha512-aW5LbMNRZ+oDV57PF9K+WI1Z8MPnF+A8qbajg/T8PP126YrGX1f9IQx21GI2OlGz7XFJi/fNi0GTbY948XJtXg==
-
-postcss-normalize-display-values@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.1.tgz#ff9aa30bbf1283294bfd9cc8b6fb81ff060a7f2d"
-  integrity sha512-mc3vxp2bEuCb4LgCcmG1y6lKJu1Co8T+rKHrcbShJwUmKJiEl761qb/QQCfFwlrvSeET3jksolCR/RZuMURudw==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-normalize-positions@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-6.0.1.tgz#41ffdc72994f024c6cd6e91dbfb40ab9abe6fe90"
-  integrity sha512-HRsq8u/0unKNvm0cvwxcOUEcakFXqZ41fv3FOdPn916XFUrympjr+03oaLkuZENz3HE9RrQE9yU0Xv43ThWjQg==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-normalize-repeat-style@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.1.tgz#55dc54b6f80305b280a379899a6626e0a07b04a8"
-  integrity sha512-Gbb2nmCy6tTiA7Sh2MBs3fj9W8swonk6lw+dFFeQT68B0Pzwp1kvisJQkdV6rbbMSd9brMlS8I8ts52tAGWmGQ==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-normalize-string@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-6.0.1.tgz#7605e0fb4ec7bf2709709991d13a949e4419db1d"
-  integrity sha512-5Fhx/+xzALJD9EI26Aq23hXwmv97Zfy2VFrt5PLT8lAhnBIZvmaT5pQk+NuJ/GWj/QWaKSKbnoKDGLbV6qnhXg==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-normalize-timing-functions@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.1.tgz#ef937b7ca2fd62ed0b46645ea5728b842a3600db"
-  integrity sha512-4zcczzHqmCU7L5dqTB9rzeqPWRMc0K2HoR+Bfl+FSMbqGBUcP5LRfgcH4BdRtLuzVQK1/FHdFoGT3F7rkEnY+g==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-normalize-unicode@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-6.0.1.tgz#a652faa02fc8ce5d1429ac0782575d8d66a60d9b"
-  integrity sha512-ok9DsI94nEF79MkvmLfHfn8ddnKXA7w+8YuUoz5m7b6TOdoaRCpvu/QMHXQs9+DwUbvp+ytzz04J55CPy77PuQ==
-  dependencies:
-    browserslist "^4.21.4"
-    postcss-value-parser "^4.2.0"
-
-postcss-normalize-url@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-6.0.1.tgz#eae58cb4f5f9a4fa5bbbf6d4222dff534ad46186"
-  integrity sha512-jEXL15tXSvbjm0yzUV7FBiEXwhIa9H88JOXDGQzmcWoB4mSjZIsmtto066s2iW9FYuIrIF4k04HA2BKAOpbsaQ==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-normalize-whitespace@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.1.tgz#b5933750b938814c028d3d2b2e5c0199e0037b53"
-  integrity sha512-76i3NpWf6bB8UHlVuLRxG4zW2YykF9CTEcq/9LGAiz2qBuX5cBStadkk0jSkg9a9TCIXbMQz7yzrygKoCW9JuA==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-ordered-values@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-6.0.1.tgz#553e735d009065b362da93340e57f43d5f2d0fbc"
-  integrity sha512-XXbb1O/MW9HdEhnBxitZpPFbIvDgbo9NK4c/5bOfiKpnIGZDoL2xd7/e6jW5DYLsWxBbs+1nZEnVgnjnlFViaA==
-  dependencies:
-    cssnano-utils "^4.0.1"
-    postcss-value-parser "^4.2.0"
-
-postcss-reduce-initial@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-6.0.1.tgz#37621ba31a18fd75eb9c76e818cca2a2adb13238"
-  integrity sha512-cgzsI2ThG1PMSdSyM9A+bVxiiVgPIVz9f5c6H+TqEv0CA89iCOO81mwLWRWLgOKFtQkKob9nNpnkxG/1RlgFcA==
-  dependencies:
-    browserslist "^4.21.4"
-    caniuse-api "^3.0.0"
-
-postcss-reduce-transforms@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.1.tgz#7bf59d7c6e7066e3b18ef17237d2344bd3da6d75"
-  integrity sha512-fUbV81OkUe75JM+VYO1gr/IoA2b/dRiH6HvMwhrIBSUrxq3jNZQZitSnugcTLDi1KkQh1eR/zi+iyxviUNBkcQ==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-
-postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5:
-  version "6.0.14"
-  resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.14.tgz#9d45f1afbebedae6811a17f49d09754f2ad153b3"
-  integrity sha512-65xXYsT40i9GyWzlHQ5ShZoK7JZdySeOozi/tz2EezDo6c04q6+ckYMeoY7idaie1qp2dT5KoYQ2yky6JuoHnA==
-  dependencies:
-    cssesc "^3.0.0"
-    util-deprecate "^1.0.2"
-
-postcss-svgo@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-6.0.1.tgz#6bf63713ef5cb40f1bedd2c2cfca2486b41d5184"
-  integrity sha512-eWV4Rrqa06LzTgqirOv5Ln6WTGyU7Pbeqj9WEyKo9tpnWixNATVJMeaEcOHOW1ZYyjcG8wSJwX/28DvU3oy3HA==
-  dependencies:
-    postcss-value-parser "^4.2.0"
-    svgo "^3.0.5"
-
-postcss-unique-selectors@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-6.0.1.tgz#e6d071c2ea64ce265eb55ea9c170ff951183c712"
-  integrity sha512-/KCCEpNNR7oXVJ38/Id7GC9Nt0zxO1T3zVbhVaq6F6LSG+3gU3B7+QuTHfD0v8NPEHlzewAout29S0InmB78EQ==
-  dependencies:
-    postcss-selector-parser "^6.0.5"
-
-postcss-value-parser@^4.2.0:
-  version "4.2.0"
-  resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514"
-  integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==
+pirates@^4.0.1:
+  version "4.0.6"
+  resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz"
+  integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==
 
-postcss@^8.4.26:
-  version "8.4.32"
-  resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.32.tgz#1dac6ac51ab19adb21b8b34fd2d93a86440ef6c9"
-  integrity sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==
+postcss-load-config@^4.0.1:
+  version "4.0.2"
+  resolved "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.2.tgz"
+  integrity sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==
   dependencies:
-    nanoid "^3.3.7"
-    picocolors "^1.0.0"
-    source-map-js "^1.0.2"
+    lilconfig "^3.0.0"
+    yaml "^2.3.4"
 
 prettier@3.1.0:
   version "3.1.0"
-  resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.1.0.tgz#c6d16474a5f764ea1a4a373c593b779697744d5e"
+  resolved "https://registry.npmjs.org/prettier/-/prettier-3.1.0.tgz"
   integrity sha512-TQLvXjq5IAibjh8EpBIkNKxO749UEWABoiIZehEPiY4GNpVdhaFKqSTu+QrlU6D2dPAfubRmtJTi4K4YkQ5eXw==
 
-pretty-bytes@^6.1.1:
-  version "6.1.1"
-  resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-6.1.1.tgz#38cd6bb46f47afbf667c202cfc754bffd2016a3b"
-  integrity sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==
-
 progress-events@^1.0.0:
   version "1.0.0"
-  resolved "https://registry.yarnpkg.com/progress-events/-/progress-events-1.0.0.tgz#34f5e8fdb5dae3561837b22672d1e02277bb2109"
+  resolved "https://registry.npmjs.org/progress-events/-/progress-events-1.0.0.tgz"
   integrity sha512-zIB6QDrSbPfRg+33FZalluFIowkbV5Xh1xSuetjG+rlC5he6u2dc6VQJ0TbMdlN3R1RHdpOqxEFMKTnQ+itUwA==
 
 protons-runtime@^5.0.0:
   version "5.2.0"
-  resolved "https://registry.yarnpkg.com/protons-runtime/-/protons-runtime-5.2.0.tgz#896e9b8a0e6b95cfe2b990e31d9a8de7d537bf0c"
+  resolved "https://registry.npmjs.org/protons-runtime/-/protons-runtime-5.2.0.tgz"
   integrity sha512-jL3VSbXllgm17zurKQ/z+Ath0w+4BknJ+l/NLocfjAB8hbeASOZTNtb7zK3nDsKq2pHK9YFumNQvpkZ6gFfWhA==
   dependencies:
     uint8arraylist "^2.4.3"
@@ -3362,17 +2578,22 @@ protons-runtime@^5.0.0:
 
 proxy-from-env@^1.1.0:
   version "1.1.0"
-  resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2"
+  resolved "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz"
   integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==
 
+punycode@^2.1.0:
+  version "2.3.1"
+  resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz"
+  integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==
+
 queue-microtask@^1.2.2:
   version "1.2.3"
-  resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
+  resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz"
   integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
 
 rabin-wasm@^0.1.4:
   version "0.1.5"
-  resolved "https://registry.yarnpkg.com/rabin-wasm/-/rabin-wasm-0.1.5.tgz#5b625ca007d6a2cbc1456c78ae71d550addbc9c9"
+  resolved "https://registry.npmjs.org/rabin-wasm/-/rabin-wasm-0.1.5.tgz"
   integrity sha512-uWgQTo7pim1Rnj5TuWcCewRDTf0PEFTSlaUjWP4eY9EbLV9em08v89oCz/WO+wRxpYuO36XEHp4wgYQnAgOHzA==
   dependencies:
     "@assemblyscript/loader" "^0.9.4"
@@ -3384,114 +2605,130 @@ rabin-wasm@^0.1.4:
 
 race-signal@^1.0.0:
   version "1.0.2"
-  resolved "https://registry.yarnpkg.com/race-signal/-/race-signal-1.0.2.tgz#e42379fba0cec4ee8dab7c9bbbd4aa6e0d14c25f"
+  resolved "https://registry.npmjs.org/race-signal/-/race-signal-1.0.2.tgz"
   integrity sha512-o3xNv0iTcIDQCXFlF6fPAMEBRjFxssgGoRqLbg06m+AdzEXXLUmoNOoUHTVz2NoBI8hHwKFKoC6IqyNtWr2bww==
 
 readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0:
   version "3.6.2"
-  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
+  resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz"
   integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==
   dependencies:
     inherits "^2.0.3"
     string_decoder "^1.1.1"
     util-deprecate "^1.0.1"
 
+readdirp@~3.6.0:
+  version "3.6.0"
+  resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz"
+  integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==
+  dependencies:
+    picomatch "^2.2.1"
+
 receptacle@^1.3.2:
   version "1.3.2"
-  resolved "https://registry.yarnpkg.com/receptacle/-/receptacle-1.3.2.tgz#a7994c7efafc7a01d0e2041839dab6c4951360d2"
+  resolved "https://registry.npmjs.org/receptacle/-/receptacle-1.3.2.tgz"
   integrity sha512-HrsFvqZZheusncQRiEE7GatOAETrARKV/lnfYicIm8lbvp/JQOdADOfhjBd2DajvoszEyxSM6RlAAIZgEoeu/A==
   dependencies:
     ms "^2.1.1"
 
 relateurl@^0.2.7:
   version "0.2.7"
-  resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9"
+  resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz"
   integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==
 
 requizzle@^0.2.3:
   version "0.2.4"
-  resolved "https://registry.yarnpkg.com/requizzle/-/requizzle-0.2.4.tgz#319eb658b28c370f0c20f968fa8ceab98c13d27c"
+  resolved "https://registry.npmjs.org/requizzle/-/requizzle-0.2.4.tgz"
   integrity sha512-JRrFk1D4OQ4SqovXOgdav+K8EAhSB/LJZqCz8tbX0KObcdeM15Ss59ozWMBWmmINMagCwmqn4ZNryUGpBsl6Jw==
   dependencies:
     lodash "^4.17.21"
 
-resolve@^1.22.1:
-  version "1.22.8"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d"
-  integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==
-  dependencies:
-    is-core-module "^2.13.0"
-    path-parse "^1.0.7"
-    supports-preserve-symlinks-flag "^1.0.0"
+resolve-from@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz"
+  integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
 
 retimer@^3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/retimer/-/retimer-3.0.0.tgz#98b751b1feaf1af13eb0228f8ea68b8f9da530df"
+  resolved "https://registry.npmjs.org/retimer/-/retimer-3.0.0.tgz"
   integrity sha512-WKE0j11Pa0ZJI5YIk0nflGI7SQsfl2ljihVy7ogh7DeQSeYAUi0ubZ/yEueGtDfUPk6GH5LRw1hBdLq4IwUBWA==
 
 reusify@^1.0.4:
   version "1.0.4"
-  resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
+  resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz"
   integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
 
-rollup-plugin-dts@^6.0.0:
-  version "6.1.0"
-  resolved "https://registry.yarnpkg.com/rollup-plugin-dts/-/rollup-plugin-dts-6.1.0.tgz#56e9c5548dac717213c6a4aa9df523faf04f75ae"
-  integrity sha512-ijSCPICkRMDKDLBK9torss07+8dl9UpY9z1N/zTeA1cIqdzMlpkV3MOOC7zukyvQfDyxa1s3Dl2+DeiP/G6DOw==
+rollup@^4.0.2:
+  version "4.9.6"
+  resolved "https://registry.npmjs.org/rollup/-/rollup-4.9.6.tgz"
+  integrity sha512-05lzkCS2uASX0CiLFybYfVkwNbKZG5NFQ6Go0VWyogFTXXbR039UVsegViTntkk4OglHBdF54ccApXRRuXRbsg==
   dependencies:
-    magic-string "^0.30.4"
-  optionalDependencies:
-    "@babel/code-frame" "^7.22.13"
-
-rollup@^3.28.1:
-  version "3.29.4"
-  resolved "https://registry.yarnpkg.com/rollup/-/rollup-3.29.4.tgz#4d70c0f9834146df8705bfb69a9a19c9e1109981"
-  integrity sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==
+    "@types/estree" "1.0.5"
   optionalDependencies:
+    "@rollup/rollup-android-arm-eabi" "4.9.6"
+    "@rollup/rollup-android-arm64" "4.9.6"
+    "@rollup/rollup-darwin-arm64" "4.9.6"
+    "@rollup/rollup-darwin-x64" "4.9.6"
+    "@rollup/rollup-linux-arm-gnueabihf" "4.9.6"
+    "@rollup/rollup-linux-arm64-gnu" "4.9.6"
+    "@rollup/rollup-linux-arm64-musl" "4.9.6"
+    "@rollup/rollup-linux-riscv64-gnu" "4.9.6"
+    "@rollup/rollup-linux-x64-gnu" "4.9.6"
+    "@rollup/rollup-linux-x64-musl" "4.9.6"
+    "@rollup/rollup-win32-arm64-msvc" "4.9.6"
+    "@rollup/rollup-win32-ia32-msvc" "4.9.6"
+    "@rollup/rollup-win32-x64-msvc" "4.9.6"
     fsevents "~2.3.2"
 
 run-parallel@^1.1.9:
   version "1.2.0"
-  resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
+  resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz"
   integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==
   dependencies:
     queue-microtask "^1.2.2"
 
 safe-buffer@~5.2.0:
   version "5.2.1"
-  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
+  resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz"
   integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
 
-scule@^1.0.0:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/scule/-/scule-1.1.1.tgz#82b4d13bb8c729c15849256e749cee0cb52a4d89"
-  integrity sha512-sHtm/SsIK9BUBI3EFT/Gnp9VoKfY6QLvlkvAE6YK7454IF8FSgJEAnJpVdSC7K5/pjI5NfxhzBLW2JAfYA/shQ==
+shebang-command@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz"
+  integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
+  dependencies:
+    shebang-regex "^3.0.0"
 
-semver@^6.3.1:
-  version "6.3.1"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
-  integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
+shebang-regex@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz"
+  integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
 
 showdown@^2.1.0:
   version "2.1.0"
-  resolved "https://registry.yarnpkg.com/showdown/-/showdown-2.1.0.tgz#1251f5ed8f773f0c0c7bfc8e6fd23581f9e545c5"
+  resolved "https://registry.npmjs.org/showdown/-/showdown-2.1.0.tgz"
   integrity sha512-/6NVYu4U819R2pUIk79n67SYgJHWCce0a5xTP979WbNp0FL9MN1I1QK662IDU1b6JzKTvmhgI7T7JYIxBi3kMQ==
   dependencies:
     commander "^9.0.0"
 
-slash@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7"
-  integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==
+signal-exit@^3.0.3:
+  version "3.0.7"
+  resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz"
+  integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
 
-source-map-js@^1.0.1, source-map-js@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
-  integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==
+signal-exit@^4.0.1:
+  version "4.1.0"
+  resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz"
+  integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==
+
+slash@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz"
+  integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
 
 source-map-support@~0.5.20:
   version "0.5.21"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
+  resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz"
   integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
   dependencies:
     buffer-from "^1.0.0"
@@ -3499,17 +2736,24 @@ source-map-support@~0.5.20:
 
 source-map@^0.6.0, source-map@~0.6.0:
   version "0.6.1"
-  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+  resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz"
   integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
 
+source-map@0.8.0-beta.0:
+  version "0.8.0-beta.0"
+  resolved "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz"
+  integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==
+  dependencies:
+    whatwg-url "^7.0.0"
+
 sparse-array@^1.3.1, sparse-array@^1.3.2:
   version "1.3.2"
-  resolved "https://registry.yarnpkg.com/sparse-array/-/sparse-array-1.3.2.tgz#0e1a8b71706d356bc916fe754ff496d450ec20b0"
+  resolved "https://registry.npmjs.org/sparse-array/-/sparse-array-1.3.2.tgz"
   integrity sha512-ZT711fePGn3+kQyLuv1fpd3rNSkNF8vd5Kv2D+qnOANeyKs3fx6bUMGWRPvgTTcYV64QMqZKZwcuaQSP3AZ0tg==
 
 stream-browserify@3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-3.0.0.tgz#22b0a2850cdf6503e73085da1fc7b7d0c2122f2f"
+  resolved "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz"
   integrity sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA==
   dependencies:
     inherits "~2.0.4"
@@ -3517,57 +2761,90 @@ stream-browserify@3.0.0:
 
 string_decoder@^1.1.1:
   version "1.3.0"
-  resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
+  resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz"
   integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
   dependencies:
     safe-buffer "~5.2.0"
 
-strip-json-comments@^3.1.0:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
-  integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+"string-width-cjs@npm:string-width@^4.2.0":
+  version "4.2.3"
+  resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"
+  integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+  dependencies:
+    emoji-regex "^8.0.0"
+    is-fullwidth-code-point "^3.0.0"
+    strip-ansi "^6.0.1"
 
-strnum@^1.0.5:
-  version "1.0.5"
-  resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db"
-  integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==
+string-width@^4.1.0:
+  version "4.2.3"
+  resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"
+  integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+  dependencies:
+    emoji-regex "^8.0.0"
+    is-fullwidth-code-point "^3.0.0"
+    strip-ansi "^6.0.1"
 
-stylehacks@^6.0.1:
+string-width@^5.0.1, string-width@^5.1.2:
+  version "5.1.2"
+  resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz"
+  integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==
+  dependencies:
+    eastasianwidth "^0.2.0"
+    emoji-regex "^9.2.2"
+    strip-ansi "^7.0.1"
+
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
   version "6.0.1"
-  resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-6.0.1.tgz#c103f0149e268a290a0dda3fce8fd4c5459a13c3"
-  integrity sha512-jTqG2aIoX2fYg0YsGvqE4ooE/e75WmaEjnNiP6Ag7irLtHxML8NJRxRxS0HyDpde8DRGuEXTFVHVfR5Tmbxqzg==
+  resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"
+  integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
   dependencies:
-    browserslist "^4.21.4"
-    postcss-selector-parser "^6.0.4"
+    ansi-regex "^5.0.1"
 
-supports-color@^5.3.0:
-  version "5.5.0"
-  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
-  integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
+strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+  version "6.0.1"
+  resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"
+  integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
   dependencies:
-    has-flag "^3.0.0"
+    ansi-regex "^5.0.1"
 
-supports-preserve-symlinks-flag@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
-  integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
+strip-ansi@^7.0.1:
+  version "7.1.0"
+  resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz"
+  integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==
+  dependencies:
+    ansi-regex "^6.0.1"
 
-svgo@^3.0.5:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/svgo/-/svgo-3.1.0.tgz#7e63855c8da73297d5d5765e968f9679a0f8d24a"
-  integrity sha512-R5SnNA89w1dYgNv570591F66v34b3eQShpIBcQtZtM5trJwm1VvxbIoMpRYY3ybTAutcKTLEmTsdnaknOHbiQA==
+strip-final-newline@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz"
+  integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==
+
+strip-json-comments@^3.1.0:
+  version "3.1.1"
+  resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"
+  integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+
+strnum@^1.0.5:
+  version "1.0.5"
+  resolved "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz"
+  integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==
+
+sucrase@^3.20.3:
+  version "3.35.0"
+  resolved "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz"
+  integrity sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==
   dependencies:
-    "@trysound/sax" "0.2.0"
-    commander "^7.2.0"
-    css-select "^5.1.0"
-    css-tree "^2.2.1"
-    css-what "^6.1.0"
-    csso "5.0.5"
-    picocolors "^1.0.0"
+    "@jridgewell/gen-mapping" "^0.3.2"
+    commander "^4.0.0"
+    glob "^10.3.10"
+    lines-and-columns "^1.1.6"
+    mz "^2.7.0"
+    pirates "^4.0.1"
+    ts-interface-checker "^0.1.9"
 
 terser@^5.15.1:
   version "5.26.0"
-  resolved "https://registry.yarnpkg.com/terser/-/terser-5.26.0.tgz#ee9f05d929f4189a9c28a0feb889d96d50126fe1"
+  resolved "https://registry.npmjs.org/terser/-/terser-5.26.0.tgz"
   integrity sha512-dytTGoE2oHgbNV9nTzgBEPaqAWvcJNl66VZ0BkJqlvp71IjO8CxdBx/ykCNb47cLnCmCvRZ6ZR0tLkqvZCdVBQ==
   dependencies:
     "@jridgewell/source-map" "^0.3.3"
@@ -3575,58 +2852,99 @@ terser@^5.15.1:
     commander "^2.20.0"
     source-map-support "~0.5.20"
 
+thenify-all@^1.0.0:
+  version "1.6.0"
+  resolved "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz"
+  integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==
+  dependencies:
+    thenify ">= 3.1.0 < 4"
+
+"thenify@>= 3.1.0 < 4":
+  version "3.3.1"
+  resolved "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz"
+  integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==
+  dependencies:
+    any-promise "^1.0.0"
+
 timeout-abort-controller@^3.0.0:
   version "3.0.0"
-  resolved "https://registry.yarnpkg.com/timeout-abort-controller/-/timeout-abort-controller-3.0.0.tgz#dd57ffca041652c03769904f8d95afd93fb95595"
+  resolved "https://registry.npmjs.org/timeout-abort-controller/-/timeout-abort-controller-3.0.0.tgz"
   integrity sha512-O3e+2B8BKrQxU2YRyEjC/2yFdb33slI22WRdUaDx6rvysfi9anloNZyR2q0l6LnePo5qH7gSM7uZtvvwZbc2yA==
   dependencies:
     retimer "^3.0.0"
 
-to-fast-properties@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
-  integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==
-
 to-regex-range@^5.0.1:
   version "5.0.1"
-  resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
+  resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz"
   integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
   dependencies:
     is-number "^7.0.0"
 
+tr46@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz"
+  integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==
+  dependencies:
+    punycode "^2.1.0"
+
 tr46@~0.0.3:
   version "0.0.3"
-  resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
+  resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz"
   integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
 
+tree-kill@^1.2.2:
+  version "1.2.2"
+  resolved "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz"
+  integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==
+
+ts-interface-checker@^0.1.9:
+  version "0.1.13"
+  resolved "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz"
+  integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==
+
 tslib@^1.11.1:
   version "1.14.1"
-  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
+  resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz"
   integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
 
 tslib@^2.0.3, tslib@^2.3.1, tslib@^2.5.0:
   version "2.6.2"
-  resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
+  resolved "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz"
   integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
 
-typescript@5.3.3:
+tsup@^8.0.1:
+  version "8.0.1"
+  resolved "https://registry.npmjs.org/tsup/-/tsup-8.0.1.tgz"
+  integrity sha512-hvW7gUSG96j53ZTSlT4j/KL0q1Q2l6TqGBFc6/mu/L46IoNWqLLUzLRLP1R8Q7xrJTmkDxxDoojV5uCVs1sVOg==
+  dependencies:
+    bundle-require "^4.0.0"
+    cac "^6.7.12"
+    chokidar "^3.5.1"
+    debug "^4.3.1"
+    esbuild "^0.19.2"
+    execa "^5.0.0"
+    globby "^11.0.3"
+    joycon "^3.0.1"
+    postcss-load-config "^4.0.1"
+    resolve-from "^5.0.0"
+    rollup "^4.0.2"
+    source-map "0.8.0-beta.0"
+    sucrase "^3.20.3"
+    tree-kill "^1.2.2"
+
+typescript@>=4.5.0, typescript@5.3.3:
   version "5.3.3"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37"
+  resolved "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz"
   integrity sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==
 
 uc.micro@^1.0.1, uc.micro@^1.0.5:
   version "1.0.6"
-  resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.6.tgz#9c411a802a409a91fc6cf74081baba34b24499ac"
+  resolved "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz"
   integrity sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==
 
-ufo@^1.3.0:
-  version "1.3.2"
-  resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.3.2.tgz#c7d719d0628a1c80c006d2240e0d169f6e3c0496"
-  integrity sha512-o+ORpgGwaYQXgqGDwd+hkS4PuZ3QnmqMMxRuajK/a38L6fTpcE5GPIfrf+L/KemFzfUpeUQc1rRS1iDBozvnFA==
-
 uint8-varint@^2.0.1:
   version "2.0.2"
-  resolved "https://registry.yarnpkg.com/uint8-varint/-/uint8-varint-2.0.2.tgz#bbb140213ecd21da55da77876df954f971da0b90"
+  resolved "https://registry.npmjs.org/uint8-varint/-/uint8-varint-2.0.2.tgz"
   integrity sha512-LZXmBT0jiHR7J4oKM1GUhtdLFW1yPauzI8NjJlotXn92TprO9u8VMvEVR4QMk8xhUVUd+2fqfU2/kGbVHYSSWw==
   dependencies:
     uint8arraylist "^2.0.0"
@@ -3634,142 +2952,125 @@ uint8-varint@^2.0.1:
 
 uint8arraylist@^2.0.0, uint8arraylist@^2.4.3:
   version "2.4.7"
-  resolved "https://registry.yarnpkg.com/uint8arraylist/-/uint8arraylist-2.4.7.tgz#ca291ab963056f20df4e2ab5280f2d3ad01dec9d"
+  resolved "https://registry.npmjs.org/uint8arraylist/-/uint8arraylist-2.4.7.tgz"
   integrity sha512-ohRElqR6C5dd60vRFLq40MCiSnUe1AzkpHvbCEMCGGP6zMoFYECsjdhL6bR1kTK37ONNRDuHQ3RIpScRYcYYIg==
   dependencies:
     uint8arrays "^4.0.2"
 
 uint8arrays@^4.0.2, uint8arrays@^4.0.6:
   version "4.0.10"
-  resolved "https://registry.yarnpkg.com/uint8arrays/-/uint8arrays-4.0.10.tgz#3ec5cde3348903c140e87532fc53f46b8f2e921f"
+  resolved "https://registry.npmjs.org/uint8arrays/-/uint8arrays-4.0.10.tgz"
   integrity sha512-AnJNUGGDJAgFw/eWu/Xb9zrVKEGlwJJCaeInlf3BkecE/zcTobk5YXYIPNQJO1q5Hh1QZrQQHf0JvcHqz2hqoA==
   dependencies:
     multiformats "^12.0.1"
 
 uint8arrays@^5.0.0:
   version "5.0.0"
-  resolved "https://registry.yarnpkg.com/uint8arrays/-/uint8arrays-5.0.0.tgz#97d06e1c6246090de6663b796480096e51c8c891"
+  resolved "https://registry.npmjs.org/uint8arrays/-/uint8arrays-5.0.0.tgz"
   integrity sha512-RWO7gR4x6syxnKDfZO8mDCsaaYs1/BqZCxlHgrcRge50E9GTnLmtoA4kwFSGIL4s3dQkryeTkvtG6oEFEya3yg==
   dependencies:
     multiformats "^12.0.1"
 
-unbuild@2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/unbuild/-/unbuild-2.0.0.tgz#9e2117e83ce5d93bae0c9ee056c3f6c241ea4fbc"
-  integrity sha512-JWCUYx3Oxdzvw2J9kTAp+DKE8df/BnH/JTSj6JyA4SH40ECdFu7FoJJcrm8G92B7TjofQ6GZGjJs50TRxoH6Wg==
-  dependencies:
-    "@rollup/plugin-alias" "^5.0.0"
-    "@rollup/plugin-commonjs" "^25.0.4"
-    "@rollup/plugin-json" "^6.0.0"
-    "@rollup/plugin-node-resolve" "^15.2.1"
-    "@rollup/plugin-replace" "^5.0.2"
-    "@rollup/pluginutils" "^5.0.3"
-    chalk "^5.3.0"
-    citty "^0.1.2"
-    consola "^3.2.3"
-    defu "^6.1.2"
-    esbuild "^0.19.2"
-    globby "^13.2.2"
-    hookable "^5.5.3"
-    jiti "^1.19.3"
-    magic-string "^0.30.3"
-    mkdist "^1.3.0"
-    mlly "^1.4.0"
-    pathe "^1.1.1"
-    pkg-types "^1.0.3"
-    pretty-bytes "^6.1.1"
-    rollup "^3.28.1"
-    rollup-plugin-dts "^6.0.0"
-    scule "^1.0.0"
-    untyped "^1.4.0"
-
 underscore@~1.13.2:
   version "1.13.6"
-  resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.6.tgz#04786a1f589dc6c09f761fc5f45b89e935136441"
+  resolved "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz"
   integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==
 
 universalify@^2.0.0:
   version "2.0.1"
-  resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d"
+  resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz"
   integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==
 
-untyped@^1.4.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/untyped/-/untyped-1.4.0.tgz#c2e84bea78372ca7841f179504d5596af4e85a89"
-  integrity sha512-Egkr/s4zcMTEuulcIb7dgURS6QpN7DyqQYdf+jBtiaJvQ+eRsrtWUoX84SbvQWuLkXsOjM+8sJC9u6KoMK/U7Q==
-  dependencies:
-    "@babel/core" "^7.22.9"
-    "@babel/standalone" "^7.22.9"
-    "@babel/types" "^7.22.5"
-    defu "^6.1.2"
-    jiti "^1.19.1"
-    mri "^1.2.0"
-    scule "^1.0.0"
-
-update-browserslist-db@^1.0.13:
-  version "1.0.13"
-  resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4"
-  integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==
-  dependencies:
-    escalade "^3.1.1"
-    picocolors "^1.0.0"
-
-util-deprecate@^1.0.1, util-deprecate@^1.0.2:
+util-deprecate@^1.0.1:
   version "1.0.2"
-  resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
+  resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"
   integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
 
-uuid@9.0.1:
-  version "9.0.1"
-  resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
-  integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
-
 uuid@^8.3.2:
   version "8.3.2"
-  resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
+  resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz"
   integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
 
+uuid@9.0.1:
+  version "9.0.1"
+  resolved "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz"
+  integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
+
 varint-decoder@^1.0.0:
   version "1.0.0"
-  resolved "https://registry.yarnpkg.com/varint-decoder/-/varint-decoder-1.0.0.tgz#289dab7887ee58d0c7be3a3353abeab4ca60aa77"
+  resolved "https://registry.npmjs.org/varint-decoder/-/varint-decoder-1.0.0.tgz"
   integrity sha512-JkOvdztASWGUAsXshCFHrB9f6AgR2Q8W08CEyJ+43b1qtFocmI8Sp1R/M0E/hDOY2FzVIqk63tOYLgDYWuJ7IQ==
   dependencies:
     varint "^5.0.0"
 
 varint@^5.0.0:
   version "5.0.2"
-  resolved "https://registry.yarnpkg.com/varint/-/varint-5.0.2.tgz#5b47f8a947eb668b848e034dcfa87d0ff8a7f7a4"
+  resolved "https://registry.npmjs.org/varint/-/varint-5.0.2.tgz"
   integrity sha512-lKxKYG6H03yCZUpAGOPOsMcGxd1RHCu1iKvEHYDPmTyq2HueGhD73ssNBqqQWfvYs04G9iUFRvmAVLW20Jw6ow==
 
 varint@^6.0.0:
   version "6.0.0"
-  resolved "https://registry.yarnpkg.com/varint/-/varint-6.0.0.tgz#9881eb0ce8feaea6512439d19ddf84bf551661d0"
+  resolved "https://registry.npmjs.org/varint/-/varint-6.0.0.tgz"
   integrity sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg==
 
 webidl-conversions@^3.0.0:
   version "3.0.1"
-  resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
+  resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz"
   integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
 
+webidl-conversions@^4.0.2:
+  version "4.0.2"
+  resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz"
+  integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==
+
 whatwg-url@^5.0.0:
   version "5.0.0"
-  resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"
+  resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz"
   integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==
   dependencies:
     tr46 "~0.0.3"
     webidl-conversions "^3.0.0"
 
-wrappy@1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
-  integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
+whatwg-url@^7.0.0:
+  version "7.1.0"
+  resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz"
+  integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==
+  dependencies:
+    lodash.sortby "^4.7.0"
+    tr46 "^1.0.1"
+    webidl-conversions "^4.0.2"
+
+which@^2.0.1:
+  version "2.0.2"
+  resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz"
+  integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
+  dependencies:
+    isexe "^2.0.0"
+
+"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
+  version "7.0.0"
+  resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz"
+  integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
+  dependencies:
+    ansi-styles "^4.0.0"
+    string-width "^4.1.0"
+    strip-ansi "^6.0.0"
+
+wrap-ansi@^8.1.0:
+  version "8.1.0"
+  resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz"
+  integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==
+  dependencies:
+    ansi-styles "^6.1.0"
+    string-width "^5.0.1"
+    strip-ansi "^7.0.1"
 
 xmlcreate@^2.0.4:
   version "2.0.4"
-  resolved "https://registry.yarnpkg.com/xmlcreate/-/xmlcreate-2.0.4.tgz#0c5ab0f99cdd02a81065fa9cd8f8ae87624889be"
+  resolved "https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.4.tgz"
   integrity sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg==
 
-yallist@^3.0.2:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
-  integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
+yaml@^2.3.4:
+  version "2.3.4"
+  resolved "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz"
+  integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==

From 4a49af3cfeae70162f3748a8d51a00e9dcc03745 Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Fri, 2 Feb 2024 11:57:29 -0600
Subject: [PATCH 02/16] Remove dist Bump version to 1.0.3 Switch tsup to static
 version

---
 .gitignore       |     1 +
 dist/index.d.mts |  1354 ----
 dist/index.d.ts  |  1354 ----
 dist/index.js    | 18037 --------------------------------------------
 dist/index.mjs   | 18045 ---------------------------------------------
 package.json     |     4 +-
 yarn.lock        |   295 +-
 7 files changed, 222 insertions(+), 38868 deletions(-)
 delete mode 100644 dist/index.d.mts
 delete mode 100644 dist/index.d.ts
 delete mode 100644 dist/index.js
 delete mode 100644 dist/index.mjs

diff --git a/.gitignore b/.gitignore
index 41b5ac4..41e9674 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+dist
 node_modules
 documentation
 .env
diff --git a/dist/index.d.mts b/dist/index.d.mts
deleted file mode 100644
index e2f52a1..0000000
--- a/dist/index.d.mts
+++ /dev/null
@@ -1,1354 +0,0 @@
-import { S3Client, CreateBucketCommand, ListBucketsCommand, DeleteBucketCommand, PutBucketAclCommand, GetBucketAclCommand, HeadObjectCommand, GetObjectCommand, ListObjectsV2Command, DeleteObjectCommand, CopyObjectCommand } from '@aws-sdk/client-s3';
-import axios from 'axios';
-import { Upload } from '@aws-sdk/lib-storage';
-import { CarWriter } from '@ipld/car';
-import { car } from '@helia/car';
-import { unixfs } from '@helia/unixfs';
-import { FsBlockstore } from 'blockstore-fs';
-import { createWriteStream, createReadStream } from 'node:fs';
-import { mkdir, rm } from 'node:fs/promises';
-import os from 'node:os';
-import path from 'node:path';
-import { Readable } from 'node:stream';
-import { v4 } from 'uuid';
-
-/** Provides methods for managing buckets in an S3 endpoint. */
-class BucketManager {
-  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
-  #DEFAULT_REGION = "us-east-1";
-
-  #client;
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-bucket
-   * @example
-   * import { BucketManager } from "@filebase/sdk";
-   * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      clientConfiguration = {
-        credentials: {
-          accessKeyId: clientKey,
-          secretAccessKey: clientSecret,
-        },
-        endpoint: clientEndpoint,
-        region: this.#DEFAULT_REGION,
-        forcePathStyle: true,
-      };
-    this.#client = new S3Client(clientConfiguration);
-  }
-
-  /**
-   * @typedef {Object} bucket
-   * @property {string} Name The name of the bucket
-   * @property {date} Date the bucket was created
-   */
-
-  /**
-   * @summary Creates a new bucket with the specified name.
-   * @param {string} name - The name of the bucket to create.
-   * @returns {Promise} - A promise that resolves when the bucket is created.
-   * @example
-   * // Create bucket with name of `create-bucket-example`
-   * await bucketManager.create(`create-bucket-example`);
-   */
-  async create(name) {
-    const command = new CreateBucketCommand({
-      Bucket: name,
-    });
-
-    return await this.#client.send(command);
-  }
-
-  /**
-   * @summary Lists the buckets in the client.
-   * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client.
-   * @example
-   * // List all buckets
-   * await bucketManager.list();
-   */
-  async list() {
-    const command = new ListBucketsCommand({}),
-      { Buckets } = await this.#client.send(command);
-
-    return Buckets;
-  }
-
-  /**
-   * @summary Deletes the specified bucket.
-   * @param {string} name - The name of the bucket to delete.
-   * @returns {Promise} - A promise that resolves when the bucket is deleted.
-   * @example
-   * // Delete bucket with name of `bucket-name-to-delete`
-   * await bucketManager.delete(`bucket-name-to-delete`);
-   */
-  async delete(name) {
-    const command = new DeleteBucketCommand({
-      Bucket: name,
-    });
-
-    await this.#client.send(command);
-    return true;
-  }
-
-  /**
-   * @summary Sets the privacy of a given bucket.
-   * @param {string} name - The name of the bucket to toggle.
-   * @param {boolean} targetState - The new target state. [true=private,false=public]
-   * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled.
-   * @example
-   * // Toggle bucket with label of `toggle-bucket-example`
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, true);  // Enabled
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled
-   */
-
-  async setPrivacy(name, targetState) {
-    const command = new PutBucketAclCommand({
-      Bucket: name,
-      ACL: targetState ? "private" : "public-read",
-    });
-
-    await this.#client.send(command);
-    return true;
-  }
-
-  /**
-   * @summary Gets the privacy of a given bucket
-   * @param {string} name - The name of the bucket to query.
-   * @returns {Promise} A promise that resolves to true if the bucket is private.
-   */
-  async getPrivacy(name) {
-    const command = new GetBucketAclCommand({
-      Bucket: name,
-    });
-
-    const response = await this.#client.send(command),
-      readPermission = response.Grants.find((grant) => {
-        return grant.Grantee.Type === "Group" && grant.Permission === "READ";
-      });
-    return !(typeof readPermission !== "undefined");
-  }
-}
-
-const GATEWAY_DEFAULT_TIMEOUT = 60000;
-
-async function downloadFromGateway(cid, options) {
-  if (typeof options.endpoint !== "string") {
-    throw new Error(`Default Gateway must be set`);
-  }
-
-  const downloadHeaders = {};
-  if (options.token) {
-    downloadHeaders["x-filebase-gateway-token"] = options.token;
-  }
-
-  const downloadResponse = await axios.request({
-    method: "GET",
-    baseURL: options.endpoint,
-    url: `/ipfs/${cid}`,
-    headers: downloadHeaders,
-    type: "stream",
-    timeout: options?.timeout || GATEWAY_DEFAULT_TIMEOUT,
-  });
-  return downloadResponse.data;
-}
-
-function apiErrorHandler(err) {
-  if (
-    err?.response &&
-    err?.response?.status &&
-    (err.response.status.toString()[0] === "4" ||
-      err.response.status.toString()[0] === "5")
-  ) {
-    throw new Error(
-      err.response.data.error?.details ||
-        err.response.data.error?.reason ||
-        err,
-    );
-  }
-  throw err;
-}
-
-class GatewayManager {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 60000;
-
-  #client;
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-gateway
-   * @example
-   * import { GatewayManager } from "@filebase/sdk";
-   * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
-        "base64",
-      ),
-      baseURL = `${clientEndpoint}/v1/gateways`;
-    this.#client = axios.create({
-      baseURL: baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-      headers: { Authorization: `Bearer ${encodedToken}` },
-    });
-  }
-
-  /**
-   * @typedef {Object} gateway
-   * @property {string} name Name for the gateway
-   * @property {string} domain Custom Domain for the gateway
-   * @property {boolean} enabled Whether the gateway is enabled or not
-   * @property {string} private Whether the gateway is scoped to users content
-   * @property {date} created_at Date the gateway was created
-   * @property {date} updated_at Date the gateway was last updated
-   */
-
-  /**
-   * @typedef {Object} gatewayOptions
-   * @property {boolean} [domain] Optional Domain to allow for using a Custom Domain
-   * @property {string} [enabled] Optional Toggle to use for enabling the gateway
-   * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private
-   */
-
-  /**
-   * @summary Creates a gateway with the given name and options
-   * @param {string} name Unique name across entire platform for the gateway.  Must be a valid subdomain name.
-   * @param {gatewayOptions} [options]
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`.
-   * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`.
-   * await gatewayManager.create(`create-gateway-example`, {
-   *   domain: `cname.mycustomdomain.com`
-   * });
-   */
-  async create(name, options = {}) {
-    try {
-      let createOptions = {
-        name,
-      };
-      if (typeof options.domain === "string") {
-        createOptions.domain = options.domain;
-      }
-      if (typeof options.enabled === "boolean") {
-        createOptions.enabled = options.enabled;
-      }
-      if (typeof options.private === "boolean") {
-        createOptions.private = options.private;
-      }
-      const createResponse = await this.#client.request({
-        method: "POST",
-        data: createOptions,
-      });
-      return createResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Deletes a gateway with the given name.
-   * @param {string} name - The name of the gateway to delete.
-   * @returns {Promise} - A promise that resolves to true if the gateway was successfully deleted.
-   * @example
-   * // Delete gateway with name of `delete-gateway-example`
-   * await gatewayManager.delete(`delete-name-example`);
-   */
-  async delete(name) {
-    try {
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${name}`,
-        validateStatus: (status) => {
-          return status === 204;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Returns the value of a gateway
-   * @param {string} name - Parameter representing the name to get.
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Get gateway with name of `gateway-get-example`
-   * await gatewayManager.get(`gateway-get-example`);
-   */
-  async get(name) {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-        url: `/${name}`,
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        },
-      });
-      return getResponse.status === 200 ? getResponse.data : false;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Returns a list of gateways
-   * @returns {Promise>} - A promise that resolves to an array of gateways.
-   * @example
-   * // List all gateways
-   * await gatewayManager.list();
-   */
-  async list() {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-      });
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Updates the specified gateway.
-   * @param {string} name - The name of the gateway to update.
-   * @param {gatewayOptions} options - The options for the update operation.
-   *
-   * @returns {Promise} - A Promise that resolves to true if the gateway was updated.
-   * @example
-   * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user.
-   * await gatewayManager.update(`update-gateway-example`, {
-   *   private: true
-   * });
-   */
-  async update(name, options) {
-    try {
-      const updateOptions = {
-        name,
-      };
-      if (options?.domain) {
-        updateOptions.domain = String(options.private);
-      }
-      if (options?.enabled) {
-        updateOptions.enabled = Boolean(options.enabled);
-      }
-      if (options?.private) {
-        updateOptions.private = Boolean(options.private);
-      }
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name}`,
-        data: updateOptions,
-        validateStatus: (status) => {
-          return status === 200;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Toggles the enabled state of a given gateway.
-   * @param {string} name - The name of the gateway to toggle.
-   * @param {boolean} targetState - The new target state.
-   * @returns {Promise} A promise that resolves to true if the gateway was successfully toggled.
-   * @example
-   * // Toggle gateway with label of `toggle-gateway-example`
-   * await gatewayManager.toggle(`toggle-gateway-example`, true);  // Enabled
-   * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled
-   */
-  async toggle(name, targetState) {
-    try {
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name}`,
-        data: {
-          enabled: Boolean(targetState),
-        },
-        validateStatus: (status) => {
-          return status === 200;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-}
-
/** Provides methods for managing names in a REST endpoint. */
class NameManager {
  #DEFAULT_ENDPOINT = "https://api.filebase.io";
  #DEFAULT_TIMEOUT = 60000;

  // Pre-configured axios instance with base URL and auth header.
  #client;

  /**
   * @summary Creates a new instance of the constructor.
   * @param {string} clientKey - The access key ID for authentication.
   * @param {string} clientSecret - The secret access key for authentication.
   * @tutorial quickstart-name
   * @example
   * import { NameManager } from "@filebase/sdk";
   * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
   */
  constructor(clientKey, clientSecret) {
    const clientEndpoint =
        process.env.NODE_ENV === "test"
          ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT
          : this.#DEFAULT_ENDPOINT,
      // Credentials are sent as a base64 "key:secret" bearer token.
      encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
        "base64",
      ),
      baseURL = `${clientEndpoint}/v1/names`;
    this.#client = axios.create({
      baseURL: baseURL,
      timeout: this.#DEFAULT_TIMEOUT,
      headers: { Authorization: `Bearer ${encodedToken}` },
    });
  }

  /**
   * @typedef {Object} name
   * @property {string} label Descriptive label for the Key
   * @property {string} network_key IPNS Key CID
   * @property {string} cid Value that name Publishes
   * @property {number} sequence Version Number for the name
   * @property {boolean} enabled Whether the name is being Published or not
   * @property {date} published_at Date the name was last published to the DHT
   * @property {date} created_at Date the name was created
   * @property {date} updated_at Date the name was last updated
   */

  /**
   * @typedef {Object} nameOptions
   * @property {boolean} [enabled] Whether the name is enabled or not.
   */

  /**
   * @summary Creates a new IPNS name with the given name as the label and CID.
   * @param {string} label - The label of the new IPNS name.
   * @param {string} cid - The CID of the IPNS name.
   * @param {nameOptions} [options] - Additional options for the IPNS name.
   * @returns {Promise} - A Promise that resolves with the response JSON.
   * @example
   * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
   * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`);
   */
  async create(
    label,
    cid,
    options = {
      enabled: true,
    },
  ) {
    try {
      const createResponse = await this.#client.request({
        method: "POST",
        data: {
          label,
          cid,
          // Enabled by default; only an explicit `false` disables.
          enabled: options?.enabled !== false,
        },
      });
      return createResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }

  /**
   * @summary Imports a user's IPNS private key.
   * @param {string} label - The label for the IPNS name.
   * @param {string} cid - The CID (Content Identifier) of the data.
   * @param {string} privateKey - The existing private key encoded in Base64.
   * @param {nameOptions} [options] - Additional options for the IPNS name.
   * @returns {Promise} - A Promise that resolves to the server response.
   * @example
   * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
   * // and a private key encoded with base64
   * await nameManager.import(
   *  `create-name-example`,
   *  `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
   *  `BASE64_ENCODED_PRIVATEKEY`
   * );
   */
  async import(
    label,
    cid,
    privateKey,
    options = {
      enabled: true,
    },
  ) {
    try {
      const importResponse = await this.#client.request({
        method: "POST",
        data: {
          label,
          cid,
          network_private_key: privateKey,
          enabled: options?.enabled !== false,
        },
      });
      return importResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }

  /**
   * @summary Updates the specified name with the given CID.
   * @param {string} label - The label of the name to update.
   * @param {string} cid - The cid to associate with the name.
   * @param {nameOptions} options - The options for the set operation.
   *
   * @returns {Promise} - A Promise that resolves to true if the IPNS name was updated.
   * @example
   * // Update name with label of `update-name-example` and set the value of the IPNS name.
   * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`);
   */
  async update(label, cid, options = {}) {
    try {
      const updateOptions = {
        cid,
      };
      // Bug fix: an explicit boolean check so `enabled: false` is actually
      // sent; the previous truthy check silently dropped `false`, making it
      // impossible to disable a name through update().
      if (typeof options?.enabled === "boolean") {
        updateOptions.enabled = options.enabled;
      }
      await this.#client.request({
        method: "PUT",
        url: `/${label}`,
        data: updateOptions,
        validateStatus: (status) => {
          return status === 200;
        },
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }

  /**
   * @summary Returns the value of an IPNS name
   * @param {string} label - Parameter representing the label of the name to resolve.
   * @returns {Promise} - A promise that resolves to the value of a name, or false when it does not exist.
   * @example
   * // Get IPNS name with label of `list-name-example`
   * await nameManager.get(`list-name-example`);
   */
  async get(label) {
    try {
      // 404 is an expected outcome (name absent), not an error.
      const getResponse = await this.#client.request({
        method: "GET",
        url: `/${label}`,
        validateStatus: (status) => {
          return status === 200 || status === 404;
        },
      });
      return getResponse.status === 200 ? getResponse.data : false;
    } catch (err) {
      apiErrorHandler(err);
    }
  }

  /**
   * @summary Returns a list of IPNS names
   * @returns {Promise>} - A promise that resolves to an array of names.
   * @example
   * // List all IPNS names
   * await nameManager.list();
   */
  async list() {
    try {
      const listResponse = await this.#client.request({
        method: "GET",
      });
      return listResponse.data;
    } catch (err) {
      apiErrorHandler(err);
    }
  }

  /**
   * @summary Deletes an IPNS name with the given label.
   * @param {string} label - The label of the IPNS name to delete.
   * @returns {Promise} - A promise that resolves to true if the IPNS name was successfully deleted.
   * @example
   * // Delete IPNS name with label of `delete-name-example`
   * await nameManager.delete(`delete-name-example`);
   */
  async delete(label) {
    try {
      await this.#client.request({
        method: "DELETE",
        url: `/${label}`,
        validateStatus: (status) => {
          return status === 204;
        },
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }

  /**
   * @summary Toggles the enabled state of a given IPNS name.
   * @param {string} label - The label of the IPNS name to toggle.
   * @param {boolean} targetState - The new target state.
   * @returns {Promise} A promise that resolves to true if the IPNS name was successfully toggled.
   * @example
   * // Toggle IPNS name with label of `toggle-name-example`
   * await nameManager.toggle(`toggle-name-example`, true);  // Enabled
   * await nameManager.toggle(`toggle-name-example`, false); // Disabled
   */
  async toggle(label, targetState) {
    try {
      await this.#client.request({
        method: "PUT",
        url: `/${label}`,
        data: {
          // Coerced for consistency with GatewayManager.toggle.
          enabled: Boolean(targetState),
        },
        validateStatus: (status) => {
          return status === 200;
        },
      });
      return true;
    } catch (err) {
      apiErrorHandler(err);
    }
  }
}
-
-// S3 Imports
-
/** Interacts with an S3 client to perform various operations on objects in a bucket. */
class ObjectManager {
  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
  #DEFAULT_REGION = "us-east-1";
  #DEFAULT_MAX_CONCURRENT_UPLOADS = 4;

  #client;
  #credentials;
  #defaultBucket;
  #gatewayConfiguration;
  #maxConcurrentUploads;

  /**
   * @typedef {Object} objectManagerOptions Optional settings for the constructor.
   * @property {string} [bucket] Default bucket to use.
   * @property {objectDownloadOptions} [gateway] Default gateway to use.
   * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads.
   */

  /**
   * @typedef {Object} objectDownloadOptions Optional settings for downloading objects
   * @property {string} endpoint Default gateway to use.
   * @property {string} [token] Token for the default gateway.
   * @property {number} [timeout=60000] Timeout for the default gateway
   */

  /**
   * @summary Creates a new instance of the constructor.
   * @param {string} clientKey - The access key ID for authentication.
   * @param {string} clientSecret - The secret access key for authentication.
   * @param {objectManagerOptions} options - Optional settings for the constructor.
   * @tutorial quickstart-object
   * @example
   * import { ObjectManager } from "@filebase/sdk";
   * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
   *   bucket: "my-default-bucket",
   *   maxConcurrentUploads: 4,
   *   gateway: {
   *     endpoint: "https://my-default-gateway.mydomain.com",
   *     token: SUPER_SECRET_GATEWAY_TOKEN
   *   }
   * });
   */
  constructor(clientKey, clientSecret, options) {
    const clientEndpoint =
        process.env.NODE_ENV === "test"
          ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT
          : this.#DEFAULT_ENDPOINT,
      clientConfiguration = {
        credentials: {
          accessKeyId: clientKey,
          secretAccessKey: clientSecret,
        },
        endpoint: clientEndpoint,
        region: this.#DEFAULT_REGION,
        forcePathStyle: true,
      };
    this.#defaultBucket = options?.bucket;
    this.#maxConcurrentUploads =
      options?.maxConcurrentUploads || this.#DEFAULT_MAX_CONCURRENT_UPLOADS;
    this.#credentials = {
      key: clientKey,
      secret: clientSecret,
    };
    this.#client = new S3Client(clientConfiguration);

    this.#gatewayConfiguration = {
      endpoint: options?.gateway?.endpoint,
      token: options?.gateway?.token,
      timeout: options?.gateway?.timeout,
    };
  }

  /**
   * @typedef {Object} objectOptions
   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
   */

  /**
   * @typedef {Object} objectHeadResult
   * @property {string} cid The CID of the uploaded object
   * @property {function} download Convenience function to download the object via S3 or the selected gateway
   * @property {array} [entries] If a directory then returns an array of the containing objects
   * @property {string} entries.cid The CID of the uploaded object
   * @property {string} entries.path The path of the object
   */

  /**
   * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload.
   * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file
   * and an optional entries object when uploading a CAR file.
   *
   * @summary Uploads a file or a CAR file to the specified bucket.
   * @param {string} key - The key or path of the file in the bucket.
   * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded.
   *    If an array of files is provided, each file should have a 'path' property specifying the path of the file
   *    and a 'content' property specifying the content of the file.  The SDK will then construct a CAR file locally
   *    and use that as the content of the object to be uploaded.
   * @param {Object} [metadata] Optional metadata for pin object
   * @param {objectOptions} [options] - The options for uploading the object.
   * @returns {Promise}
   * @example
   * // Upload Object
   * await objectManager.upload("my-object", Buffer.from("Hello World!"));
   * // Upload Object with Metadata
   * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), {
   *   "application": "my-filebase-app"
   * });
   * // Upload Directory
   * await objectManager.upload("my-first-directory", [
   *  {
   *   path: "/testObjects/1.txt",
   *   content: Buffer.from("upload test object", "utf-8"),
   *  },
   *  {
   *   path: "/testObjects/deep/1.txt",
   *   content: Buffer.from("upload deep test object", "utf-8"),
   *  },
   *  {
   *   path: "/topLevel.txt",
   *   content: Buffer.from("upload top level test object", "utf-8"),
   *  },
   * ]);
   */
  async upload(key, source, metadata, options) {
    // Generate Upload UUID - used to isolate this upload's temp blockstore
    // and as the root directory name inside the CAR.
    const uploadUUID = v4();

    // Setup Upload Options
    const bucket = options?.bucket || this.#defaultBucket,
      uploadOptions = {
        client: this.#client,
        params: {
          Bucket: bucket,
          Key: key,
          Body: source,
          Metadata: metadata || {},
        },
        queueSize: this.#maxConcurrentUploads,
        partSize: 26843546, //25.6Mb || 250Gb Max File Size
      };

    // Pack Multiple Files into CAR file for upload
    let parsedEntries = {};
    if (Array.isArray(source)) {
      // Mark Upload as a CAR file import
      uploadOptions.params.Metadata = {
        ...uploadOptions.params.Metadata,
        import: "car",
      };

      let temporaryCarFilePath, temporaryBlockstoreDir;
      try {
        // Setup Blockstore
        temporaryBlockstoreDir = path.resolve(
          os.tmpdir(),
          "filebase-sdk",
          "uploads",
          uploadUUID,
        );
        temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
        await mkdir(temporaryBlockstoreDir, { recursive: true });
        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir);

        const heliaFs = unixfs({
          blockstore: temporaryBlockstore,
        });

        // Prefix every entry path with the upload UUID so all entries share
        // a single root directory inside the CAR.
        for (let sourceEntry of source) {
          sourceEntry.path =
            sourceEntry.path[0] === "/"
              ? `/${uploadUUID}${sourceEntry.path}`
              : `/${uploadUUID}/${sourceEntry.path}`;
        }
        for await (const entry of heliaFs.addAll(source)) {
          parsedEntries[entry.path] = entry;
        }
        const rootEntry = parsedEntries[uploadUUID];

        // Get carFile stream here
        const carExporter = car({ blockstore: temporaryBlockstore }),
          { writer, out } = CarWriter.create([rootEntry.cid]);

        // Put carFile stream to disk.
        // Bug fix: wait for the write stream to finish flushing before the
        // CAR file is read back; previously the file could be opened for
        // reading while the pipe was still writing, truncating the upload.
        const output = createWriteStream(temporaryCarFilePath);
        const carFileWritten = new Promise((resolve, reject) => {
          output.on("finish", resolve);
          output.on("error", reject);
        });
        Readable.from(out).pipe(output);
        await carExporter.export(rootEntry.cid, writer);
        await carFileWritten;

        // Set Uploader to Read from carFile on disk
        uploadOptions.params.Body = createReadStream(temporaryCarFilePath);

        // Upload carFile via S3
        const parallelUploads3 = new Upload(uploadOptions);
        await parallelUploads3.done();
        await temporaryBlockstore.close();
      } finally {
        if (typeof temporaryBlockstoreDir !== "undefined") {
          // Delete Temporary Blockstore
          await rm(temporaryBlockstoreDir, { recursive: true, force: true });
        }
      }
    } else {
      // Upload file via S3
      const parallelUploads3 = new Upload(uploadOptions);
      await parallelUploads3.done();
    }

    // Get CID from Platform.
    // Bug fix: HeadObjectCommand takes no Body parameter; the (possibly
    // already-consumed) source stream was previously passed here by mistake.
    const command = new HeadObjectCommand({
        Bucket: bucket,
        Key: key,
      }),
      headResult = await this.#client.send(command),
      responseCid = headResult.Metadata.cid;

    if (Object.keys(parsedEntries).length === 0) {
      return {
        cid: responseCid,
        download: () => {
          return this.#routeDownload(responseCid, key, options);
        },
      };
    }
    return {
      cid: responseCid,
      download: () => {
        return this.#routeDownload(responseCid, key, options);
      },
      entries: parsedEntries,
    };
  }

  // Routes a download through the configured IPFS gateway when one is set,
  // otherwise falls back to a direct S3 download.
  async #routeDownload(cid, key, options) {
    return typeof this.#gatewayConfiguration.endpoint !== "undefined"
      ? downloadFromGateway(cid, this.#gatewayConfiguration)
      : this.download(key, options);
  }

  /**
   * @summary Gets an objects info and metadata using the S3 API.
   * @param {string} key - The key of the object to be inspected.
   * @param {objectOptions} [options] - The options for inspecting the object.
   * @returns {Promise}
   */
  async get(key, options) {
    const bucket = options?.bucket || this.#defaultBucket;
    try {
      const command = new HeadObjectCommand({
          Bucket: bucket,
          Key: key,
        }),
        response = await this.#client.send(command);

      response.download = () => {
        return this.#routeDownload(response.Metadata.cid, key, options);
      };

      return response;
    } catch (err) {
      // A missing object is an expected outcome, not an error.
      if (err.name === "NotFound") {
        return false;
      }
      throw err;
    }
  }

  /**
   * @summary Downloads an object from the specified bucket using the provided key.
   * @param {string} key - The key of the object to be downloaded.
   * @param {objectOptions} [options] - The options for downloading the object.
   * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream.
   * @example
   * // Download object with name of `download-object-example`
   * await objectManager.download(`download-object-example`);
   */
  async download(key, options) {
    // Download via IPFS Gateway if Setup or S3 by Default
    if (typeof this.#gatewayConfiguration.endpoint === "string") {
      const objectToFetch = await this.get(key, options);
      return objectToFetch.download();
    } else {
      const command = new GetObjectCommand({
          Bucket: options?.bucket || this.#defaultBucket,
          Key: key,
        }),
        response = await this.#client.send(command);

      return response.Body;
    }
  }

  /**
   * @typedef {Object} listObjectsResult
   * @property {boolean} IsTruncated Indicates if more results exist on the server
   * @property {string} NextContinuationToken ContinuationToken used to paginate list requests
   * @property {Array} Contents List of Keys stored in the S3 Bucket
   * @property {string} Contents.Key Key of the Object
   * @property {string} Contents.LastModified Date Last Modified of the Object
   * @property {string} Contents.CID CID of the Object
   * @property {string} Contents.ETag ETag of the Object
   * @property {number} Contents.Size Size in Bytes of the Object
   * @property {string} Contents.StorageClass Class of Storage of the Object
   * @property {function} Contents.download Convenience function to download the item using the S3 gateway
   */

  /**
   * @typedef {Object} listObjectOptions
   * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used.
   * @property {string} [ContinuationToken=null] Continues listing from this objects name.
   * @property {string} [Delimiter=null] Character used to group keys
   * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000.
   */

  /**
   * Retrieves a list of objects from a specified bucket.
   *
   * @param {listObjectOptions} options - The options for listing objects.
   * @returns {Promise} - A promise that resolves to an array of objects.
   * @example
   * // List objects in bucket with a limit of 1000
   * await objectManager.list({
   *   MaxKeys: 1000
   * });
   */
  async list(
    options = {
      Bucket: this.#defaultBucket,
      ContinuationToken: null,
      Delimiter: null,
      MaxKeys: 1000,
    },
  ) {
    if (options?.MaxKeys && options.MaxKeys > 100000) {
      throw new Error(`MaxKeys Maximum value is 100000`);
    }
    const bucket = options?.Bucket || this.#defaultBucket,
      limit = options?.MaxKeys || 1000,
      commandOptions = {
        Bucket: bucket,
        MaxKeys: limit,
      },
      command = new ListObjectsV2Command({
        ...options,
        ...commandOptions,
      });

    const { Contents, IsTruncated, NextContinuationToken } =
      await this.#client.send(command);
    return { Contents, IsTruncated, NextContinuationToken };
  }

  /**
   * @summary Deletes an object from the specified bucket using the provided key.
   * @param {string} key - The key of the object to be deleted.
   * @param {objectOptions} [options] - The options for deleting the file.
   * @returns {Promise} - A Promise that resolves with the result of the delete operation.
   * @example
   * // Delete object with name of `delete-object-example`
   * await objectManager.delete(`delete-object-example`);
   */
  async delete(key, options) {
    const command = new DeleteObjectCommand({
      Bucket: options?.bucket || this.#defaultBucket,
      Key: key,
    });

    await this.#client.send(command);
    return true;
  }

  /**
   * @typedef {Object} copyObjectOptions
   * @property {string} [sourceBucket] The source bucket from where the object is to be copied.
   * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey.
   */

  /**
   * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey.
   *
   * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket.
   * @param {string} sourceKey - The key of the object to be copied from the sourceBucket.
   * @param {string} destinationBucket - The bucket where the object will be copied to.
   * @param {copyObjectOptions} [options] - Additional options for the copy operation.
   *
   * @returns {Promise} - A Promise that resolves with the result of the copy operation.
   * @example
   * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
   * // TIP: Set bucket on constructor and it will be used as the default source for copying objects.
   * await objectManager.copy(`copy-object-test`, `copy-object-dest`, {
   *   sourceBucket: `copy-object-src`
   * });
   */
  async copy(
    sourceKey,
    destinationBucket,
    options = {
      sourceBucket: this.#defaultBucket,
      destinationKey: undefined,
    },
  ) {
    const copySource = `${
        options?.sourceBucket || this.#defaultBucket
      }/${sourceKey}`,
      command = new CopyObjectCommand({
        CopySource: copySource,
        Bucket: destinationBucket,
        Key: options?.destinationKey || sourceKey,
      });

    await this.#client.send(command);
    return true;
  }
}
-
-/** Provides methods for managing pins in an REST endpoint. */
-class PinManager {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 60000;
-
-  #client;
-  #credentials;
-  #gatewayConfiguration;
-  #defaultBucket;
-
-  /**
-   * @typedef {Object} pinManagerOptions Optional settings for the constructor.
-   * @property {string} [bucket] Default bucket to use.
-   * @property {pinDownloadOptions} [gateway] Default gateway to use.
-   */
-
-  /**
-   * @typedef {Object} pinDownloadOptions Optional settings for downloading pins
-   * @property {string} endpoint Default gateway to use.
-   * @property {string} [token] Token for the default gateway.
-   * @property {number} [timeout=60000] Timeout for the default gateway
-   */
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @param {pinManagerOptions} [options] - Optional settings for the constructor.
-   * @tutorial quickstart-pin
-   * @example
-   * import { PinManager } from "@filebase/sdk";
-   * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
-   *   bucket: "my-default-bucket",
-   *   gateway: {
-   *     endpoint: "https://my-default-gateway.mydomain.com
-   *     token: SUPER_SECRET_GATEWAY_TOKEN
-   *   }
-   * });
-   */
-  constructor(clientKey, clientSecret, options) {
-    this.#defaultBucket = options?.bucket;
-    const PSAClientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`;
-    this.#credentials = {
-      key: clientKey,
-      secret: clientSecret,
-    };
-    this.#client = axios.create({
-      baseURL: baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-    });
-
-    this.#gatewayConfiguration = {
-      endpoint: options?.gateway?.endpoint,
-      token: options?.gateway?.token,
-      timeout: options?.gateway?.timeout || this.#DEFAULT_TIMEOUT,
-    };
-  }
-
-  /**
-   * @typedef {Object} pinStatus
-   * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal
-   * @property {string} status Status a pin object can have at a pinning service. ("queued","pinning","pinned","failed")
-   * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination
-   * @property {Object} pin Pin object
-   * @property {string} pin.cid Content Identifier (CID) pinned recursively
-   * @property {string} pin.name Name for pinned data; can be used for lookups later
-   * @property {Array} pin.origins Optional list of multiaddrs known to provide the data
-   * @property {Object} pin.meta Optional metadata for pin object
-   * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data
-   * @property {object} [info] Optional info for PinStatus response
-   * @property {function} download Convenience function to download pin
-   */
-
-  /**
-   * @typedef {Object} pinOptions
-   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
-   */
-
-  /**
-   * @typedef {Object} listPinOptions
-   * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts
-   * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match)
-   * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies
-   * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only)
-   * @property {string} [before] Return results created (queued) before provided timestamp
-   * @property {string} [after] Return results created (queued) after provided timestamp
-   * @property {number} [limit] Max records to return
-   * @property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport
-   */
-
-  /**
-   * @typedef {Object} listPinResults
-   * @property {number} count Total number of pin objects that exist for passed query filters
-   * @property {Array} Array of PinStatus results
-   */
-
-  /**
-   * @summary List the pins in a given bucket
-   * @param {listPinOptions} [listOptions]
-   * @param {pinOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // List pins in bucket with a limit of 1000
-   * await pinManager.list({
-   *   limit: 1000
-   * });
-   */
-  async list(listOptions, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        getResponse = await this.#client.request({
-          method: "GET",
-          params: listOptions,
-          headers: { Authorization: `Bearer ${encodedToken}` },
-        });
-      for (let pinStatus of getResponse.data.results) {
-        pinStatus.download = () => {
-          return this.download(pinStatus.pin.cid);
-        };
-      }
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Create a pin in the selected bucket
-   * @param {string} key Key or path of the file in the bucket
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {Object} [metadata] Optional metadata for pin object
-   * @param {pinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Create Pin with Metadata
-   * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "application": "my-custom-app-on-filebase"
-   * });
-   */
-  async create(key, cid, metadata, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        pinStatus = await this.#client.request({
-          method: "POST",
-          data: {
-            cid,
-            name: key,
-            meta: metadata,
-          },
-          headers: { Authorization: `Bearer ${encodedToken}` },
-        });
-      pinStatus.data.download = () => {
-        return this.download(pinStatus.data.pin.cid);
-      };
-      return pinStatus.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @typedef {Object} replacePinOptions
-   * @augments pinOptions
-   * @property {Object} [metadata] Optional metadata to set on pin during replacement
-   * @property {string} [name] Optional name for pin to set during replacement
-   */
-
-  /**
-   * @summary Replace a pinned object in the selected bucket
-   * @param {string} requestid Unique ID for the pinned object
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {replacePinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Replace Pin with Metadata
-   * await pinManager.create("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "revision": Date.now()
-   * }
-   */
-  async replace(requestid, cid, options) {
-    try {
-      let replaceData = {
-        cid,
-        meta: options?.metadata || {},
-      };
-      if (options?.name) {
-        replaceData.name = options.name;
-      }
-
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        pinStatusResult = await this.#client.request({
-          method: "POST",
-          url: `/${requestid}`,
-          data: replaceData,
-          validateStatus: (status) => {
-            return status === 200;
-          },
-          headers: { Authorization: `Bearer ${encodedToken}` },
-        });
-      const pinStatus = pinStatusResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Download a pin from the selected IPFS gateway
-   * @param {string} cid
-   * @param {pinDownloadOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // Download Pin by CID
-   * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF");
-   */
-  async download(cid, options) {
-    const downloadOptions = Object.assign(this.#gatewayConfiguration, options);
-    return downloadFromGateway(cid, downloadOptions);
-  }
-
-  /**
-   * @summary Get details about a pinned object
-   * @param {string} requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for getting the pin
-   * @returns {Promise}
-   * @example
-   * // Get Pin Info by RequestId
-   * await pinManager.get("qr4231214");
-   */
-  async get(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        getResponseResult = await this.#client.request({
-          method: "GET",
-          url: `/${requestid}`,
-          headers: { Authorization: `Bearer ${encodedToken}` },
-          validateStatus: (status) => {
-            return status === 200 || status === 404;
-          },
-        });
-      if (getResponseResult.status === 404) {
-        return false;
-      }
-      const pinStatus = getResponseResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Delete a pinned object from the selected bucket
-   * @param requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for deleting the pin
-   * @returns {Promise}
-   * @example
-   * // Delete Pin by RequestId
-   * await pinManager.delete("qr4231213");
-   */
-  async delete(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket);
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${requestid}`,
-        headers: { Authorization: `Bearer ${encodedToken}` },
-        validateStatus: (status) => {
-          return status === 202;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  #getEncodedToken(bucket) {
-    bucket = bucket || this.#defaultBucket;
-    return Buffer.from(
-      `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`,
-    ).toString("base64");
-  }
-}
-
-export { BucketManager, GatewayManager, NameManager, ObjectManager, PinManager };
diff --git a/dist/index.d.ts b/dist/index.d.ts
deleted file mode 100644
index e2f52a1..0000000
--- a/dist/index.d.ts
+++ /dev/null
@@ -1,1354 +0,0 @@
-import { S3Client, CreateBucketCommand, ListBucketsCommand, DeleteBucketCommand, PutBucketAclCommand, GetBucketAclCommand, HeadObjectCommand, GetObjectCommand, ListObjectsV2Command, DeleteObjectCommand, CopyObjectCommand } from '@aws-sdk/client-s3';
-import axios from 'axios';
-import { Upload } from '@aws-sdk/lib-storage';
-import { CarWriter } from '@ipld/car';
-import { car } from '@helia/car';
-import { unixfs } from '@helia/unixfs';
-import { FsBlockstore } from 'blockstore-fs';
-import { createWriteStream, createReadStream } from 'node:fs';
-import { mkdir, rm } from 'node:fs/promises';
-import os from 'node:os';
-import path from 'node:path';
-import { Readable } from 'node:stream';
-import { v4 } from 'uuid';
-
-/** Provides methods for managing buckets in an S3 endpoint. */
-class BucketManager {
-  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
-  #DEFAULT_REGION = "us-east-1";
-
-  #client;
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-bucket
-   * @example
-   * import { BucketManager } from "@filebase/sdk";
-   * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      clientConfiguration = {
-        credentials: {
-          accessKeyId: clientKey,
-          secretAccessKey: clientSecret,
-        },
-        endpoint: clientEndpoint,
-        region: this.#DEFAULT_REGION,
-        forcePathStyle: true,
-      };
-    this.#client = new S3Client(clientConfiguration);
-  }
-
-  /**
-   * @typedef {Object} bucket
-   * @property {string} Name The name of the bucket
-   * @property {date} Date the bucket was created
-   */
-
-  /**
-   * @summary Creates a new bucket with the specified name.
-   * @param {string} name - The name of the bucket to create.
-   * @returns {Promise} - A promise that resolves when the bucket is created.
-   * @example
-   * // Create bucket with name of `create-bucket-example`
-   * await bucketManager.create(`create-bucket-example`);
-   */
-  async create(name) {
-    const command = new CreateBucketCommand({
-      Bucket: name,
-    });
-
-    return await this.#client.send(command);
-  }
-
-  /**
-   * @summary Lists the buckets in the client.
-   * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client.
-   * @example
-   * // List all buckets
-   * await bucketManager.list();
-   */
-  async list() {
-    const command = new ListBucketsCommand({}),
-      { Buckets } = await this.#client.send(command);
-
-    return Buckets;
-  }
-
-  /**
-   * @summary Deletes the specified bucket.
-   * @param {string} name - The name of the bucket to delete.
-   * @returns {Promise} - A promise that resolves when the bucket is deleted.
-   * @example
-   * // Delete bucket with name of `bucket-name-to-delete`
-   * await bucketManager.delete(`bucket-name-to-delete`);
-   */
-  async delete(name) {
-    const command = new DeleteBucketCommand({
-      Bucket: name,
-    });
-
-    await this.#client.send(command);
-    return true;
-  }
-
-  /**
-   * @summary Sets the privacy of a given bucket.
-   * @param {string} name - The name of the bucket to toggle.
-   * @param {boolean} targetState - The new target state. [true=private,false=public]
-   * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled.
-   * @example
-   * // Toggle bucket with label of `toggle-bucket-example`
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, true);  // Enabled
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled
-   */
-
-  async setPrivacy(name, targetState) {
-    const command = new PutBucketAclCommand({
-      Bucket: name,
-      ACL: targetState ? "private" : "public-read",
-    });
-
-    await this.#client.send(command);
-    return true;
-  }
-
-  /**
-   * @summary Gets the privacy of a given bucket
-   * @param {string} name - The name of the bucket to query.
-   * @returns {Promise} A promise that resolves to true if the bucket is private.
-   */
-  async getPrivacy(name) {
-    const command = new GetBucketAclCommand({
-      Bucket: name,
-    });
-
-    const response = await this.#client.send(command),
-      readPermission = response.Grants.find((grant) => {
-        return grant.Grantee.Type === "Group" && grant.Permission === "READ";
-      });
-    return !(typeof readPermission !== "undefined");
-  }
-}
-
-const GATEWAY_DEFAULT_TIMEOUT = 60000;
-
-async function downloadFromGateway(cid, options) {
-  if (typeof options.endpoint !== "string") {
-    throw new Error(`Default Gateway must be set`);
-  }
-
-  const downloadHeaders = {};
-  if (options.token) {
-    downloadHeaders["x-filebase-gateway-token"] = options.token;
-  }
-
-  const downloadResponse = await axios.request({
-    method: "GET",
-    baseURL: options.endpoint,
-    url: `/ipfs/${cid}`,
-    headers: downloadHeaders,
-    type: "stream",
-    timeout: options?.timeout || GATEWAY_DEFAULT_TIMEOUT,
-  });
-  return downloadResponse.data;
-}
-
-function apiErrorHandler(err) {
-  if (
-    err?.response &&
-    err?.response?.status &&
-    (err.response.status.toString()[0] === "4" ||
-      err.response.status.toString()[0] === "5")
-  ) {
-    throw new Error(
-      err.response.data.error?.details ||
-        err.response.data.error?.reason ||
-        err,
-    );
-  }
-  throw err;
-}
-
-class GatewayManager {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 60000;
-
-  #client;
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-gateway
-   * @example
-   * import { GatewayManager } from "@filebase/sdk";
-   * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
-        "base64",
-      ),
-      baseURL = `${clientEndpoint}/v1/gateways`;
-    this.#client = axios.create({
-      baseURL: baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-      headers: { Authorization: `Bearer ${encodedToken}` },
-    });
-  }
-
-  /**
-   * @typedef {Object} gateway
-   * @property {string} name Name for the gateway
-   * @property {string} domain Custom Domain for the gateway
-   * @property {boolean} enabled Whether the gateway is enabled or not
-   * @property {string} private Whether the gateway is scoped to users content
-   * @property {date} created_at Date the gateway was created
-   * @property {date} updated_at Date the gateway was last updated
-   */
-
-  /**
-   * @typedef {Object} gatewayOptions
-   * @property {boolean} [domain] Optional Domain to allow for using a Custom Domain
-   * @property {string} [enabled] Optional Toggle to use for enabling the gateway
-   * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private
-   */
-
-  /**
-   * @summary Creates a gateway with the given name and options
-   * @param {string} name Unique name across entire platform for the gateway.  Must be a valid subdomain name.
-   * @param {gatewayOptions} [options]
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`.
-   * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`.
-   * await gatewayManager.create(`create-gateway-example`, {
-   *   domain: `cname.mycustomdomain.com`
-   * });
-   */
-  async create(name, options = {}) {
-    try {
-      let createOptions = {
-        name,
-      };
-      if (typeof options.domain === "string") {
-        createOptions.domain = options.domain;
-      }
-      if (typeof options.enabled === "boolean") {
-        createOptions.enabled = options.enabled;
-      }
-      if (typeof options.private === "boolean") {
-        createOptions.private = options.private;
-      }
-      const createResponse = await this.#client.request({
-        method: "POST",
-        data: createOptions,
-      });
-      return createResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Deletes a gateway with the given name.
-   * @param {string} name - The name of the gateway to delete.
-   * @returns {Promise} - A promise that resolves to true if the gateway was successfully deleted.
-   * @example
-   * // Delete gateway with name of `delete-gateway-example`
-   * await gatewayManager.delete(`delete-name-example`);
-   */
-  async delete(name) {
-    try {
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${name}`,
-        validateStatus: (status) => {
-          return status === 204;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Returns the value of a gateway
-   * @param {string} name - Parameter representing the name to get.
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Get gateway with name of `gateway-get-example`
-   * await gatewayManager.get(`gateway-get-example`);
-   */
-  async get(name) {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-        url: `/${name}`,
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        },
-      });
-      return getResponse.status === 200 ? getResponse.data : false;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Returns a list of gateways
-   * @returns {Promise>} - A promise that resolves to an array of gateways.
-   * @example
-   * // List all gateways
-   * await gatewayManager.list();
-   */
-  async list() {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-      });
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Updates the specified gateway.
-   * @param {string} name - The name of the gateway to update.
-   * @param {gatewayOptions} options - The options for the update operation.
-   *
-   * @returns {Promise} - A Promise that resolves to true if the gateway was updated.
-   * @example
-   * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user.
-   * await gatewayManager.update(`update-gateway-example`, {
-   *   private: true
-   * });
-   */
-  async update(name, options) {
-    try {
-      const updateOptions = {
-        name,
-      };
-      if (options?.domain) {
-        updateOptions.domain = String(options.private);
-      }
-      if (options?.enabled) {
-        updateOptions.enabled = Boolean(options.enabled);
-      }
-      if (options?.private) {
-        updateOptions.private = Boolean(options.private);
-      }
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name}`,
-        data: updateOptions,
-        validateStatus: (status) => {
-          return status === 200;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Toggles the enabled state of a given gateway.
-   * @param {string} name - The name of the gateway to toggle.
-   * @param {boolean} targetState - The new target state.
-   * @returns {Promise} A promise that resolves to true if the gateway was successfully toggled.
-   * @example
-   * // Toggle gateway with label of `toggle-gateway-example`
-   * await gatewayManager.toggle(`toggle-gateway-example`, true);  // Enabled
-   * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled
-   */
-  async toggle(name, targetState) {
-    try {
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name}`,
-        data: {
-          enabled: Boolean(targetState),
-        },
-        validateStatus: (status) => {
-          return status === 200;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-}
-
-/** Provides methods for managing names in an REST endpoint. */
-class NameManager {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 60000;
-
-  #client;
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-name
-   * @example
-   * import { NameManager } from "@filebase/sdk";
-   * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
-        "base64",
-      ),
-      baseURL = `${clientEndpoint}/v1/names`;
-    this.#client = axios.create({
-      baseURL: baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-      headers: { Authorization: `Bearer ${encodedToken}` },
-    });
-  }
-
-  /**
-   * @typedef {Object} name
-   * @property {string} label Descriptive label for the Key
-   * @property {string} network_key IPNS Key CID
-   * @property {string} cid Value that name Publishes
-   * @property {number} sequence Version Number for the name
-   * @property {boolean} enabled Whether the name is being Published or not
-   * @property {date} published_at Date the name was last published to the DHT
-   * @property {date} created_at Date the name was created
-   * @property {date} updated_at Date the name was last updated
-   */
-
-  /**
-   * @typedef {Object} nameOptions
-   * @property {boolean} [enabled] Whether the name is enabled or not.
-   */
-
-  /**
-   * @summary Creates a new IPNS name with the given name as the label and CID.
-   * @param {string} label - The label of the new IPNS name.
-   * @param {string} cid - The CID of the IPNS name.
-   * @param {nameOptions} [options] - Additional options for the IPNS name.
-   * @returns {Promise} - A Promise that resolves with the response JSON.
-   * @example
-   * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`);
-   */
-  async create(
-    label,
-    cid,
-    options = {
-      enabled: true,
-    },
-  ) {
-    try {
-      const createResponse = await this.#client.request({
-        method: "POST",
-        data: {
-          label,
-          cid,
-          enabled: options?.enabled !== false,
-        },
-      });
-      return createResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Imports a user's IPNS private key.
-   * @param {string} label - The label for the IPNS name.
-   * @param {string} cid - The CID (Content Identifier) of the data.
-   * @param {string} privateKey - The existing private key encoded in Base64.
-   * @param {nameOptions} [options] - Additional options for the IPNS name.
-   * @returns {Promise} - A Promise that resolves to the server response.
-   * @example
-   * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   * // and a private key encoded with base64
-   * await nameManager.import(
-   *  `create-name-example`,
-   *  `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   *  `BASE64_ENCODED_PRIVATEKEY`
-   * );
-   */
-  async import(
-    label,
-    cid,
-    privateKey,
-    options = {
-      enabled: true,
-    },
-  ) {
-    try {
-      const importResponse = await this.#client.request({
-        method: "POST",
-        data: {
-          label,
-          cid,
-          network_private_key: privateKey,
-          enabled: options?.enabled !== false,
-        },
-      });
-      return importResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Updates the specified name with the given CID.
-   * @param {string} label - The label of the name to update.
-   * @param {string} cid - The cid to associate with the name.
-   * @param {nameOptions} options - The options for the set operation.
-   *
-   * @returns {Promise} - A Promise that resolves to true if the IPNS name was updated.
-   * @example
-   * // Update name with label of `update-name-example` and set the value of the IPNS name.
-   * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`);
-   */
-  async update(label, cid, options = {}) {
-    try {
-      const updateOptions = {
-        cid,
-      };
-      if (options?.enabled) {
-        updateOptions.enabled = Boolean(options.enabled);
-      }
-      await this.#client.request({
-        method: "PUT",
-        url: `/${label}`,
-        data: updateOptions,
-        validateStatus: (status) => {
-          return status === 200;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Returns the value of an IPNS name
-   * @param {string} label - Parameter representing the label of the name to resolve.
-   * @returns {Promise} - A promise that resolves to the value of a name.
-   * @example
-   * // Get IPNS name with label of `list-name-example`
-   * await nameManager.get(`list-name-example`);
-   */
-  async get(label) {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-        url: `/${label}`,
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        },
-      });
-      return getResponse.status === 200 ? getResponse.data : false;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Returns a list of IPNS names
-   * @returns {Promise>} - A promise that resolves to an array of names.
-   * @example
-   * // List all IPNS names
-   * await nameManager.list();
-   */
-  async list() {
-    try {
-      const listResponse = await this.#client.request({
-        method: "GET",
-      });
-      return listResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Deletes an IPNS name with the given label.
-   * @param {string} label - The label of the IPNS name to delete.
-   * @returns {Promise} - A promise that resolves to true if the IPNS name was successfully deleted.
-   * @example
-   * // List IPNS name with label of `delete-name-example`
-   * await nameManager.delete(`delete-name-example`);
-   */
-  async delete(label) {
-    try {
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${label}`,
-        validateStatus: (status) => {
-          return status === 204;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Toggles the enabled state of a given IPNS name.
-   * @param {string} label - The label of the IPNS name to toggle.
-   * @param {boolean} targetState - The new target state.
-   * @returns {Promise} A promise that resolves to true if the IPNS name was successfully toggled.
-   * @example
-   * // Toggle IPNS name with label of `toggle-name-example`
-   * await nameManager.toggle(`toggle-name-example`, true);  // Enabled
-   * await nameManager.toggle(`toggle-name-example`, false); // Disabled
-   */
-  async toggle(label, targetState) {
-    try {
-      await this.#client.request({
-        method: "PUT",
-        url: `/${label}`,
-        data: {
-          enabled: targetState,
-        },
-        validateStatus: (status) => {
-          return status === 200;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-}
-
-// S3 Imports
-
-/** Interacts with an S3 client to perform various operations on objects in a bucket. */
-class ObjectManager {
-  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
-  #DEFAULT_REGION = "us-east-1";
-  #DEFAULT_MAX_CONCURRENT_UPLOADS = 4;
-
-  #client;
-  #credentials;
-  #defaultBucket;
-  #gatewayConfiguration;
-  #maxConcurrentUploads;
-
-  /**
-   * @typedef {Object} objectManagerOptions Optional settings for the constructor.
-   * @property {string} [bucket] Default bucket to use.
-   * @property {objectDownloadOptions} [gateway] Default gateway to use.
-   * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads.
-   */
-
-  /**
-   * @typedef {Object} objectDownloadOptions Optional settings for downloading objects
-   * @property {string} endpoint Default gateway to use.
-   * @property {string} [token] Token for the default gateway.
-   * @property {number} [timeout=60000] Timeout for the default gateway
-   */
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @param {objectManagerOptions} options - Optional settings for the constructor.
-   * @tutorial quickstart-object
-   * @example
-   * import { ObjectManager } from "@filebase/sdk";
-   * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
-   *   bucket: "my-default-bucket",
-   *   maxConcurrentUploads: 4,
-   *   gateway: {
-   *     endpoint: "https://my-default-gateway.mydomain.com
-   *     token: SUPER_SECRET_GATEWAY_TOKEN
-   *   }
-   * });
-   */
-  constructor(clientKey, clientSecret, options) {
-    const clientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      clientConfiguration = {
-        credentials: {
-          accessKeyId: clientKey,
-          secretAccessKey: clientSecret,
-        },
-        endpoint: clientEndpoint,
-        region: this.#DEFAULT_REGION,
-        forcePathStyle: true,
-      };
-    this.#defaultBucket = options?.bucket;
-    this.#maxConcurrentUploads =
-      options?.maxConcurrentUploads || this.#DEFAULT_MAX_CONCURRENT_UPLOADS;
-    this.#credentials = {
-      key: clientKey,
-      secret: clientSecret,
-    };
-    this.#client = new S3Client(clientConfiguration);
-
-    this.#gatewayConfiguration = {
-      endpoint: options?.gateway?.endpoint,
-      token: options?.gateway?.token,
-      timeout: options?.gateway?.timeout,
-    };
-  }
-
-  /**
-   * @typedef {Object} objectOptions
-   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
-   */
-
-  /**
-   * @typedef {Object} objectHeadResult
-   * @property {string} cid The CID of the uploaded object
-   * @property {function} download Convenience function to download the object via S3 or the selected gateway
-   * @property {array} [entries] If a directory then returns an array of the containing objects
-   * @property {string} entries.cid The CID of the uploaded object
-   * @property {string} entries.path The path of the object
-   */
-
-  /**
-   * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload.
-   * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file
-   * and an optional entries object when uploading a CAR file.
-   *
-   * @summary Uploads a file or a CAR file to the specified bucket.
-   * @param {string} key - The key or path of the file in the bucket.
-   * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded.
-   *    If an array of files is provided, each file should have a 'path' property specifying the path of the file
-   *    and a 'content' property specifying the content of the file.  The SDK will then construct a CAR file locally
-   *    and use that as the content of the object to be uploaded.
-   * @param {Object} [metadata] Optional metadata for pin object
-   * @param {objectOptions} [options] - The options for uploading the object.
-   * @returns {Promise}
-   * @example
-   * // Upload Object
-   * await objectManager.upload("my-object", Buffer.from("Hello World!"));
-   * // Upload Object with Metadata
-   * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), {
-   *   "application": "my-filebase-app"
-   * });
-   * // Upload Directory
-   * await objectManager.upload("my-first-directory", [
-   *  {
-   *   path: "/testObjects/1.txt",
-   *   content: Buffer.from("upload test object", "utf-8"),
-   *  },
-   *  {
-   *   path: "/testObjects/deep/1.txt",
-   *   content: Buffer.from("upload deep test object", "utf-8"),
-   *  },
-   *  {
-   *   path: "/topLevel.txt",
-   *   content: Buffer.from("upload top level test object", "utf-8"),
-   *  },
-   * ]);
-   */
-  async upload(key, source, metadata, options) {
-    // Generate Upload UUID
-    const uploadUUID = v4();
-
-    // Setup Upload Options
-    const bucket = options?.bucket || this.#defaultBucket,
-      uploadOptions = {
-        client: this.#client,
-        params: {
-          Bucket: bucket,
-          Key: key,
-          Body: source,
-          Metadata: metadata || {},
-        },
-        queueSize: this.#maxConcurrentUploads,
-        partSize: 26843546, //25.6Mb || 250Gb Max File Size
-      };
-
-    // Pack Multiple Files into CAR file for upload
-    let parsedEntries = {};
-    if (Array.isArray(source)) {
-      // Mark Upload as a CAR file import
-      uploadOptions.params.Metadata = {
-        ...uploadOptions.params.Metadata,
-        import: "car",
-      };
-
-      let temporaryCarFilePath, temporaryBlockstoreDir;
-      try {
-        // Setup Blockstore
-        temporaryBlockstoreDir = path.resolve(
-          os.tmpdir(),
-          "filebase-sdk",
-          "uploads",
-          uploadUUID,
-        );
-        temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
-        await mkdir(temporaryBlockstoreDir, { recursive: true });
-        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir);
-
-        const heliaFs = unixfs({
-          blockstore: temporaryBlockstore,
-        });
-
-        for (let sourceEntry of source) {
-          sourceEntry.path =
-            sourceEntry.path[0] === "/"
-              ? `/${uploadUUID}${sourceEntry.path}`
-              : `/${uploadUUID}/${sourceEntry.path}`;
-        }
-        for await (const entry of heliaFs.addAll(source)) {
-          parsedEntries[entry.path] = entry;
-        }
-        const rootEntry = parsedEntries[uploadUUID];
-
-        // Get carFile stream here
-        const carExporter = car({ blockstore: temporaryBlockstore }),
-          { writer, out } = CarWriter.create([rootEntry.cid]);
-
-        // Put carFile stream to disk
-        const output = createWriteStream(temporaryCarFilePath);
-        Readable.from(out).pipe(output);
-        await carExporter.export(rootEntry.cid, writer);
-
-        // Set Uploader to Read from carFile on disk
-        uploadOptions.params.Body = createReadStream(temporaryCarFilePath);
-
-        // Upload carFile via S3
-        const parallelUploads3 = new Upload(uploadOptions);
-        await parallelUploads3.done();
-        await temporaryBlockstore.close();
-      } finally {
-        if (typeof temporaryBlockstoreDir !== "undefined") {
-          // Delete Temporary Blockstore
-          await rm(temporaryBlockstoreDir, { recursive: true, force: true });
-        }
-      }
-    } else {
-      // Upload file via S3
-      const parallelUploads3 = new Upload(uploadOptions);
-      await parallelUploads3.done();
-    }
-
-    // Get CID from Platform
-    const command = new HeadObjectCommand({
-        Bucket: bucket,
-        Key: key,
-        Body: source,
-      }),
-      headResult = await this.#client.send(command),
-      responseCid = headResult.Metadata.cid;
-
-    if (Object.keys(parsedEntries).length === 0) {
-      return {
-        cid: responseCid,
-        download: () => {
-          return this.#routeDownload(responseCid, key, options);
-        },
-      };
-    }
-    return {
-      cid: responseCid,
-      download: () => {
-        return this.#routeDownload(responseCid, key, options);
-      },
-      entries: parsedEntries,
-    };
-  }
-
-  async #routeDownload(cid, key, options) {
-    return typeof this.#gatewayConfiguration.endpoint !== "undefined"
-      ? downloadFromGateway(cid, this.#gatewayConfiguration)
-      : this.download(key, options);
-  }
-
-  /**
-   * @summary Gets an objects info and metadata using the S3 API.
-   * @param {string} key - The key of the object to be inspected.
-   * @param {objectOptions} [options] - The options for inspecting the object.
-   * @returns {Promise}
-   */
-  async get(key, options) {
-    const bucket = options?.bucket || this.#defaultBucket;
-    try {
-      const command = new HeadObjectCommand({
-          Bucket: bucket,
-          Key: key,
-        }),
-        response = await this.#client.send(command);
-
-      response.download = () => {
-        return this.#routeDownload(response.Metadata.cid, key, options);
-      };
-
-      return response;
-    } catch (err) {
-      if (err.name === "NotFound") {
-        return false;
-      }
-      throw err;
-    }
-  }
-
-  /**
-   * @summary Downloads an object from the specified bucket using the provided key.
-   * @param {string} key - The key of the object to be downloaded.
-   * @param {objectOptions} [options] - The options for downloading the object..
-   * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream.
-   * @example
-   * // Download object with name of `download-object-example`
-   * await objectManager.download(`download-object-example`);
-   */
-  async download(key, options) {
-    // Download via IPFS Gateway if Setup or S3 by Default
-    if (typeof this.#gatewayConfiguration.endpoint === "string") {
-      const objectToFetch = await this.get(key, options);
-      return objectToFetch.download();
-    } else {
-      const command = new GetObjectCommand({
-          Bucket: options?.bucket || this.#defaultBucket,
-          Key: key,
-        }),
-        response = await this.#client.send(command);
-
-      return response.Body;
-    }
-  }
-
-  /**
-   * @typedef {Object} listObjectsResult
-   * @property {boolean} IsTruncated Indicates if more results exist on the server
-   * @property {string} NextContinuationToken ContinuationToken used to paginate list requests
-   * @property {Array} Contents List of Keys stored in the S3 Bucket
-   * @property {string} Contents.Key Key of the Object
-   * @property {string} Contents.LastModified Date Last Modified of the Object
-   * @property {string} Contents.CID CID of the Object
-   * @property {string} Contents.ETag ETag of the Object
-   * @property {number} Contents.Size Size in Bytes of the Object
-   * @property {string} Contents.StorageClass Class of Storage of the Object
-   * @property {function} Contents.download Convenience function to download the item using the S3 gateway
-   */
-
-  /**
-   * @typedef {Object} listObjectOptions
-   * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used.
-   * @property {string} [ContinuationToken=null] Continues listing from this objects name.
-   * @property {string} [Delimiter=null] Character used to group keys
-   * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000.
-   */
-
-  /**
-   * Retrieves a list of objects from a specified bucket.
-   *
-   * @param {listObjectOptions} options - The options for listing objects.
-   * @returns {Promise} - A promise that resolves to an array of objects.
-   * @example
-   * // List objects in bucket with a limit of 1000
-   * await objectManager.list({
-   *   MaxKeys: 1000
-   * });
-   */
-  async list(
-    options = {
-      Bucket: this.#defaultBucket,
-      ContinuationToken: null,
-      Delimiter: null,
-      MaxKeys: 1000,
-    },
-  ) {
-    if (options?.MaxKeys && options.MaxKeys > 100000) {
-      throw new Error(`MaxKeys Maximum value is 100000`);
-    }
-    const bucket = options?.Bucket || this.#defaultBucket,
-      limit = options?.MaxKeys || 1000,
-      commandOptions = {
-        Bucket: bucket,
-        MaxKeys: limit,
-      },
-      command = new ListObjectsV2Command({
-        ...options,
-        ...commandOptions,
-      });
-
-    const { Contents, IsTruncated, NextContinuationToken } =
-      await this.#client.send(command);
-    return { Contents, IsTruncated, NextContinuationToken };
-  }
-
-  /**
-   * @summary Deletes an object from the specified bucket using the provided key.
-   * @param {string} key - The key of the object to be deleted.
-   * @param {objectOptions} [options] - The options for deleting the file.
-   * @returns {Promise} - A Promise that resolves with the result of the delete operation.
-   * @example
-   * // Delete object with name of `delete-object-example`
-   * await objectManager.delete(`delete-object-example`);
-   */
-  async delete(key, options) {
-    const command = new DeleteObjectCommand({
-      Bucket: options?.bucket || this.#defaultBucket,
-      Key: key,
-    });
-
-    await this.#client.send(command);
-    return true;
-  }
-
-  /**
-   * @typedef {Object} copyObjectOptions
-   * @property {string} [sourceBucket] The source bucket from where the object is to be copied.
-   * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey.
-   */
-
-  /**
-   * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey.
-   *
-   * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket.
-   * @param {string} sourceKey - The key of the object to be copied from the sourceBucket.
-   * @param {string} destinationBucket - The bucket where the object will be copied to.
-   * @param {copyObjectOptions} [options] - Additional options for the copy operation.
-   *
-   * @returns {Promise} - A Promise that resolves with the result of the copy operation.
-   * @example
-   * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
-   * // TIP: Set bucket on constructor and it will be used as the default source for copying objects.
-   * await objectManager.copy(`copy-object-test`, `copy-object-dest`, {
-   *   sourceBucket: `copy-object-src`
-   * });
-   */
-  async copy(
-    sourceKey,
-    destinationBucket,
-    options = {
-      sourceBucket: this.#defaultBucket,
-      destinationKey: undefined,
-    },
-  ) {
-    const copySource = `${
-        options?.sourceBucket || this.#defaultBucket
-      }/${sourceKey}`,
-      command = new CopyObjectCommand({
-        CopySource: copySource,
-        Bucket: destinationBucket,
-        Key: options?.destinationKey || sourceKey,
-      });
-
-    await this.#client.send(command);
-    return true;
-  }
-}
-
-/** Provides methods for managing pins in an REST endpoint. */
-class PinManager {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 60000;
-
-  #client;
-  #credentials;
-  #gatewayConfiguration;
-  #defaultBucket;
-
-  /**
-   * @typedef {Object} pinManagerOptions Optional settings for the constructor.
-   * @property {string} [bucket] Default bucket to use.
-   * @property {pinDownloadOptions} [gateway] Default gateway to use.
-   */
-
-  /**
-   * @typedef {Object} pinDownloadOptions Optional settings for downloading pins
-   * @property {string} endpoint Default gateway to use.
-   * @property {string} [token] Token for the default gateway.
-   * @property {number} [timeout=60000] Timeout for the default gateway
-   */
-
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @param {pinManagerOptions} [options] - Optional settings for the constructor.
-   * @tutorial quickstart-pin
-   * @example
-   * import { PinManager } from "@filebase/sdk";
-   * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
-   *   bucket: "my-default-bucket",
-   *   gateway: {
-   *     endpoint: "https://my-default-gateway.mydomain.com
-   *     token: SUPER_SECRET_GATEWAY_TOKEN
-   *   }
-   * });
-   */
-  constructor(clientKey, clientSecret, options) {
-    this.#defaultBucket = options?.bucket;
-    const PSAClientEndpoint =
-        process.env.NODE_ENV === "test"
-          ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT
-          : this.#DEFAULT_ENDPOINT,
-      baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`;
-    this.#credentials = {
-      key: clientKey,
-      secret: clientSecret,
-    };
-    this.#client = axios.create({
-      baseURL: baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-    });
-
-    this.#gatewayConfiguration = {
-      endpoint: options?.gateway?.endpoint,
-      token: options?.gateway?.token,
-      timeout: options?.gateway?.timeout || this.#DEFAULT_TIMEOUT,
-    };
-  }
-
-  /**
-   * @typedef {Object} pinStatus
-   * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal
-   * @property {string} status Status a pin object can have at a pinning service. ("queued","pinning","pinned","failed")
-   * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination
-   * @property {Object} pin Pin object
-   * @property {string} pin.cid Content Identifier (CID) pinned recursively
-   * @property {string} pin.name Name for pinned data; can be used for lookups later
-   * @property {Array} pin.origins Optional list of multiaddrs known to provide the data
-   * @property {Object} pin.meta Optional metadata for pin object
-   * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data
-   * @property {object} [info] Optional info for PinStatus response
-   * @property {function} download Convenience function to download pin
-   */
-
-  /**
-   * @typedef {Object} pinOptions
-   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
-   */
-
-  /**
-   * @typedef {Object} listPinOptions
-   * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts
-   * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match)
-   * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies
-   * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only)
-   * @property {string} [before] Return results created (queued) before provided timestamp
-   * @property {string} [after] Return results created (queued) after provided timestamp
-   * @property {number} [limit] Max records to return
-   * @property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport
-   */
-
-  /**
-   * @typedef {Object} listPinResults
-   * @property {number} count Total number of pin objects that exist for passed query filters
-   * @property {Array} Array of PinStatus results
-   */
-
-  /**
-   * @summary List the pins in a given bucket
-   * @param {listPinOptions} [listOptions]
-   * @param {pinOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // List pins in bucket with a limit of 1000
-   * await pinManager.list({
-   *   limit: 1000
-   * });
-   */
-  async list(listOptions, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        getResponse = await this.#client.request({
-          method: "GET",
-          params: listOptions,
-          headers: { Authorization: `Bearer ${encodedToken}` },
-        });
-      for (let pinStatus of getResponse.data.results) {
-        pinStatus.download = () => {
-          return this.download(pinStatus.pin.cid);
-        };
-      }
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Create a pin in the selected bucket
-   * @param {string} key Key or path of the file in the bucket
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {Object} [metadata] Optional metadata for pin object
-   * @param {pinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Create Pin with Metadata
-   * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "application": "my-custom-app-on-filebase"
-   * });
-   */
-  async create(key, cid, metadata, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        pinStatus = await this.#client.request({
-          method: "POST",
-          data: {
-            cid,
-            name: key,
-            meta: metadata,
-          },
-          headers: { Authorization: `Bearer ${encodedToken}` },
-        });
-      pinStatus.data.download = () => {
-        return this.download(pinStatus.data.pin.cid);
-      };
-      return pinStatus.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @typedef {Object} replacePinOptions
-   * @augments pinOptions
-   * @property {Object} [metadata] Optional metadata to set on pin during replacement
-   * @property {string} [name] Optional name for pin to set during replacement
-   */
-
-  /**
-   * @summary Replace a pinned object in the selected bucket
-   * @param {string} requestid Unique ID for the pinned object
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {replacePinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Replace Pin with Metadata
-   * await pinManager.create("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "revision": Date.now()
-   * }
-   */
-  async replace(requestid, cid, options) {
-    try {
-      let replaceData = {
-        cid,
-        meta: options?.metadata || {},
-      };
-      if (options?.name) {
-        replaceData.name = options.name;
-      }
-
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        pinStatusResult = await this.#client.request({
-          method: "POST",
-          url: `/${requestid}`,
-          data: replaceData,
-          validateStatus: (status) => {
-            return status === 200;
-          },
-          headers: { Authorization: `Bearer ${encodedToken}` },
-        });
-      const pinStatus = pinStatusResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Download a pin from the selected IPFS gateway
-   * @param {string} cid
-   * @param {pinDownloadOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // Download Pin by CID
-   * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF");
-   */
-  async download(cid, options) {
-    const downloadOptions = Object.assign(this.#gatewayConfiguration, options);
-    return downloadFromGateway(cid, downloadOptions);
-  }
-
-  /**
-   * @summary Get details about a pinned object
-   * @param {string} requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for getting the pin
-   * @returns {Promise}
-   * @example
-   * // Get Pin Info by RequestId
-   * await pinManager.get("qr4231214");
-   */
-  async get(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket),
-        getResponseResult = await this.#client.request({
-          method: "GET",
-          url: `/${requestid}`,
-          headers: { Authorization: `Bearer ${encodedToken}` },
-          validateStatus: (status) => {
-            return status === 200 || status === 404;
-          },
-        });
-      if (getResponseResult.status === 404) {
-        return false;
-      }
-      const pinStatus = getResponseResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  /**
-   * @summary Delete a pinned object from the selected bucket
-   * @param requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for deleting the pin
-   * @returns {Promise}
-   * @example
-   * // Delete Pin by RequestId
-   * await pinManager.delete("qr4231213");
-   */
-  async delete(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options?.bucket);
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${requestid}`,
-        headers: { Authorization: `Bearer ${encodedToken}` },
-        validateStatus: (status) => {
-          return status === 202;
-        },
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-
-  #getEncodedToken(bucket) {
-    bucket = bucket || this.#defaultBucket;
-    return Buffer.from(
-      `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`,
-    ).toString("base64");
-  }
-}
-
-export { BucketManager, GatewayManager, NameManager, ObjectManager, PinManager };
diff --git a/dist/index.js b/dist/index.js
deleted file mode 100644
index 9538230..0000000
--- a/dist/index.js
+++ /dev/null
@@ -1,18037 +0,0 @@
-var __create = Object.create;
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __getProtoOf = Object.getPrototypeOf;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __commonJS = (cb, mod) => function __require() {
-  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
-};
-var __export = (target, all) => {
-  for (var name4 in all)
-    __defProp(target, name4, { get: all[name4], enumerable: true });
-};
-var __copyProps = (to, from4, except, desc) => {
-  if (from4 && typeof from4 === "object" || typeof from4 === "function") {
-    for (let key of __getOwnPropNames(from4))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from4[key], enumerable: !(desc = __getOwnPropDesc(from4, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
-  // If the importer is in node compatibility mode or this is not an ESM
-  // file that has been converted to a CommonJS file using a Babel-
-  // compatible transform (i.e. "__esModule" has not been set), then set
-  // "default" to the CommonJS "module.exports" for node compatibility.
-  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
-  mod
-));
-var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
-// node_modules/varint/encode.js
-var require_encode = __commonJS({
-  "node_modules/varint/encode.js"(exports2, module2) {
-    module2.exports = encode12;
-    var MSB3 = 128;
-    var REST3 = 127;
-    var MSBALL3 = ~REST3;
-    var INT3 = Math.pow(2, 31);
-    function encode12(num, out, offset) {
-      if (Number.MAX_SAFE_INTEGER && num > Number.MAX_SAFE_INTEGER) {
-        encode12.bytes = 0;
-        throw new RangeError("Could not encode varint");
-      }
-      out = out || [];
-      offset = offset || 0;
-      var oldOffset = offset;
-      while (num >= INT3) {
-        out[offset++] = num & 255 | MSB3;
-        num /= 128;
-      }
-      while (num & MSBALL3) {
-        out[offset++] = num & 255 | MSB3;
-        num >>>= 7;
-      }
-      out[offset] = num | 0;
-      encode12.bytes = offset - oldOffset + 1;
-      return out;
-    }
-  }
-});
-
-// node_modules/varint/decode.js
-var require_decode = __commonJS({
-  "node_modules/varint/decode.js"(exports2, module2) {
-    module2.exports = read4;
-    var MSB3 = 128;
-    var REST3 = 127;
-    function read4(buf2, offset) {
-      var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
-      do {
-        if (counter >= l || shift > 49) {
-          read4.bytes = 0;
-          throw new RangeError("Could not decode varint");
-        }
-        b = buf2[counter++];
-        res += shift < 28 ? (b & REST3) << shift : (b & REST3) * Math.pow(2, shift);
-        shift += 7;
-      } while (b >= MSB3);
-      read4.bytes = counter - offset;
-      return res;
-    }
-  }
-});
-
-// node_modules/varint/length.js
-var require_length = __commonJS({
-  "node_modules/varint/length.js"(exports2, module2) {
-    var N13 = Math.pow(2, 7);
-    var N23 = Math.pow(2, 14);
-    var N33 = Math.pow(2, 21);
-    var N43 = Math.pow(2, 28);
-    var N53 = Math.pow(2, 35);
-    var N63 = Math.pow(2, 42);
-    var N73 = Math.pow(2, 49);
-    var N83 = Math.pow(2, 56);
-    var N93 = Math.pow(2, 63);
-    module2.exports = function(value) {
-      return value < N13 ? 1 : value < N23 ? 2 : value < N33 ? 3 : value < N43 ? 4 : value < N53 ? 5 : value < N63 ? 6 : value < N73 ? 7 : value < N83 ? 8 : value < N93 ? 9 : 10;
-    };
-  }
-});
-
-// node_modules/varint/index.js
-var require_varint = __commonJS({
-  "node_modules/varint/index.js"(exports2, module2) {
-    module2.exports = {
-      encode: require_encode(),
-      decode: require_decode(),
-      encodingLength: require_length()
-    };
-  }
-});
-
-// node_modules/eventemitter3/index.js
-var require_eventemitter3 = __commonJS({
-  "node_modules/eventemitter3/index.js"(exports2, module2) {
-    "use strict";
-    var has = Object.prototype.hasOwnProperty;
-    var prefix = "~";
-    function Events() {
-    }
-    if (Object.create) {
-      Events.prototype = /* @__PURE__ */ Object.create(null);
-      if (!new Events().__proto__)
-        prefix = false;
-    }
-    function EE(fn, context, once) {
-      this.fn = fn;
-      this.context = context;
-      this.once = once || false;
-    }
-    function addListener(emitter, event, fn, context, once) {
-      if (typeof fn !== "function") {
-        throw new TypeError("The listener must be a function");
-      }
-      var listener = new EE(fn, context || emitter, once), evt = prefix ? prefix + event : event;
-      if (!emitter._events[evt])
-        emitter._events[evt] = listener, emitter._eventsCount++;
-      else if (!emitter._events[evt].fn)
-        emitter._events[evt].push(listener);
-      else
-        emitter._events[evt] = [emitter._events[evt], listener];
-      return emitter;
-    }
-    function clearEvent(emitter, evt) {
-      if (--emitter._eventsCount === 0)
-        emitter._events = new Events();
-      else
-        delete emitter._events[evt];
-    }
-    function EventEmitter2() {
-      this._events = new Events();
-      this._eventsCount = 0;
-    }
-    EventEmitter2.prototype.eventNames = function eventNames() {
-      var names = [], events, name4;
-      if (this._eventsCount === 0)
-        return names;
-      for (name4 in events = this._events) {
-        if (has.call(events, name4))
-          names.push(prefix ? name4.slice(1) : name4);
-      }
-      if (Object.getOwnPropertySymbols) {
-        return names.concat(Object.getOwnPropertySymbols(events));
-      }
-      return names;
-    };
-    EventEmitter2.prototype.listeners = function listeners(event) {
-      var evt = prefix ? prefix + event : event, handlers = this._events[evt];
-      if (!handlers)
-        return [];
-      if (handlers.fn)
-        return [handlers.fn];
-      for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
-        ee[i] = handlers[i].fn;
-      }
-      return ee;
-    };
-    EventEmitter2.prototype.listenerCount = function listenerCount(event) {
-      var evt = prefix ? prefix + event : event, listeners = this._events[evt];
-      if (!listeners)
-        return 0;
-      if (listeners.fn)
-        return 1;
-      return listeners.length;
-    };
-    EventEmitter2.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
-      var evt = prefix ? prefix + event : event;
-      if (!this._events[evt])
-        return false;
-      var listeners = this._events[evt], len = arguments.length, args, i;
-      if (listeners.fn) {
-        if (listeners.once)
-          this.removeListener(event, listeners.fn, void 0, true);
-        switch (len) {
-          case 1:
-            return listeners.fn.call(listeners.context), true;
-          case 2:
-            return listeners.fn.call(listeners.context, a1), true;
-          case 3:
-            return listeners.fn.call(listeners.context, a1, a2), true;
-          case 4:
-            return listeners.fn.call(listeners.context, a1, a2, a3), true;
-          case 5:
-            return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
-          case 6:
-            return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
-        }
-        for (i = 1, args = new Array(len - 1); i < len; i++) {
-          args[i - 1] = arguments[i];
-        }
-        listeners.fn.apply(listeners.context, args);
-      } else {
-        var length4 = listeners.length, j;
-        for (i = 0; i < length4; i++) {
-          if (listeners[i].once)
-            this.removeListener(event, listeners[i].fn, void 0, true);
-          switch (len) {
-            case 1:
-              listeners[i].fn.call(listeners[i].context);
-              break;
-            case 2:
-              listeners[i].fn.call(listeners[i].context, a1);
-              break;
-            case 3:
-              listeners[i].fn.call(listeners[i].context, a1, a2);
-              break;
-            case 4:
-              listeners[i].fn.call(listeners[i].context, a1, a2, a3);
-              break;
-            default:
-              if (!args)
-                for (j = 1, args = new Array(len - 1); j < len; j++) {
-                  args[j - 1] = arguments[j];
-                }
-              listeners[i].fn.apply(listeners[i].context, args);
-          }
-        }
-      }
-      return true;
-    };
-    EventEmitter2.prototype.on = function on(event, fn, context) {
-      return addListener(this, event, fn, context, false);
-    };
-    EventEmitter2.prototype.once = function once(event, fn, context) {
-      return addListener(this, event, fn, context, true);
-    };
-    EventEmitter2.prototype.removeListener = function removeListener(event, fn, context, once) {
-      var evt = prefix ? prefix + event : event;
-      if (!this._events[evt])
-        return this;
-      if (!fn) {
-        clearEvent(this, evt);
-        return this;
-      }
-      var listeners = this._events[evt];
-      if (listeners.fn) {
-        if (listeners.fn === fn && (!once || listeners.once) && (!context || listeners.context === context)) {
-          clearEvent(this, evt);
-        }
-      } else {
-        for (var i = 0, events = [], length4 = listeners.length; i < length4; i++) {
-          if (listeners[i].fn !== fn || once && !listeners[i].once || context && listeners[i].context !== context) {
-            events.push(listeners[i]);
-          }
-        }
-        if (events.length)
-          this._events[evt] = events.length === 1 ? events[0] : events;
-        else
-          clearEvent(this, evt);
-      }
-      return this;
-    };
-    EventEmitter2.prototype.removeAllListeners = function removeAllListeners(event) {
-      var evt;
-      if (event) {
-        evt = prefix ? prefix + event : event;
-        if (this._events[evt])
-          clearEvent(this, evt);
-      } else {
-        this._events = new Events();
-        this._eventsCount = 0;
-      }
-      return this;
-    };
-    EventEmitter2.prototype.off = EventEmitter2.prototype.removeListener;
-    EventEmitter2.prototype.addListener = EventEmitter2.prototype.on;
-    EventEmitter2.prefixed = prefix;
-    EventEmitter2.EventEmitter = EventEmitter2;
-    if ("undefined" !== typeof module2) {
-      module2.exports = EventEmitter2;
-    }
-  }
-});
-
-// node_modules/err-code/index.js
-var require_err_code = __commonJS({
-  "node_modules/err-code/index.js"(exports2, module2) {
-    "use strict";
-    function assign(obj, props) {
-      for (const key in props) {
-        Object.defineProperty(obj, key, {
-          value: props[key],
-          enumerable: true,
-          configurable: true
-        });
-      }
-      return obj;
-    }
-    function createError(err, code5, props) {
-      if (!err || typeof err === "string") {
-        throw new TypeError("Please pass an Error to err-code");
-      }
-      if (!props) {
-        props = {};
-      }
-      if (typeof code5 === "object") {
-        props = code5;
-        code5 = "";
-      }
-      if (code5) {
-        props.code = code5;
-      }
-      try {
-        return assign(err, props);
-      } catch (_) {
-        props.message = err.message;
-        props.stack = err.stack;
-        const ErrClass = function() {
-        };
-        ErrClass.prototype = Object.create(Object.getPrototypeOf(err));
-        const output = assign(new ErrClass(), props);
-        return output;
-      }
-    }
-    module2.exports = createError;
-  }
-});
-
-// node_modules/murmurhash3js-revisited/lib/murmurHash3js.js
-var require_murmurHash3js = __commonJS({
-  "node_modules/murmurhash3js-revisited/lib/murmurHash3js.js"(exports2, module2) {
-    (function(root, undefined2) {
-      "use strict";
-      var library = {
-        "version": "3.0.0",
-        "x86": {},
-        "x64": {},
-        "inputValidation": true
-      };
-      function _validBytes(bytes) {
-        if (!Array.isArray(bytes) && !ArrayBuffer.isView(bytes)) {
-          return false;
-        }
-        for (var i = 0; i < bytes.length; i++) {
-          if (!Number.isInteger(bytes[i]) || bytes[i] < 0 || bytes[i] > 255) {
-            return false;
-          }
-        }
-        return true;
-      }
-      function _x86Multiply(m, n) {
-        return (m & 65535) * n + (((m >>> 16) * n & 65535) << 16);
-      }
-      function _x86Rotl(m, n) {
-        return m << n | m >>> 32 - n;
-      }
-      function _x86Fmix(h) {
-        h ^= h >>> 16;
-        h = _x86Multiply(h, 2246822507);
-        h ^= h >>> 13;
-        h = _x86Multiply(h, 3266489909);
-        h ^= h >>> 16;
-        return h;
-      }
-      function _x64Add(m, n) {
-        m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535];
-        n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535];
-        var o = [0, 0, 0, 0];
-        o[3] += m[3] + n[3];
-        o[2] += o[3] >>> 16;
-        o[3] &= 65535;
-        o[2] += m[2] + n[2];
-        o[1] += o[2] >>> 16;
-        o[2] &= 65535;
-        o[1] += m[1] + n[1];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[0] += m[0] + n[0];
-        o[0] &= 65535;
-        return [o[0] << 16 | o[1], o[2] << 16 | o[3]];
-      }
-      function _x64Multiply(m, n) {
-        m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535];
-        n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535];
-        var o = [0, 0, 0, 0];
-        o[3] += m[3] * n[3];
-        o[2] += o[3] >>> 16;
-        o[3] &= 65535;
-        o[2] += m[2] * n[3];
-        o[1] += o[2] >>> 16;
-        o[2] &= 65535;
-        o[2] += m[3] * n[2];
-        o[1] += o[2] >>> 16;
-        o[2] &= 65535;
-        o[1] += m[1] * n[3];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[1] += m[2] * n[2];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[1] += m[3] * n[1];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[0] += m[0] * n[3] + m[1] * n[2] + m[2] * n[1] + m[3] * n[0];
-        o[0] &= 65535;
-        return [o[0] << 16 | o[1], o[2] << 16 | o[3]];
-      }
-      function _x64Rotl(m, n) {
-        n %= 64;
-        if (n === 32) {
-          return [m[1], m[0]];
-        } else if (n < 32) {
-          return [m[0] << n | m[1] >>> 32 - n, m[1] << n | m[0] >>> 32 - n];
-        } else {
-          n -= 32;
-          return [m[1] << n | m[0] >>> 32 - n, m[0] << n | m[1] >>> 32 - n];
-        }
-      }
-      function _x64LeftShift(m, n) {
-        n %= 64;
-        if (n === 0) {
-          return m;
-        } else if (n < 32) {
-          return [m[0] << n | m[1] >>> 32 - n, m[1] << n];
-        } else {
-          return [m[1] << n - 32, 0];
-        }
-      }
-      function _x64Xor(m, n) {
-        return [m[0] ^ n[0], m[1] ^ n[1]];
-      }
-      function _x64Fmix(h) {
-        h = _x64Xor(h, [0, h[0] >>> 1]);
-        h = _x64Multiply(h, [4283543511, 3981806797]);
-        h = _x64Xor(h, [0, h[0] >>> 1]);
-        h = _x64Multiply(h, [3301882366, 444984403]);
-        h = _x64Xor(h, [0, h[0] >>> 1]);
-        return h;
-      }
-      library.x86.hash32 = function(bytes, seed) {
-        if (library.inputValidation && !_validBytes(bytes)) {
-          return undefined2;
-        }
-        seed = seed || 0;
-        var remainder = bytes.length % 4;
-        var blocks = bytes.length - remainder;
-        var h1 = seed;
-        var k1 = 0;
-        var c1 = 3432918353;
-        var c2 = 461845907;
-        for (var i = 0; i < blocks; i = i + 4) {
-          k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24;
-          k1 = _x86Multiply(k1, c1);
-          k1 = _x86Rotl(k1, 15);
-          k1 = _x86Multiply(k1, c2);
-          h1 ^= k1;
-          h1 = _x86Rotl(h1, 13);
-          h1 = _x86Multiply(h1, 5) + 3864292196;
-        }
-        k1 = 0;
-        switch (remainder) {
-          case 3:
-            k1 ^= bytes[i + 2] << 16;
-          case 2:
-            k1 ^= bytes[i + 1] << 8;
-          case 1:
-            k1 ^= bytes[i];
-            k1 = _x86Multiply(k1, c1);
-            k1 = _x86Rotl(k1, 15);
-            k1 = _x86Multiply(k1, c2);
-            h1 ^= k1;
-        }
-        h1 ^= bytes.length;
-        h1 = _x86Fmix(h1);
-        return h1 >>> 0;
-      };
-      library.x86.hash128 = function(bytes, seed) {
-        if (library.inputValidation && !_validBytes(bytes)) {
-          return undefined2;
-        }
-        seed = seed || 0;
-        var remainder = bytes.length % 16;
-        var blocks = bytes.length - remainder;
-        var h1 = seed;
-        var h2 = seed;
-        var h3 = seed;
-        var h4 = seed;
-        var k1 = 0;
-        var k2 = 0;
-        var k3 = 0;
-        var k4 = 0;
-        var c1 = 597399067;
-        var c2 = 2869860233;
-        var c3 = 951274213;
-        var c4 = 2716044179;
-        for (var i = 0; i < blocks; i = i + 16) {
-          k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24;
-          k2 = bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24;
-          k3 = bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24;
-          k4 = bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24;
-          k1 = _x86Multiply(k1, c1);
-          k1 = _x86Rotl(k1, 15);
-          k1 = _x86Multiply(k1, c2);
-          h1 ^= k1;
-          h1 = _x86Rotl(h1, 19);
-          h1 += h2;
-          h1 = _x86Multiply(h1, 5) + 1444728091;
-          k2 = _x86Multiply(k2, c2);
-          k2 = _x86Rotl(k2, 16);
-          k2 = _x86Multiply(k2, c3);
-          h2 ^= k2;
-          h2 = _x86Rotl(h2, 17);
-          h2 += h3;
-          h2 = _x86Multiply(h2, 5) + 197830471;
-          k3 = _x86Multiply(k3, c3);
-          k3 = _x86Rotl(k3, 17);
-          k3 = _x86Multiply(k3, c4);
-          h3 ^= k3;
-          h3 = _x86Rotl(h3, 15);
-          h3 += h4;
-          h3 = _x86Multiply(h3, 5) + 2530024501;
-          k4 = _x86Multiply(k4, c4);
-          k4 = _x86Rotl(k4, 18);
-          k4 = _x86Multiply(k4, c1);
-          h4 ^= k4;
-          h4 = _x86Rotl(h4, 13);
-          h4 += h1;
-          h4 = _x86Multiply(h4, 5) + 850148119;
-        }
-        k1 = 0;
-        k2 = 0;
-        k3 = 0;
-        k4 = 0;
-        switch (remainder) {
-          case 15:
-            k4 ^= bytes[i + 14] << 16;
-          case 14:
-            k4 ^= bytes[i + 13] << 8;
-          case 13:
-            k4 ^= bytes[i + 12];
-            k4 = _x86Multiply(k4, c4);
-            k4 = _x86Rotl(k4, 18);
-            k4 = _x86Multiply(k4, c1);
-            h4 ^= k4;
-          case 12:
-            k3 ^= bytes[i + 11] << 24;
-          case 11:
-            k3 ^= bytes[i + 10] << 16;
-          case 10:
-            k3 ^= bytes[i + 9] << 8;
-          case 9:
-            k3 ^= bytes[i + 8];
-            k3 = _x86Multiply(k3, c3);
-            k3 = _x86Rotl(k3, 17);
-            k3 = _x86Multiply(k3, c4);
-            h3 ^= k3;
-          case 8:
-            k2 ^= bytes[i + 7] << 24;
-          case 7:
-            k2 ^= bytes[i + 6] << 16;
-          case 6:
-            k2 ^= bytes[i + 5] << 8;
-          case 5:
-            k2 ^= bytes[i + 4];
-            k2 = _x86Multiply(k2, c2);
-            k2 = _x86Rotl(k2, 16);
-            k2 = _x86Multiply(k2, c3);
-            h2 ^= k2;
-          case 4:
-            k1 ^= bytes[i + 3] << 24;
-          case 3:
-            k1 ^= bytes[i + 2] << 16;
-          case 2:
-            k1 ^= bytes[i + 1] << 8;
-          case 1:
-            k1 ^= bytes[i];
-            k1 = _x86Multiply(k1, c1);
-            k1 = _x86Rotl(k1, 15);
-            k1 = _x86Multiply(k1, c2);
-            h1 ^= k1;
-        }
-        h1 ^= bytes.length;
-        h2 ^= bytes.length;
-        h3 ^= bytes.length;
-        h4 ^= bytes.length;
-        h1 += h2;
-        h1 += h3;
-        h1 += h4;
-        h2 += h1;
-        h3 += h1;
-        h4 += h1;
-        h1 = _x86Fmix(h1);
-        h2 = _x86Fmix(h2);
-        h3 = _x86Fmix(h3);
-        h4 = _x86Fmix(h4);
-        h1 += h2;
-        h1 += h3;
-        h1 += h4;
-        h2 += h1;
-        h3 += h1;
-        h4 += h1;
-        return ("00000000" + (h1 >>> 0).toString(16)).slice(-8) + ("00000000" + (h2 >>> 0).toString(16)).slice(-8) + ("00000000" + (h3 >>> 0).toString(16)).slice(-8) + ("00000000" + (h4 >>> 0).toString(16)).slice(-8);
-      };
-      library.x64.hash128 = function(bytes, seed) {
-        if (library.inputValidation && !_validBytes(bytes)) {
-          return undefined2;
-        }
-        seed = seed || 0;
-        var remainder = bytes.length % 16;
-        var blocks = bytes.length - remainder;
-        var h1 = [0, seed];
-        var h2 = [0, seed];
-        var k1 = [0, 0];
-        var k2 = [0, 0];
-        var c1 = [2277735313, 289559509];
-        var c2 = [1291169091, 658871167];
-        for (var i = 0; i < blocks; i = i + 16) {
-          k1 = [bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24, bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24];
-          k2 = [bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24, bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24];
-          k1 = _x64Multiply(k1, c1);
-          k1 = _x64Rotl(k1, 31);
-          k1 = _x64Multiply(k1, c2);
-          h1 = _x64Xor(h1, k1);
-          h1 = _x64Rotl(h1, 27);
-          h1 = _x64Add(h1, h2);
-          h1 = _x64Add(_x64Multiply(h1, [0, 5]), [0, 1390208809]);
-          k2 = _x64Multiply(k2, c2);
-          k2 = _x64Rotl(k2, 33);
-          k2 = _x64Multiply(k2, c1);
-          h2 = _x64Xor(h2, k2);
-          h2 = _x64Rotl(h2, 31);
-          h2 = _x64Add(h2, h1);
-          h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 944331445]);
-        }
-        k1 = [0, 0];
-        k2 = [0, 0];
-        switch (remainder) {
-          case 15:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 14]], 48));
-          case 14:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 13]], 40));
-          case 13:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 12]], 32));
-          case 12:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 11]], 24));
-          case 11:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 10]], 16));
-          case 10:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 9]], 8));
-          case 9:
-            k2 = _x64Xor(k2, [0, bytes[i + 8]]);
-            k2 = _x64Multiply(k2, c2);
-            k2 = _x64Rotl(k2, 33);
-            k2 = _x64Multiply(k2, c1);
-            h2 = _x64Xor(h2, k2);
-          case 8:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 7]], 56));
-          case 7:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 6]], 48));
-          case 6:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 5]], 40));
-          case 5:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 4]], 32));
-          case 4:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 3]], 24));
-          case 3:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 2]], 16));
-          case 2:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 1]], 8));
-          case 1:
-            k1 = _x64Xor(k1, [0, bytes[i]]);
-            k1 = _x64Multiply(k1, c1);
-            k1 = _x64Rotl(k1, 31);
-            k1 = _x64Multiply(k1, c2);
-            h1 = _x64Xor(h1, k1);
-        }
-        h1 = _x64Xor(h1, [0, bytes.length]);
-        h2 = _x64Xor(h2, [0, bytes.length]);
-        h1 = _x64Add(h1, h2);
-        h2 = _x64Add(h2, h1);
-        h1 = _x64Fmix(h1);
-        h2 = _x64Fmix(h2);
-        h1 = _x64Add(h1, h2);
-        h2 = _x64Add(h2, h1);
-        return ("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8);
-      };
-      if (typeof exports2 !== "undefined") {
-        if (typeof module2 !== "undefined" && module2.exports) {
-          exports2 = module2.exports = library;
-        }
-        exports2.murmurHash3 = library;
-      } else if (typeof define === "function" && define.amd) {
-        define([], function() {
-          return library;
-        });
-      } else {
-        library._murmurHash3 = root.murmurHash3;
-        library.noConflict = function() {
-          root.murmurHash3 = library._murmurHash3;
-          library._murmurHash3 = undefined2;
-          library.noConflict = undefined2;
-          return library;
-        };
-        root.murmurHash3 = library;
-      }
-    })(exports2);
-  }
-});
-
-// node_modules/murmurhash3js-revisited/index.js
-var require_murmurhash3js_revisited = __commonJS({
-  "node_modules/murmurhash3js-revisited/index.js"(exports2, module2) {
-    module2.exports = require_murmurHash3js();
-  }
-});
-
-// node_modules/sparse-array/index.js
-var require_sparse_array = __commonJS({
-  "node_modules/sparse-array/index.js"(exports2, module2) {
-    "use strict";
-    var BITS_PER_BYTE = 7;
-    module2.exports = class SparseArray {
-      constructor() {
-        this._bitArrays = [];
-        this._data = [];
-        this._length = 0;
-        this._changedLength = false;
-        this._changedData = false;
-      }
-      set(index, value) {
-        let pos = this._internalPositionFor(index, false);
-        if (value === void 0) {
-          if (pos !== -1) {
-            this._unsetInternalPos(pos);
-            this._unsetBit(index);
-            this._changedLength = true;
-            this._changedData = true;
-          }
-        } else {
-          let needsSort = false;
-          if (pos === -1) {
-            pos = this._data.length;
-            this._setBit(index);
-            this._changedData = true;
-          } else {
-            needsSort = true;
-          }
-          this._setInternalPos(pos, index, value, needsSort);
-          this._changedLength = true;
-        }
-      }
-      unset(index) {
-        this.set(index, void 0);
-      }
-      get(index) {
-        this._sortData();
-        const pos = this._internalPositionFor(index, true);
-        if (pos === -1) {
-          return void 0;
-        }
-        return this._data[pos][1];
-      }
-      push(value) {
-        this.set(this.length, value);
-        return this.length;
-      }
-      get length() {
-        this._sortData();
-        if (this._changedLength) {
-          const last2 = this._data[this._data.length - 1];
-          this._length = last2 ? last2[0] + 1 : 0;
-          this._changedLength = false;
-        }
-        return this._length;
-      }
-      forEach(iterator) {
-        let i = 0;
-        while (i < this.length) {
-          iterator(this.get(i), i, this);
-          i++;
-        }
-      }
-      map(iterator) {
-        let i = 0;
-        let mapped = new Array(this.length);
-        while (i < this.length) {
-          mapped[i] = iterator(this.get(i), i, this);
-          i++;
-        }
-        return mapped;
-      }
-      reduce(reducer, initialValue) {
-        let i = 0;
-        let acc = initialValue;
-        while (i < this.length) {
-          const value = this.get(i);
-          acc = reducer(acc, value, i);
-          i++;
-        }
-        return acc;
-      }
-      find(finder) {
-        let i = 0, found, last2;
-        while (i < this.length && !found) {
-          last2 = this.get(i);
-          found = finder(last2);
-          i++;
-        }
-        return found ? last2 : void 0;
-      }
-      _internalPositionFor(index, noCreate) {
-        const bytePos = this._bytePosFor(index, noCreate);
-        if (bytePos >= this._bitArrays.length) {
-          return -1;
-        }
-        const byte = this._bitArrays[bytePos];
-        const bitPos = index - bytePos * BITS_PER_BYTE;
-        const exists2 = (byte & 1 << bitPos) > 0;
-        if (!exists2) {
-          return -1;
-        }
-        const previousPopCount = this._bitArrays.slice(0, bytePos).reduce(popCountReduce, 0);
-        const mask = ~(4294967295 << bitPos + 1);
-        const bytePopCount = popCount(byte & mask);
-        const arrayPos = previousPopCount + bytePopCount - 1;
-        return arrayPos;
-      }
-      _bytePosFor(index, noCreate) {
-        const bytePos = Math.floor(index / BITS_PER_BYTE);
-        const targetLength = bytePos + 1;
-        while (!noCreate && this._bitArrays.length < targetLength) {
-          this._bitArrays.push(0);
-        }
-        return bytePos;
-      }
-      _setBit(index) {
-        const bytePos = this._bytePosFor(index, false);
-        this._bitArrays[bytePos] |= 1 << index - bytePos * BITS_PER_BYTE;
-      }
-      _unsetBit(index) {
-        const bytePos = this._bytePosFor(index, false);
-        this._bitArrays[bytePos] &= ~(1 << index - bytePos * BITS_PER_BYTE);
-      }
-      _setInternalPos(pos, index, value, needsSort) {
-        const data = this._data;
-        const elem = [index, value];
-        if (needsSort) {
-          this._sortData();
-          data[pos] = elem;
-        } else {
-          if (data.length) {
-            if (data[data.length - 1][0] >= index) {
-              data.push(elem);
-            } else if (data[0][0] <= index) {
-              data.unshift(elem);
-            } else {
-              const randomIndex = Math.round(data.length / 2);
-              this._data = data.slice(0, randomIndex).concat(elem).concat(data.slice(randomIndex));
-            }
-          } else {
-            this._data.push(elem);
-          }
-          this._changedData = true;
-          this._changedLength = true;
-        }
-      }
-      _unsetInternalPos(pos) {
-        this._data.splice(pos, 1);
-      }
-      _sortData() {
-        if (this._changedData) {
-          this._data.sort(sortInternal);
-        }
-        this._changedData = false;
-      }
-      bitField() {
-        const bytes = [];
-        let pendingBitsForResultingByte = 8;
-        let pendingBitsForNewByte = 0;
-        let resultingByte = 0;
-        let newByte;
-        const pending = this._bitArrays.slice();
-        while (pending.length || pendingBitsForNewByte) {
-          if (pendingBitsForNewByte === 0) {
-            newByte = pending.shift();
-            pendingBitsForNewByte = 7;
-          }
-          const usingBits = Math.min(pendingBitsForNewByte, pendingBitsForResultingByte);
-          const mask = ~(255 << usingBits);
-          const masked = newByte & mask;
-          resultingByte |= masked << 8 - pendingBitsForResultingByte;
-          newByte = newByte >>> usingBits;
-          pendingBitsForNewByte -= usingBits;
-          pendingBitsForResultingByte -= usingBits;
-          if (!pendingBitsForResultingByte || !pendingBitsForNewByte && !pending.length) {
-            bytes.push(resultingByte);
-            resultingByte = 0;
-            pendingBitsForResultingByte = 8;
-          }
-        }
-        for (var i = bytes.length - 1; i > 0; i--) {
-          const value = bytes[i];
-          if (value === 0) {
-            bytes.pop();
-          } else {
-            break;
-          }
-        }
-        return bytes;
-      }
-      compactArray() {
-        this._sortData();
-        return this._data.map(valueOnly);
-      }
-    };
-    function popCountReduce(count, byte) {
-      return count + popCount(byte);
-    }
-    function popCount(_v) {
-      let v = _v;
-      v = v - (v >> 1 & 1431655765);
-      v = (v & 858993459) + (v >> 2 & 858993459);
-      return (v + (v >> 4) & 252645135) * 16843009 >> 24;
-    }
-    function sortInternal(a, b) {
-      return a[0] - b[0];
-    }
-    function valueOnly(elem) {
-      return elem[1];
-    }
-  }
-});
-
-// node_modules/rabin-wasm/src/rabin.js
-var require_rabin = __commonJS({
-  "node_modules/rabin-wasm/src/rabin.js"(exports2, module2) {
-    var Rabin = class {
-      /**
-       * Creates an instance of Rabin.
-       * @param { import("./../dist/rabin-wasm") } asModule
-       * @param {number} [bits=12]
-       * @param {number} [min=8 * 1024]
-       * @param {number} [max=32 * 1024]
-       * @param {number} polynomial
-       * @memberof Rabin
-       */
-      constructor(asModule, bits = 12, min = 8 * 1024, max = 32 * 1024, windowSize = 64, polynomial) {
-        this.bits = bits;
-        this.min = min;
-        this.max = max;
-        this.asModule = asModule;
-        this.rabin = new asModule.Rabin(bits, min, max, windowSize, polynomial);
-        this.polynomial = polynomial;
-      }
-      /**
-       * Fingerprints the buffer
-       *
-       * @param {Uint8Array} buf
-       * @returns {Array}
-       * @memberof Rabin
-       */
-      fingerprint(buf2) {
-        const {
-          __retain,
-          __release,
-          __allocArray,
-          __getInt32Array,
-          Int32Array_ID,
-          Uint8Array_ID
-        } = this.asModule;
-        const lengths = new Int32Array(Math.ceil(buf2.length / this.min));
-        const lengthsPtr = __retain(__allocArray(Int32Array_ID, lengths));
-        const pointer = __retain(__allocArray(Uint8Array_ID, buf2));
-        const out = this.rabin.fingerprint(pointer, lengthsPtr);
-        const processed = __getInt32Array(out);
-        __release(pointer);
-        __release(lengthsPtr);
-        const end = processed.indexOf(0);
-        return end >= 0 ? processed.subarray(0, end) : processed;
-      }
-    };
-    module2.exports = Rabin;
-  }
-});
-
-// node_modules/@assemblyscript/loader/index.js
-var require_loader = __commonJS({
-  "node_modules/@assemblyscript/loader/index.js"(exports2) {
-    "use strict";
-    var ID_OFFSET = -8;
-    var SIZE_OFFSET = -4;
-    var ARRAYBUFFER_ID = 0;
-    var STRING_ID = 1;
-    var ARRAYBUFFERVIEW = 1 << 0;
-    var ARRAY = 1 << 1;
-    var SET = 1 << 2;
-    var MAP = 1 << 3;
-    var VAL_ALIGN_OFFSET = 5;
-    var VAL_ALIGN = 1 << VAL_ALIGN_OFFSET;
-    var VAL_SIGNED = 1 << 10;
-    var VAL_FLOAT = 1 << 11;
-    var VAL_NULLABLE = 1 << 12;
-    var VAL_MANAGED = 1 << 13;
-    var KEY_ALIGN_OFFSET = 14;
-    var KEY_ALIGN = 1 << KEY_ALIGN_OFFSET;
-    var KEY_SIGNED = 1 << 19;
-    var KEY_FLOAT = 1 << 20;
-    var KEY_NULLABLE = 1 << 21;
-    var KEY_MANAGED = 1 << 22;
-    var ARRAYBUFFERVIEW_BUFFER_OFFSET = 0;
-    var ARRAYBUFFERVIEW_DATASTART_OFFSET = 4;
-    var ARRAYBUFFERVIEW_DATALENGTH_OFFSET = 8;
-    var ARRAYBUFFERVIEW_SIZE = 12;
-    var ARRAY_LENGTH_OFFSET = 12;
-    var ARRAY_SIZE = 16;
-    var BIGINT = typeof BigUint64Array !== "undefined";
-    var THIS = Symbol();
-    var CHUNKSIZE = 1024;
-    function getStringImpl(buffer2, ptr) {
-      const U32 = new Uint32Array(buffer2);
-      const U16 = new Uint16Array(buffer2);
-      var length4 = U32[ptr + SIZE_OFFSET >>> 2] >>> 1;
-      var offset = ptr >>> 1;
-      if (length4 <= CHUNKSIZE)
-        return String.fromCharCode.apply(String, U16.subarray(offset, offset + length4));
-      const parts = [];
-      do {
-        const last2 = U16[offset + CHUNKSIZE - 1];
-        const size = last2 >= 55296 && last2 < 56320 ? CHUNKSIZE - 1 : CHUNKSIZE;
-        parts.push(String.fromCharCode.apply(String, U16.subarray(offset, offset += size)));
-        length4 -= size;
-      } while (length4 > CHUNKSIZE);
-      return parts.join("") + String.fromCharCode.apply(String, U16.subarray(offset, offset + length4));
-    }
-    function preInstantiate(imports) {
-      const baseModule = {};
-      function getString(memory, ptr) {
-        if (!memory)
-          return "";
-        return getStringImpl(memory.buffer, ptr);
-      }
-      const env = imports.env = imports.env || {};
-      env.abort = env.abort || function abort(mesg, file, line, colm) {
-        const memory = baseModule.memory || env.memory;
-        throw Error("abort: " + getString(memory, mesg) + " at " + getString(memory, file) + ":" + line + ":" + colm);
-      };
-      env.trace = env.trace || function trace(mesg, n) {
-        const memory = baseModule.memory || env.memory;
-        console.log("trace: " + getString(memory, mesg) + (n ? " " : "") + Array.prototype.slice.call(arguments, 2, 2 + n).join(", "));
-      };
-      imports.Math = imports.Math || Math;
-      imports.Date = imports.Date || Date;
-      return baseModule;
-    }
-    function postInstantiate(baseModule, instance) {
-      const rawExports = instance.exports;
-      const memory = rawExports.memory;
-      const table = rawExports.table;
-      const alloc4 = rawExports["__alloc"];
-      const retain = rawExports["__retain"];
-      const rttiBase = rawExports["__rtti_base"] || ~0;
-      function getInfo(id) {
-        const U32 = new Uint32Array(memory.buffer);
-        const count = U32[rttiBase >>> 2];
-        if ((id >>>= 0) >= count)
-          throw Error("invalid id: " + id);
-        return U32[(rttiBase + 4 >>> 2) + id * 2];
-      }
-      function getBase(id) {
-        const U32 = new Uint32Array(memory.buffer);
-        const count = U32[rttiBase >>> 2];
-        if ((id >>>= 0) >= count)
-          throw Error("invalid id: " + id);
-        return U32[(rttiBase + 4 >>> 2) + id * 2 + 1];
-      }
-      function getValueAlign(info) {
-        return 31 - Math.clz32(info >>> VAL_ALIGN_OFFSET & 31);
-      }
-      function getKeyAlign(info) {
-        return 31 - Math.clz32(info >>> KEY_ALIGN_OFFSET & 31);
-      }
-      function __allocString(str) {
-        const length4 = str.length;
-        const ptr = alloc4(length4 << 1, STRING_ID);
-        const U16 = new Uint16Array(memory.buffer);
-        for (var i = 0, p = ptr >>> 1; i < length4; ++i)
-          U16[p + i] = str.charCodeAt(i);
-        return ptr;
-      }
-      baseModule.__allocString = __allocString;
-      function __getString(ptr) {
-        const buffer2 = memory.buffer;
-        const id = new Uint32Array(buffer2)[ptr + ID_OFFSET >>> 2];
-        if (id !== STRING_ID)
-          throw Error("not a string: " + ptr);
-        return getStringImpl(buffer2, ptr);
-      }
-      baseModule.__getString = __getString;
-      function getView(alignLog2, signed, float) {
-        const buffer2 = memory.buffer;
-        if (float) {
-          switch (alignLog2) {
-            case 2:
-              return new Float32Array(buffer2);
-            case 3:
-              return new Float64Array(buffer2);
-          }
-        } else {
-          switch (alignLog2) {
-            case 0:
-              return new (signed ? Int8Array : Uint8Array)(buffer2);
-            case 1:
-              return new (signed ? Int16Array : Uint16Array)(buffer2);
-            case 2:
-              return new (signed ? Int32Array : Uint32Array)(buffer2);
-            case 3:
-              return new (signed ? BigInt64Array : BigUint64Array)(buffer2);
-          }
-        }
-        throw Error("unsupported align: " + alignLog2);
-      }
-      function __allocArray(id, values) {
-        const info = getInfo(id);
-        if (!(info & (ARRAYBUFFERVIEW | ARRAY)))
-          throw Error("not an array: " + id + " @ " + info);
-        const align = getValueAlign(info);
-        const length4 = values.length;
-        const buf2 = alloc4(length4 << align, ARRAYBUFFER_ID);
-        const arr = alloc4(info & ARRAY ? ARRAY_SIZE : ARRAYBUFFERVIEW_SIZE, id);
-        const U32 = new Uint32Array(memory.buffer);
-        U32[arr + ARRAYBUFFERVIEW_BUFFER_OFFSET >>> 2] = retain(buf2);
-        U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2] = buf2;
-        U32[arr + ARRAYBUFFERVIEW_DATALENGTH_OFFSET >>> 2] = length4 << align;
-        if (info & ARRAY)
-          U32[arr + ARRAY_LENGTH_OFFSET >>> 2] = length4;
-        const view = getView(align, info & VAL_SIGNED, info & VAL_FLOAT);
-        if (info & VAL_MANAGED) {
-          for (let i = 0; i < length4; ++i)
-            view[(buf2 >>> align) + i] = retain(values[i]);
-        } else {
-          view.set(values, buf2 >>> align);
-        }
-        return arr;
-      }
-      baseModule.__allocArray = __allocArray;
-      function __getArrayView(arr) {
-        const U32 = new Uint32Array(memory.buffer);
-        const id = U32[arr + ID_OFFSET >>> 2];
-        const info = getInfo(id);
-        if (!(info & ARRAYBUFFERVIEW))
-          throw Error("not an array: " + id);
-        const align = getValueAlign(info);
-        var buf2 = U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];
-        const length4 = info & ARRAY ? U32[arr + ARRAY_LENGTH_OFFSET >>> 2] : U32[buf2 + SIZE_OFFSET >>> 2] >>> align;
-        return getView(align, info & VAL_SIGNED, info & VAL_FLOAT).subarray(buf2 >>>= align, buf2 + length4);
-      }
-      baseModule.__getArrayView = __getArrayView;
-      function __getArray(arr) {
-        const input = __getArrayView(arr);
-        const len = input.length;
-        const out = new Array(len);
-        for (let i = 0; i < len; i++)
-          out[i] = input[i];
-        return out;
-      }
-      baseModule.__getArray = __getArray;
-      function __getArrayBuffer(ptr) {
-        const buffer2 = memory.buffer;
-        const length4 = new Uint32Array(buffer2)[ptr + SIZE_OFFSET >>> 2];
-        return buffer2.slice(ptr, ptr + length4);
-      }
-      baseModule.__getArrayBuffer = __getArrayBuffer;
-      function getTypedArray(Type2, alignLog2, ptr) {
-        return new Type2(getTypedArrayView(Type2, alignLog2, ptr));
-      }
-      function getTypedArrayView(Type2, alignLog2, ptr) {
-        const buffer2 = memory.buffer;
-        const U32 = new Uint32Array(buffer2);
-        const bufPtr = U32[ptr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];
-        return new Type2(buffer2, bufPtr, U32[bufPtr + SIZE_OFFSET >>> 2] >>> alignLog2);
-      }
-      baseModule.__getInt8Array = getTypedArray.bind(null, Int8Array, 0);
-      baseModule.__getInt8ArrayView = getTypedArrayView.bind(null, Int8Array, 0);
-      baseModule.__getUint8Array = getTypedArray.bind(null, Uint8Array, 0);
-      baseModule.__getUint8ArrayView = getTypedArrayView.bind(null, Uint8Array, 0);
-      baseModule.__getUint8ClampedArray = getTypedArray.bind(null, Uint8ClampedArray, 0);
-      baseModule.__getUint8ClampedArrayView = getTypedArrayView.bind(null, Uint8ClampedArray, 0);
-      baseModule.__getInt16Array = getTypedArray.bind(null, Int16Array, 1);
-      baseModule.__getInt16ArrayView = getTypedArrayView.bind(null, Int16Array, 1);
-      baseModule.__getUint16Array = getTypedArray.bind(null, Uint16Array, 1);
-      baseModule.__getUint16ArrayView = getTypedArrayView.bind(null, Uint16Array, 1);
-      baseModule.__getInt32Array = getTypedArray.bind(null, Int32Array, 2);
-      baseModule.__getInt32ArrayView = getTypedArrayView.bind(null, Int32Array, 2);
-      baseModule.__getUint32Array = getTypedArray.bind(null, Uint32Array, 2);
-      baseModule.__getUint32ArrayView = getTypedArrayView.bind(null, Uint32Array, 2);
-      if (BIGINT) {
-        baseModule.__getInt64Array = getTypedArray.bind(null, BigInt64Array, 3);
-        baseModule.__getInt64ArrayView = getTypedArrayView.bind(null, BigInt64Array, 3);
-        baseModule.__getUint64Array = getTypedArray.bind(null, BigUint64Array, 3);
-        baseModule.__getUint64ArrayView = getTypedArrayView.bind(null, BigUint64Array, 3);
-      }
-      baseModule.__getFloat32Array = getTypedArray.bind(null, Float32Array, 2);
-      baseModule.__getFloat32ArrayView = getTypedArrayView.bind(null, Float32Array, 2);
-      baseModule.__getFloat64Array = getTypedArray.bind(null, Float64Array, 3);
-      baseModule.__getFloat64ArrayView = getTypedArrayView.bind(null, Float64Array, 3);
-      function __instanceof(ptr, baseId) {
-        const U32 = new Uint32Array(memory.buffer);
-        var id = U32[ptr + ID_OFFSET >>> 2];
-        if (id <= U32[rttiBase >>> 2]) {
-          do
-            if (id == baseId)
-              return true;
-          while (id = getBase(id));
-        }
-        return false;
-      }
-      baseModule.__instanceof = __instanceof;
-      baseModule.memory = baseModule.memory || memory;
-      baseModule.table = baseModule.table || table;
-      return demangle(rawExports, baseModule);
-    }
-    function isResponse(o) {
-      return typeof Response !== "undefined" && o instanceof Response;
-    }
-    async function instantiate(source, imports) {
-      if (isResponse(source = await source))
-        return instantiateStreaming(source, imports);
-      return postInstantiate(
-        preInstantiate(imports || (imports = {})),
-        await WebAssembly.instantiate(
-          source instanceof WebAssembly.Module ? source : await WebAssembly.compile(source),
-          imports
-        )
-      );
-    }
-    exports2.instantiate = instantiate;
-    function instantiateSync(source, imports) {
-      return postInstantiate(
-        preInstantiate(imports || (imports = {})),
-        new WebAssembly.Instance(
-          source instanceof WebAssembly.Module ? source : new WebAssembly.Module(source),
-          imports
-        )
-      );
-    }
-    exports2.instantiateSync = instantiateSync;
-    async function instantiateStreaming(source, imports) {
-      if (!WebAssembly.instantiateStreaming) {
-        return instantiate(
-          isResponse(source = await source) ? source.arrayBuffer() : source,
-          imports
-        );
-      }
-      return postInstantiate(
-        preInstantiate(imports || (imports = {})),
-        (await WebAssembly.instantiateStreaming(source, imports)).instance
-      );
-    }
-    exports2.instantiateStreaming = instantiateStreaming;
-    function demangle(exports3, baseModule) {
-      var module3 = baseModule ? Object.create(baseModule) : {};
-      var setArgumentsLength = exports3["__argumentsLength"] ? function(length4) {
-        exports3["__argumentsLength"].value = length4;
-      } : exports3["__setArgumentsLength"] || exports3["__setargc"] || function() {
-      };
-      for (let internalName in exports3) {
-        if (!Object.prototype.hasOwnProperty.call(exports3, internalName))
-          continue;
-        const elem = exports3[internalName];
-        let parts = internalName.split(".");
-        let curr = module3;
-        while (parts.length > 1) {
-          let part = parts.shift();
-          if (!Object.prototype.hasOwnProperty.call(curr, part))
-            curr[part] = {};
-          curr = curr[part];
-        }
-        let name4 = parts[0];
-        let hash = name4.indexOf("#");
-        if (hash >= 0) {
-          let className = name4.substring(0, hash);
-          let classElem = curr[className];
-          if (typeof classElem === "undefined" || !classElem.prototype) {
-            let ctor = function(...args) {
-              return ctor.wrap(ctor.prototype.constructor(0, ...args));
-            };
-            ctor.prototype = {
-              valueOf: function valueOf() {
-                return this[THIS];
-              }
-            };
-            ctor.wrap = function(thisValue) {
-              return Object.create(ctor.prototype, { [THIS]: { value: thisValue, writable: false } });
-            };
-            if (classElem)
-              Object.getOwnPropertyNames(classElem).forEach(
-                (name5) => Object.defineProperty(ctor, name5, Object.getOwnPropertyDescriptor(classElem, name5))
-              );
-            curr[className] = ctor;
-          }
-          name4 = name4.substring(hash + 1);
-          curr = curr[className].prototype;
-          if (/^(get|set):/.test(name4)) {
-            if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) {
-              let getter = exports3[internalName.replace("set:", "get:")];
-              let setter = exports3[internalName.replace("get:", "set:")];
-              Object.defineProperty(curr, name4, {
-                get: function() {
-                  return getter(this[THIS]);
-                },
-                set: function(value) {
-                  setter(this[THIS], value);
-                },
-                enumerable: true
-              });
-            }
-          } else {
-            if (name4 === "constructor") {
-              (curr[name4] = (...args) => {
-                setArgumentsLength(args.length);
-                return elem(...args);
-              }).original = elem;
-            } else {
-              (curr[name4] = function(...args) {
-                setArgumentsLength(args.length);
-                return elem(this[THIS], ...args);
-              }).original = elem;
-            }
-          }
-        } else {
-          if (/^(get|set):/.test(name4)) {
-            if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) {
-              Object.defineProperty(curr, name4, {
-                get: exports3[internalName.replace("set:", "get:")],
-                set: exports3[internalName.replace("get:", "set:")],
-                enumerable: true
-              });
-            }
-          } else if (typeof elem === "function" && elem !== setArgumentsLength) {
-            (curr[name4] = (...args) => {
-              setArgumentsLength(args.length);
-              return elem(...args);
-            }).original = elem;
-          } else {
-            curr[name4] = elem;
-          }
-        }
-      }
-      return module3;
-    }
-    exports2.demangle = demangle;
-  }
-});
-
-// node_modules/rabin-wasm/dist/rabin-wasm.node.js
-var require_rabin_wasm_node = __commonJS({
-  "node_modules/rabin-wasm/dist/rabin-wasm.node.js"(exports2, module2) {
-    var { instantiateSync } = require_loader();
-    var fs6 = require("fs");
-    loadWebAssembly.supported = typeof WebAssembly !== "undefined";
-    async function loadWebAssembly(imp = {}) {
-      if (!loadWebAssembly.supported)
-        return null;
-      return instantiateSync(fs6.readFileSync(__dirname + "/../dist/rabin.wasm"), imp);
-    }
-    module2.exports = loadWebAssembly;
-  }
-});
-
-// node_modules/rabin-wasm/src/index.js
-var require_src = __commonJS({
-  "node_modules/rabin-wasm/src/index.js"(exports2, module2) {
-    var Rabin = require_rabin();
-    var getRabin = require_rabin_wasm_node();
-    var create5 = async (avg, min, max, windowSize, polynomial) => {
-      const compiled = await getRabin();
-      return new Rabin(compiled, avg, min, max, windowSize, polynomial);
-    };
-    module2.exports = {
-      Rabin,
-      create: create5
-    };
-  }
-});
-
-// node_modules/is-plain-obj/index.js
-var require_is_plain_obj = __commonJS({
-  "node_modules/is-plain-obj/index.js"(exports2, module2) {
-    "use strict";
-    module2.exports = (value) => {
-      if (Object.prototype.toString.call(value) !== "[object Object]") {
-        return false;
-      }
-      const prototype = Object.getPrototypeOf(value);
-      return prototype === null || prototype === Object.prototype;
-    };
-  }
-});
-
-// node_modules/merge-options/index.js
-var require_merge_options = __commonJS({
-  "node_modules/merge-options/index.js"(exports2, module2) {
-    "use strict";
-    var isOptionObject = require_is_plain_obj();
-    var { hasOwnProperty } = Object.prototype;
-    var { propertyIsEnumerable } = Object;
-    var defineProperty = (object, name4, value) => Object.defineProperty(object, name4, {
-      value,
-      writable: true,
-      enumerable: true,
-      configurable: true
-    });
-    var globalThis2 = exports2;
-    var defaultMergeOptions = {
-      concatArrays: false,
-      ignoreUndefined: false
-    };
-    var getEnumerableOwnPropertyKeys = (value) => {
-      const keys = [];
-      for (const key in value) {
-        if (hasOwnProperty.call(value, key)) {
-          keys.push(key);
-        }
-      }
-      if (Object.getOwnPropertySymbols) {
-        const symbols = Object.getOwnPropertySymbols(value);
-        for (const symbol2 of symbols) {
-          if (propertyIsEnumerable.call(value, symbol2)) {
-            keys.push(symbol2);
-          }
-        }
-      }
-      return keys;
-    };
-    function clone(value) {
-      if (Array.isArray(value)) {
-        return cloneArray(value);
-      }
-      if (isOptionObject(value)) {
-        return cloneOptionObject(value);
-      }
-      return value;
-    }
-    function cloneArray(array) {
-      const result = array.slice(0, 0);
-      getEnumerableOwnPropertyKeys(array).forEach((key) => {
-        defineProperty(result, key, clone(array[key]));
-      });
-      return result;
-    }
-    function cloneOptionObject(object) {
-      const result = Object.getPrototypeOf(object) === null ? /* @__PURE__ */ Object.create(null) : {};
-      getEnumerableOwnPropertyKeys(object).forEach((key) => {
-        defineProperty(result, key, clone(object[key]));
-      });
-      return result;
-    }
-    var mergeKeys = (merged, source, keys, config) => {
-      keys.forEach((key) => {
-        if (typeof source[key] === "undefined" && config.ignoreUndefined) {
-          return;
-        }
-        if (key in merged && merged[key] !== Object.getPrototypeOf(merged)) {
-          defineProperty(merged, key, merge2(merged[key], source[key], config));
-        } else {
-          defineProperty(merged, key, clone(source[key]));
-        }
-      });
-      return merged;
-    };
-    var concatArrays = (merged, source, config) => {
-      let result = merged.slice(0, 0);
-      let resultIndex = 0;
-      [merged, source].forEach((array) => {
-        const indices = [];
-        for (let k = 0; k < array.length; k++) {
-          if (!hasOwnProperty.call(array, k)) {
-            continue;
-          }
-          indices.push(String(k));
-          if (array === merged) {
-            defineProperty(result, resultIndex++, array[k]);
-          } else {
-            defineProperty(result, resultIndex++, clone(array[k]));
-          }
-        }
-        result = mergeKeys(result, array, getEnumerableOwnPropertyKeys(array).filter((key) => !indices.includes(key)), config);
-      });
-      return result;
-    };
-    function merge2(merged, source, config) {
-      if (config.concatArrays && Array.isArray(merged) && Array.isArray(source)) {
-        return concatArrays(merged, source, config);
-      }
-      if (!isOptionObject(source) || !isOptionObject(merged)) {
-        return clone(source);
-      }
-      return mergeKeys(merged, source, getEnumerableOwnPropertyKeys(source), config);
-    }
-    module2.exports = function(...options) {
-      const config = merge2(clone(defaultMergeOptions), this !== globalThis2 && this || {}, defaultMergeOptions);
-      let merged = { _: {} };
-      for (const option of options) {
-        if (option === void 0) {
-          continue;
-        }
-        if (!isOptionObject(option)) {
-          throw new TypeError("`" + option + "` is not an Option Object");
-        }
-        merged = merge2(merged, { _: option }, config);
-      }
-      return merged._;
-    };
-  }
-});
-
-// node_modules/ms/index.js
-var require_ms = __commonJS({
-  "node_modules/ms/index.js"(exports2, module2) {
-    var s = 1e3;
-    var m = s * 60;
-    var h = m * 60;
-    var d = h * 24;
-    var w = d * 7;
-    var y = d * 365.25;
-    module2.exports = function(val, options) {
-      options = options || {};
-      var type = typeof val;
-      if (type === "string" && val.length > 0) {
-        return parse(val);
-      } else if (type === "number" && isFinite(val)) {
-        return options.long ? fmtLong(val) : fmtShort(val);
-      }
-      throw new Error(
-        "val is not a non-empty string or a valid number. val=" + JSON.stringify(val)
-      );
-    };
-    function parse(str) {
-      str = String(str);
-      if (str.length > 100) {
-        return;
-      }
-      var match2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
-        str
-      );
-      if (!match2) {
-        return;
-      }
-      var n = parseFloat(match2[1]);
-      var type = (match2[2] || "ms").toLowerCase();
-      switch (type) {
-        case "years":
-        case "year":
-        case "yrs":
-        case "yr":
-        case "y":
-          return n * y;
-        case "weeks":
-        case "week":
-        case "w":
-          return n * w;
-        case "days":
-        case "day":
-        case "d":
-          return n * d;
-        case "hours":
-        case "hour":
-        case "hrs":
-        case "hr":
-        case "h":
-          return n * h;
-        case "minutes":
-        case "minute":
-        case "mins":
-        case "min":
-        case "m":
-          return n * m;
-        case "seconds":
-        case "second":
-        case "secs":
-        case "sec":
-        case "s":
-          return n * s;
-        case "milliseconds":
-        case "millisecond":
-        case "msecs":
-        case "msec":
-        case "ms":
-          return n;
-        default:
-          return void 0;
-      }
-    }
-    function fmtShort(ms) {
-      var msAbs = Math.abs(ms);
-      if (msAbs >= d) {
-        return Math.round(ms / d) + "d";
-      }
-      if (msAbs >= h) {
-        return Math.round(ms / h) + "h";
-      }
-      if (msAbs >= m) {
-        return Math.round(ms / m) + "m";
-      }
-      if (msAbs >= s) {
-        return Math.round(ms / s) + "s";
-      }
-      return ms + "ms";
-    }
-    function fmtLong(ms) {
-      var msAbs = Math.abs(ms);
-      if (msAbs >= d) {
-        return plural(ms, msAbs, d, "day");
-      }
-      if (msAbs >= h) {
-        return plural(ms, msAbs, h, "hour");
-      }
-      if (msAbs >= m) {
-        return plural(ms, msAbs, m, "minute");
-      }
-      if (msAbs >= s) {
-        return plural(ms, msAbs, s, "second");
-      }
-      return ms + " ms";
-    }
-    function plural(ms, msAbs, n, name4) {
-      var isPlural = msAbs >= n * 1.5;
-      return Math.round(ms / n) + " " + name4 + (isPlural ? "s" : "");
-    }
-  }
-});
-
-// node_modules/debug/src/common.js
-var require_common = __commonJS({
-  "node_modules/debug/src/common.js"(exports2, module2) {
-    function setup(env) {
-      createDebug.debug = createDebug;
-      createDebug.default = createDebug;
-      createDebug.coerce = coerce3;
-      createDebug.disable = disable;
-      createDebug.enable = enable;
-      createDebug.enabled = enabled;
-      createDebug.humanize = require_ms();
-      createDebug.destroy = destroy;
-      Object.keys(env).forEach((key) => {
-        createDebug[key] = env[key];
-      });
-      createDebug.names = [];
-      createDebug.skips = [];
-      createDebug.formatters = {};
-      function selectColor(namespace) {
-        let hash = 0;
-        for (let i = 0; i < namespace.length; i++) {
-          hash = (hash << 5) - hash + namespace.charCodeAt(i);
-          hash |= 0;
-        }
-        return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
-      }
-      createDebug.selectColor = selectColor;
-      function createDebug(namespace) {
-        let prevTime;
-        let enableOverride = null;
-        let namespacesCache;
-        let enabledCache;
-        function debug3(...args) {
-          if (!debug3.enabled) {
-            return;
-          }
-          const self = debug3;
-          const curr = Number(/* @__PURE__ */ new Date());
-          const ms = curr - (prevTime || curr);
-          self.diff = ms;
-          self.prev = prevTime;
-          self.curr = curr;
-          prevTime = curr;
-          args[0] = createDebug.coerce(args[0]);
-          if (typeof args[0] !== "string") {
-            args.unshift("%O");
-          }
-          let index = 0;
-          args[0] = args[0].replace(/%([a-zA-Z%])/g, (match2, format3) => {
-            if (match2 === "%%") {
-              return "%";
-            }
-            index++;
-            const formatter = createDebug.formatters[format3];
-            if (typeof formatter === "function") {
-              const val = args[index];
-              match2 = formatter.call(self, val);
-              args.splice(index, 1);
-              index--;
-            }
-            return match2;
-          });
-          createDebug.formatArgs.call(self, args);
-          const logFn = self.log || createDebug.log;
-          logFn.apply(self, args);
-        }
-        debug3.namespace = namespace;
-        debug3.useColors = createDebug.useColors();
-        debug3.color = createDebug.selectColor(namespace);
-        debug3.extend = extend;
-        debug3.destroy = createDebug.destroy;
-        Object.defineProperty(debug3, "enabled", {
-          enumerable: true,
-          configurable: false,
-          get: () => {
-            if (enableOverride !== null) {
-              return enableOverride;
-            }
-            if (namespacesCache !== createDebug.namespaces) {
-              namespacesCache = createDebug.namespaces;
-              enabledCache = createDebug.enabled(namespace);
-            }
-            return enabledCache;
-          },
-          set: (v) => {
-            enableOverride = v;
-          }
-        });
-        if (typeof createDebug.init === "function") {
-          createDebug.init(debug3);
-        }
-        return debug3;
-      }
-      function extend(namespace, delimiter) {
-        const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace);
-        newDebug.log = this.log;
-        return newDebug;
-      }
-      function enable(namespaces) {
-        createDebug.save(namespaces);
-        createDebug.namespaces = namespaces;
-        createDebug.names = [];
-        createDebug.skips = [];
-        let i;
-        const split = (typeof namespaces === "string" ? namespaces : "").split(/[\s,]+/);
-        const len = split.length;
-        for (i = 0; i < len; i++) {
-          if (!split[i]) {
-            continue;
-          }
-          namespaces = split[i].replace(/\*/g, ".*?");
-          if (namespaces[0] === "-") {
-            createDebug.skips.push(new RegExp("^" + namespaces.slice(1) + "$"));
-          } else {
-            createDebug.names.push(new RegExp("^" + namespaces + "$"));
-          }
-        }
-      }
-      function disable() {
-        const namespaces = [
-          ...createDebug.names.map(toNamespace),
-          ...createDebug.skips.map(toNamespace).map((namespace) => "-" + namespace)
-        ].join(",");
-        createDebug.enable("");
-        return namespaces;
-      }
-      function enabled(name4) {
-        if (name4[name4.length - 1] === "*") {
-          return true;
-        }
-        let i;
-        let len;
-        for (i = 0, len = createDebug.skips.length; i < len; i++) {
-          if (createDebug.skips[i].test(name4)) {
-            return false;
-          }
-        }
-        for (i = 0, len = createDebug.names.length; i < len; i++) {
-          if (createDebug.names[i].test(name4)) {
-            return true;
-          }
-        }
-        return false;
-      }
-      function toNamespace(regexp) {
-        return regexp.toString().substring(2, regexp.toString().length - 2).replace(/\.\*\?$/, "*");
-      }
-      function coerce3(val) {
-        if (val instanceof Error) {
-          return val.stack || val.message;
-        }
-        return val;
-      }
-      function destroy() {
-        console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
-      }
-      createDebug.enable(createDebug.load());
-      return createDebug;
-    }
-    module2.exports = setup;
-  }
-});
-
-// node_modules/debug/src/browser.js
-var require_browser = __commonJS({
-  "node_modules/debug/src/browser.js"(exports2, module2) {
-    exports2.formatArgs = formatArgs;
-    exports2.save = save;
-    exports2.load = load;
-    exports2.useColors = useColors;
-    exports2.storage = localstorage();
-    exports2.destroy = /* @__PURE__ */ (() => {
-      let warned = false;
-      return () => {
-        if (!warned) {
-          warned = true;
-          console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
-        }
-      };
-    })();
-    exports2.colors = [
-      "#0000CC",
-      "#0000FF",
-      "#0033CC",
-      "#0033FF",
-      "#0066CC",
-      "#0066FF",
-      "#0099CC",
-      "#0099FF",
-      "#00CC00",
-      "#00CC33",
-      "#00CC66",
-      "#00CC99",
-      "#00CCCC",
-      "#00CCFF",
-      "#3300CC",
-      "#3300FF",
-      "#3333CC",
-      "#3333FF",
-      "#3366CC",
-      "#3366FF",
-      "#3399CC",
-      "#3399FF",
-      "#33CC00",
-      "#33CC33",
-      "#33CC66",
-      "#33CC99",
-      "#33CCCC",
-      "#33CCFF",
-      "#6600CC",
-      "#6600FF",
-      "#6633CC",
-      "#6633FF",
-      "#66CC00",
-      "#66CC33",
-      "#9900CC",
-      "#9900FF",
-      "#9933CC",
-      "#9933FF",
-      "#99CC00",
-      "#99CC33",
-      "#CC0000",
-      "#CC0033",
-      "#CC0066",
-      "#CC0099",
-      "#CC00CC",
-      "#CC00FF",
-      "#CC3300",
-      "#CC3333",
-      "#CC3366",
-      "#CC3399",
-      "#CC33CC",
-      "#CC33FF",
-      "#CC6600",
-      "#CC6633",
-      "#CC9900",
-      "#CC9933",
-      "#CCCC00",
-      "#CCCC33",
-      "#FF0000",
-      "#FF0033",
-      "#FF0066",
-      "#FF0099",
-      "#FF00CC",
-      "#FF00FF",
-      "#FF3300",
-      "#FF3333",
-      "#FF3366",
-      "#FF3399",
-      "#FF33CC",
-      "#FF33FF",
-      "#FF6600",
-      "#FF6633",
-      "#FF9900",
-      "#FF9933",
-      "#FFCC00",
-      "#FFCC33"
-    ];
-    function useColors() {
-      if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) {
-        return true;
-      }
-      if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
-        return false;
-      }
-      return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773
-      typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31?
-      // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
-      typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker
-      typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/);
-    }
-    function formatArgs(args) {
-      args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? "%c " : " ") + "+" + module2.exports.humanize(this.diff);
-      if (!this.useColors) {
-        return;
-      }
-      const c = "color: " + this.color;
-      args.splice(1, 0, c, "color: inherit");
-      let index = 0;
-      let lastC = 0;
-      args[0].replace(/%[a-zA-Z%]/g, (match2) => {
-        if (match2 === "%%") {
-          return;
-        }
-        index++;
-        if (match2 === "%c") {
-          lastC = index;
-        }
-      });
-      args.splice(lastC, 0, c);
-    }
-    exports2.log = console.debug || console.log || (() => {
-    });
-    function save(namespaces) {
-      try {
-        if (namespaces) {
-          exports2.storage.setItem("debug", namespaces);
-        } else {
-          exports2.storage.removeItem("debug");
-        }
-      } catch (error) {
-      }
-    }
-    function load() {
-      let r;
-      try {
-        r = exports2.storage.getItem("debug");
-      } catch (error) {
-      }
-      if (!r && typeof process !== "undefined" && "env" in process) {
-        r = process.env.DEBUG;
-      }
-      return r;
-    }
-    function localstorage() {
-      try {
-        return localStorage;
-      } catch (error) {
-      }
-    }
-    module2.exports = require_common()(exports2);
-    var { formatters } = module2.exports;
-    formatters.j = function(v) {
-      try {
-        return JSON.stringify(v);
-      } catch (error) {
-        return "[UnexpectedJSONParseError]: " + error.message;
-      }
-    };
-  }
-});
-
-// node_modules/debug/src/node.js
-var require_node = __commonJS({
-  "node_modules/debug/src/node.js"(exports2, module2) {
-    var tty = require("tty");
-    var util = require("util");
-    exports2.init = init;
-    exports2.log = log12;
-    exports2.formatArgs = formatArgs;
-    exports2.save = save;
-    exports2.load = load;
-    exports2.useColors = useColors;
-    exports2.destroy = util.deprecate(
-      () => {
-      },
-      "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."
-    );
-    exports2.colors = [6, 2, 3, 4, 5, 1];
-    try {
-      const supportsColor = require("supports-color");
-      if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
-        exports2.colors = [
-          20,
-          21,
-          26,
-          27,
-          32,
-          33,
-          38,
-          39,
-          40,
-          41,
-          42,
-          43,
-          44,
-          45,
-          56,
-          57,
-          62,
-          63,
-          68,
-          69,
-          74,
-          75,
-          76,
-          77,
-          78,
-          79,
-          80,
-          81,
-          92,
-          93,
-          98,
-          99,
-          112,
-          113,
-          128,
-          129,
-          134,
-          135,
-          148,
-          149,
-          160,
-          161,
-          162,
-          163,
-          164,
-          165,
-          166,
-          167,
-          168,
-          169,
-          170,
-          171,
-          172,
-          173,
-          178,
-          179,
-          184,
-          185,
-          196,
-          197,
-          198,
-          199,
-          200,
-          201,
-          202,
-          203,
-          204,
-          205,
-          206,
-          207,
-          208,
-          209,
-          214,
-          215,
-          220,
-          221
-        ];
-      }
-    } catch (error) {
-    }
-    exports2.inspectOpts = Object.keys(process.env).filter((key) => {
-      return /^debug_/i.test(key);
-    }).reduce((obj, key) => {
-      const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => {
-        return k.toUpperCase();
-      });
-      let val = process.env[key];
-      if (/^(yes|on|true|enabled)$/i.test(val)) {
-        val = true;
-      } else if (/^(no|off|false|disabled)$/i.test(val)) {
-        val = false;
-      } else if (val === "null") {
-        val = null;
-      } else {
-        val = Number(val);
-      }
-      obj[prop] = val;
-      return obj;
-    }, {});
-    function useColors() {
-      return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty.isatty(process.stderr.fd);
-    }
-    function formatArgs(args) {
-      const { namespace: name4, useColors: useColors2 } = this;
-      if (useColors2) {
-        const c = this.color;
-        const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c);
-        const prefix = `  ${colorCode};1m${name4} \x1B[0m`;
-        args[0] = prefix + args[0].split("\n").join("\n" + prefix);
-        args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m");
-      } else {
-        args[0] = getDate() + name4 + " " + args[0];
-      }
-    }
-    function getDate() {
-      if (exports2.inspectOpts.hideDate) {
-        return "";
-      }
-      return (/* @__PURE__ */ new Date()).toISOString() + " ";
-    }
-    function log12(...args) {
-      return process.stderr.write(util.format(...args) + "\n");
-    }
-    function save(namespaces) {
-      if (namespaces) {
-        process.env.DEBUG = namespaces;
-      } else {
-        delete process.env.DEBUG;
-      }
-    }
-    function load() {
-      return process.env.DEBUG;
-    }
-    function init(debug3) {
-      debug3.inspectOpts = {};
-      const keys = Object.keys(exports2.inspectOpts);
-      for (let i = 0; i < keys.length; i++) {
-        debug3.inspectOpts[keys[i]] = exports2.inspectOpts[keys[i]];
-      }
-    }
-    module2.exports = require_common()(exports2);
-    var { formatters } = module2.exports;
-    formatters.o = function(v) {
-      this.inspectOpts.colors = this.useColors;
-      return util.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" ");
-    };
-    formatters.O = function(v) {
-      this.inspectOpts.colors = this.useColors;
-      return util.inspect(v, this.inspectOpts);
-    };
-  }
-});
-
-// node_modules/debug/src/index.js
-var require_src2 = __commonJS({
-  "node_modules/debug/src/index.js"(exports2, module2) {
-    if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) {
-      module2.exports = require_browser();
-    } else {
-      module2.exports = require_node();
-    }
-  }
-});
-
-// node_modules/balanced-match/index.js
-var require_balanced_match = __commonJS({
-  "node_modules/balanced-match/index.js"(exports2, module2) {
-    "use strict";
-    module2.exports = balanced2;
-    function balanced2(a, b, str) {
-      if (a instanceof RegExp)
-        a = maybeMatch(a, str);
-      if (b instanceof RegExp)
-        b = maybeMatch(b, str);
-      var r = range(a, b, str);
-      return r && {
-        start: r[0],
-        end: r[1],
-        pre: str.slice(0, r[0]),
-        body: str.slice(r[0] + a.length, r[1]),
-        post: str.slice(r[1] + b.length)
-      };
-    }
-    function maybeMatch(reg, str) {
-      var m = str.match(reg);
-      return m ? m[0] : null;
-    }
-    balanced2.range = range;
-    function range(a, b, str) {
-      var begs, beg, left, right, result;
-      var ai = str.indexOf(a);
-      var bi = str.indexOf(b, ai + 1);
-      var i = ai;
-      if (ai >= 0 && bi > 0) {
-        if (a === b) {
-          return [ai, bi];
-        }
-        begs = [];
-        left = str.length;
-        while (i >= 0 && !result) {
-          if (i == ai) {
-            begs.push(i);
-            ai = str.indexOf(a, i + 1);
-          } else if (begs.length == 1) {
-            result = [begs.pop(), bi];
-          } else {
-            beg = begs.pop();
-            if (beg < left) {
-              left = beg;
-              right = bi;
-            }
-            bi = str.indexOf(b, i + 1);
-          }
-          i = ai < bi && ai >= 0 ? ai : bi;
-        }
-        if (begs.length) {
-          result = [left, right];
-        }
-      }
-      return result;
-    }
-  }
-});
-
-// node_modules/brace-expansion/index.js
-var require_brace_expansion = __commonJS({
-  "node_modules/brace-expansion/index.js"(exports2, module2) {
-    var balanced2 = require_balanced_match();
-    module2.exports = expandTop;
-    var escSlash = "\0SLASH" + Math.random() + "\0";
-    var escOpen = "\0OPEN" + Math.random() + "\0";
-    var escClose = "\0CLOSE" + Math.random() + "\0";
-    var escComma = "\0COMMA" + Math.random() + "\0";
-    var escPeriod = "\0PERIOD" + Math.random() + "\0";
-    function numeric(str) {
-      return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0);
-    }
-    function escapeBraces(str) {
-      return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod);
-    }
-    function unescapeBraces(str) {
-      return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join(".");
-    }
-    function parseCommaParts(str) {
-      if (!str)
-        return [""];
-      var parts = [];
-      var m = balanced2("{", "}", str);
-      if (!m)
-        return str.split(",");
-      var pre = m.pre;
-      var body = m.body;
-      var post = m.post;
-      var p = pre.split(",");
-      p[p.length - 1] += "{" + body + "}";
-      var postParts = parseCommaParts(post);
-      if (post.length) {
-        p[p.length - 1] += postParts.shift();
-        p.push.apply(p, postParts);
-      }
-      parts.push.apply(parts, p);
-      return parts;
-    }
-    function expandTop(str) {
-      if (!str)
-        return [];
-      if (str.substr(0, 2) === "{}") {
-        str = "\\{\\}" + str.substr(2);
-      }
-      return expand2(escapeBraces(str), true).map(unescapeBraces);
-    }
-    function embrace(str) {
-      return "{" + str + "}";
-    }
-    function isPadded(el) {
-      return /^-?0\d/.test(el);
-    }
-    function lte(i, y) {
-      return i <= y;
-    }
-    function gte(i, y) {
-      return i >= y;
-    }
-    function expand2(str, isTop) {
-      var expansions = [];
-      var m = balanced2("{", "}", str);
-      if (!m)
-        return [str];
-      var pre = m.pre;
-      var post = m.post.length ? expand2(m.post, false) : [""];
-      if (/\$$/.test(m.pre)) {
-        for (var k = 0; k < post.length; k++) {
-          var expansion = pre + "{" + m.body + "}" + post[k];
-          expansions.push(expansion);
-        }
-      } else {
-        var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
-        var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
-        var isSequence = isNumericSequence || isAlphaSequence;
-        var isOptions = m.body.indexOf(",") >= 0;
-        if (!isSequence && !isOptions) {
-          if (m.post.match(/,.*\}/)) {
-            str = m.pre + "{" + m.body + escClose + m.post;
-            return expand2(str);
-          }
-          return [str];
-        }
-        var n;
-        if (isSequence) {
-          n = m.body.split(/\.\./);
-        } else {
-          n = parseCommaParts(m.body);
-          if (n.length === 1) {
-            n = expand2(n[0], false).map(embrace);
-            if (n.length === 1) {
-              return post.map(function(p) {
-                return m.pre + n[0] + p;
-              });
-            }
-          }
-        }
-        var N;
-        if (isSequence) {
-          var x = numeric(n[0]);
-          var y = numeric(n[1]);
-          var width = Math.max(n[0].length, n[1].length);
-          var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1;
-          var test = lte;
-          var reverse = y < x;
-          if (reverse) {
-            incr *= -1;
-            test = gte;
-          }
-          var pad = n.some(isPadded);
-          N = [];
-          for (var i = x; test(i, y); i += incr) {
-            var c;
-            if (isAlphaSequence) {
-              c = String.fromCharCode(i);
-              if (c === "\\")
-                c = "";
-            } else {
-              c = String(i);
-              if (pad) {
-                var need = width - c.length;
-                if (need > 0) {
-                  var z = new Array(need + 1).join("0");
-                  if (i < 0)
-                    c = "-" + z + c.slice(1);
-                  else
-                    c = z + c;
-                }
-              }
-            }
-            N.push(c);
-          }
-        } else {
-          N = [];
-          for (var j = 0; j < n.length; j++) {
-            N.push.apply(N, expand2(n[j], false));
-          }
-        }
-        for (var j = 0; j < N.length; j++) {
-          for (var k = 0; k < post.length; k++) {
-            var expansion = pre + N[j] + post[k];
-            if (!isTop || isSequence || expansion)
-              expansions.push(expansion);
-          }
-        }
-      }
-      return expansions;
-    }
-  }
-});
-
-// node_modules/fast-write-atomic/index.js
-var require_fast_write_atomic = __commonJS({
-  "node_modules/fast-write-atomic/index.js"(exports2, module2) {
-    "use strict";
-    var { open, write: write2, close, rename, fsync, unlink } = require("fs");
-    var { join, dirname } = require("path");
-    var counter = 0;
-    function cleanup(dest, err, cb) {
-      unlink(dest, function() {
-        cb(err);
-      });
-    }
-    function closeAndCleanup(fd, dest, err, cb) {
-      close(fd, cleanup.bind(null, dest, err, cb));
-    }
-    function writeLoop(fd, content, contentLength, offset, cb) {
-      write2(fd, content, offset, function(err, bytesWritten) {
-        if (err) {
-          cb(err);
-          return;
-        }
-        return bytesWritten < contentLength - offset ? writeLoop(fd, content, contentLength, offset + bytesWritten, cb) : cb(null);
-      });
-    }
-    function openLoop(dest, cb) {
-      open(dest, "w", function(err, fd) {
-        if (err) {
-          return err.code === "EMFILE" ? openLoop(dest, cb) : cb(err);
-        }
-        cb(null, fd);
-      });
-    }
-    function writeAtomic2(path6, content, cb) {
-      const tmp = join(dirname(path6), "." + process.pid + "." + counter++);
-      openLoop(tmp, function(err, fd) {
-        if (err) {
-          cb(err);
-          return;
-        }
-        const contentLength = Buffer.byteLength(content);
-        writeLoop(fd, content, contentLength, 0, function(err2) {
-          if (err2) {
-            closeAndCleanup(fd, tmp, err2, cb);
-            return;
-          }
-          fsync(fd, function(err3) {
-            if (err3) {
-              closeAndCleanup(fd, tmp, err3, cb);
-              return;
-            }
-            close(fd, function(err4) {
-              if (err4) {
-                cleanup(tmp, err4, cb);
-                return;
-              }
-              rename(tmp, path6, (err5) => {
-                if (err5) {
-                  cleanup(tmp, err5, cb);
-                  return;
-                }
-                cb(null);
-              });
-            });
-          });
-        });
-        content = null;
-      });
-    }
-    module2.exports = writeAtomic2;
-  }
-});
-
-// src/index.js
-var src_exports3 = {};
-__export(src_exports3, {
-  BucketManager: () => bucketManager_default,
-  GatewayManager: () => gatewayManager_default,
-  NameManager: () => nameManager_default,
-  ObjectManager: () => objectManager_default,
-  PinManager: () => pinManager_default
-});
-module.exports = __toCommonJS(src_exports3);
-
-// src/bucketManager.js
-var import_client_s3 = require("@aws-sdk/client-s3");
-var BucketManager = class {
-  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
-  #DEFAULT_REGION = "us-east-1";
-  #client;
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-bucket
-   * @example
-   * import { BucketManager } from "@filebase/sdk";
-   * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = {
-      credentials: {
-        accessKeyId: clientKey,
-        secretAccessKey: clientSecret
-      },
-      endpoint: clientEndpoint,
-      region: this.#DEFAULT_REGION,
-      forcePathStyle: true
-    };
-    this.#client = new import_client_s3.S3Client(clientConfiguration);
-  }
-  /**
-   * @typedef {Object} bucket
-   * @property {string} Name The name of the bucket
-   * @property {date} Date the bucket was created
-   */
-  /**
-   * @summary Creates a new bucket with the specified name.
-   * @param {string} name - The name of the bucket to create.
-   * @returns {Promise} - A promise that resolves when the bucket is created.
-   * @example
-   * // Create bucket with name of `create-bucket-example`
-   * await bucketManager.create(`create-bucket-example`);
-   */
-  async create(name4) {
-    const command = new import_client_s3.CreateBucketCommand({
-      Bucket: name4
-    });
-    return await this.#client.send(command);
-  }
-  /**
-   * @summary Lists the buckets in the client.
-   * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client.
-   * @example
-   * // List all buckets
-   * await bucketManager.list();
-   */
-  async list() {
-    const command = new import_client_s3.ListBucketsCommand({}), { Buckets } = await this.#client.send(command);
-    return Buckets;
-  }
-  /**
-   * @summary Deletes the specified bucket.
-   * @param {string} name - The name of the bucket to delete.
-   * @returns {Promise} - A promise that resolves when the bucket is deleted.
-   * @example
-   * // Delete bucket with name of `bucket-name-to-delete`
-   * await bucketManager.delete(`bucket-name-to-delete`);
-   */
-  async delete(name4) {
-    const command = new import_client_s3.DeleteBucketCommand({
-      Bucket: name4
-    });
-    await this.#client.send(command);
-    return true;
-  }
-  /**
-   * @summary Sets the privacy of a given bucket.
-   * @param {string} name - The name of the bucket to toggle.
-   * @param {boolean} targetState - The new target state. [true=private,false=public]
-   * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled.
-   * @example
-   * // Toggle bucket with label of `toggle-bucket-example`
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, true);  // Enabled
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled
-   */
-  async setPrivacy(name4, targetState) {
-    const command = new import_client_s3.PutBucketAclCommand({
-      Bucket: name4,
-      ACL: targetState ? "private" : "public-read"
-    });
-    await this.#client.send(command);
-    return true;
-  }
-  /**
-   * @summary Gets the privacy of a given bucket
-   * @param {string} name - The name of the bucket to query.
-   * @returns {Promise} A promise that resolves to true if the bucket is private.
-   */
-  async getPrivacy(name4) {
-    const command = new import_client_s3.GetBucketAclCommand({
-      Bucket: name4
-    });
-    const response = await this.#client.send(command), readPermission = response.Grants.find((grant) => {
-      return grant.Grantee.Type === "Group" && grant.Permission === "READ";
-    });
-    return !(typeof readPermission !== "undefined");
-  }
-};
-var bucketManager_default = BucketManager;
-
-// src/gatewayManager.js
-var import_axios2 = __toESM(require("axios"));
-
-// src/helpers.js
-var import_axios = __toESM(require("axios"));
-var GATEWAY_DEFAULT_TIMEOUT = 6e4;
-async function downloadFromGateway(cid, options) {
-  if (typeof options.endpoint !== "string") {
-    throw new Error(`Default Gateway must be set`);
-  }
-  const downloadHeaders = {};
-  if (options.token) {
-    downloadHeaders["x-filebase-gateway-token"] = options.token;
-  }
-  const downloadResponse = await import_axios.default.request({
-    method: "GET",
-    baseURL: options.endpoint,
-    url: `/ipfs/${cid}`,
-    headers: downloadHeaders,
-    type: "stream",
-    timeout: (options == null ? void 0 : options.timeout) || GATEWAY_DEFAULT_TIMEOUT
-  });
-  return downloadResponse.data;
-}
-function apiErrorHandler(err) {
-  var _a, _b, _c;
-  if ((err == null ? void 0 : err.response) && ((_a = err == null ? void 0 : err.response) == null ? void 0 : _a.status) && (err.response.status.toString()[0] === "4" || err.response.status.toString()[0] === "5")) {
-    throw new Error(
-      ((_b = err.response.data.error) == null ? void 0 : _b.details) || ((_c = err.response.data.error) == null ? void 0 : _c.reason) || err
-    );
-  }
-  throw err;
-}
-
-// src/gatewayManager.js
-var GatewayManager = class {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 6e4;
-  #client;
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-gateway
-   * @example
-   * import { GatewayManager } from "@filebase/sdk";
-   * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
-      "base64"
-    ), baseURL = `${clientEndpoint}/v1/gateways`;
-    this.#client = import_axios2.default.create({
-      baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-      headers: { Authorization: `Bearer ${encodedToken}` }
-    });
-  }
-  /**
-   * @typedef {Object} gateway
-   * @property {string} name Name for the gateway
-   * @property {string} domain Custom Domain for the gateway
-   * @property {boolean} enabled Whether the gateway is enabled or not
-   * @property {string} private Whether the gateway is scoped to users content
-   * @property {date} created_at Date the gateway was created
-   * @property {date} updated_at Date the gateway was last updated
-   */
-  /**
-   * @typedef {Object} gatewayOptions
-   * @property {boolean} [domain] Optional Domain to allow for using a Custom Domain
-   * @property {string} [enabled] Optional Toggle to use for enabling the gateway
-   * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private
-   */
-  /**
-   * @summary Creates a gateway with the given name and options
-   * @param {string} name Unique name across entire platform for the gateway.  Must be a valid subdomain name.
-   * @param {gatewayOptions} [options]
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`.
-   * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`.
-   * await gatewayManager.create(`create-gateway-example`, {
-   *   domain: `cname.mycustomdomain.com`
-   * });
-   */
-  async create(name4, options = {}) {
-    try {
-      let createOptions = {
-        name: name4
-      };
-      if (typeof options.domain === "string") {
-        createOptions.domain = options.domain;
-      }
-      if (typeof options.enabled === "boolean") {
-        createOptions.enabled = options.enabled;
-      }
-      if (typeof options.private === "boolean") {
-        createOptions.private = options.private;
-      }
-      const createResponse = await this.#client.request({
-        method: "POST",
-        data: createOptions
-      });
-      return createResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Deletes a gateway with the given name.
-   * @param {string} name - The name of the gateway to delete.
-   * @returns {Promise} - A promise that resolves to true if the gateway was successfully deleted.
-   * @example
-   * // Delete gateway with name of `delete-gateway-example`
-   * await gatewayManager.delete(`delete-name-example`);
-   */
-  async delete(name4) {
-    try {
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${name4}`,
-        validateStatus: (status) => {
-          return status === 204;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns the value of a gateway
-   * @param {string} name - Parameter representing the name to get.
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Get gateway with name of `gateway-get-example`
-   * await gatewayManager.get(`gateway-get-example`);
-   */
-  async get(name4) {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-        url: `/${name4}`,
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        }
-      });
-      return getResponse.status === 200 ? getResponse.data : false;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns a list of gateways
-   * @returns {Promise>} - A promise that resolves to an array of gateways.
-   * @example
-   * // List all gateways
-   * await gatewayManager.list();
-   */
-  async list() {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET"
-      });
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Updates the specified gateway.
-   * @param {string} name - The name of the gateway to update.
-   * @param {gatewayOptions} options - The options for the update operation.
-   *
-   * @returns {Promise} - A Promise that resolves to true if the gateway was updated.
-   * @example
-   * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user.
-   * await gatewayManager.update(`update-gateway-example`, {
-   *   private: true
-   * });
-   */
-  async update(name4, options) {
-    try {
-      const updateOptions = {
-        name: name4
-      };
-      if (options == null ? void 0 : options.domain) {
-        updateOptions.domain = String(options.private);
-      }
-      if (options == null ? void 0 : options.enabled) {
-        updateOptions.enabled = Boolean(options.enabled);
-      }
-      if (options == null ? void 0 : options.private) {
-        updateOptions.private = Boolean(options.private);
-      }
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name4}`,
-        data: updateOptions,
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Toggles the enabled state of a given gateway.
-   * @param {string} name - The name of the gateway to toggle.
-   * @param {boolean} targetState - The new target state.
-   * @returns {Promise} A promise that resolves to true if the gateway was successfully toggled.
-   * @example
-   * // Toggle gateway with label of `toggle-gateway-example`
-   * await gatewayManager.toggle(`toggle-gateway-example`, true);  // Enabled
-   * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled
-   */
-  async toggle(name4, targetState) {
-    try {
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name4}`,
-        data: {
-          enabled: Boolean(targetState)
-        },
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-};
-var gatewayManager_default = GatewayManager;
-
-// src/nameManager.js
-var import_axios3 = __toESM(require("axios"));
-var NameManager = class {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 6e4;
-  #client;
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-name
-   * @example
-   * import { NameManager } from "@filebase/sdk";
-   * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
-      "base64"
-    ), baseURL = `${clientEndpoint}/v1/names`;
-    this.#client = import_axios3.default.create({
-      baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-      headers: { Authorization: `Bearer ${encodedToken}` }
-    });
-  }
-  /**
-   * @typedef {Object} name
-   * @property {string} label Descriptive label for the Key
-   * @property {string} network_key IPNS Key CID
-   * @property {string} cid Value that name Publishes
-   * @property {number} sequence Version Number for the name
-   * @property {boolean} enabled Whether the name is being Published or not
-   * @property {date} published_at Date the name was last published to the DHT
-   * @property {date} created_at Date the name was created
-   * @property {date} updated_at Date the name was last updated
-   */
-  /**
-   * @typedef {Object} nameOptions
-   * @property {boolean} [enabled] Whether the name is enabled or not.
-   */
-  /**
-   * @summary Creates a new IPNS name with the given name as the label and CID.
-   * @param {string} label - The label of the new IPNS name.
-   * @param {string} cid - The CID of the IPNS name.
-   * @param {nameOptions} [options] - Additional options for the IPNS name.
-   * @returns {Promise} - A Promise that resolves with the response JSON.
-   * @example
-   * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`);
-   */
-  async create(label, cid, options = {
-    enabled: true
-  }) {
-    try {
-      const createResponse = await this.#client.request({
-        method: "POST",
-        data: {
-          label,
-          cid,
-          enabled: (options == null ? void 0 : options.enabled) !== false
-        }
-      });
-      return createResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Imports a user's IPNS private key.
-   * @param {string} label - The label for the IPNS name.
-   * @param {string} cid - The CID (Content Identifier) of the data.
-   * @param {string} privateKey - The existing private key encoded in Base64.
-   * @param {nameOptions} [options] - Additional options for the IPNS name.
-   * @returns {Promise} - A Promise that resolves to the server response.
-   * @example
-   * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   * // and a private key encoded with base64
-   * await nameManager.import(
-   *  `create-name-example`,
-   *  `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   *  `BASE64_ENCODED_PRIVATEKEY`
-   * );
-   */
-  async import(label, cid, privateKey, options = {
-    enabled: true
-  }) {
-    try {
-      const importResponse = await this.#client.request({
-        method: "POST",
-        data: {
-          label,
-          cid,
-          network_private_key: privateKey,
-          enabled: (options == null ? void 0 : options.enabled) !== false
-        }
-      });
-      return importResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Updates the specified name with the given CID.
-   * @param {string} label - The label of the name to update.
-   * @param {string} cid - The cid to associate with the name.
-   * @param {nameOptions} options - The options for the set operation.
-   *
-   * @returns {Promise} - A Promise that resolves to true if the IPNS name was updated.
-   * @example
-   * // Update name with label of `update-name-example` and set the value of the IPNS name.
-   * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`);
-   */
-  async update(label, cid, options = {}) {
-    try {
-      const updateOptions = {
-        cid
-      };
-      if (options == null ? void 0 : options.enabled) {
-        updateOptions.enabled = Boolean(options.enabled);
-      }
-      await this.#client.request({
-        method: "PUT",
-        url: `/${label}`,
-        data: updateOptions,
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns the value of an IPNS name
-   * @param {string} label - Parameter representing the label of the name to resolve.
-   * @returns {Promise} - A promise that resolves to the value of a name.
-   * @example
-   * // Get IPNS name with label of `list-name-example`
-   * await nameManager.get(`list-name-example`);
-   */
-  async get(label) {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-        url: `/${label}`,
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        }
-      });
-      return getResponse.status === 200 ? getResponse.data : false;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns a list of IPNS names
-   * @returns {Promise>} - A promise that resolves to an array of names.
-   * @example
-   * // List all IPNS names
-   * await nameManager.list();
-   */
-  async list() {
-    try {
-      const listResponse = await this.#client.request({
-        method: "GET"
-      });
-      return listResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Deletes an IPNS name with the given label.
-   * @param {string} label - The label of the IPNS name to delete.
-   * @returns {Promise} - A promise that resolves to true if the IPNS name was successfully deleted.
-   * @example
-   * // List IPNS name with label of `delete-name-example`
-   * await nameManager.delete(`delete-name-example`);
-   */
-  async delete(label) {
-    try {
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${label}`,
-        validateStatus: (status) => {
-          return status === 204;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Toggles the enabled state of a given IPNS name.
-   * @param {string} label - The label of the IPNS name to toggle.
-   * @param {boolean} targetState - The new target state.
-   * @returns {Promise} A promise that resolves to true if the IPNS name was successfully toggled.
-   * @example
-   * // Toggle IPNS name with label of `toggle-name-example`
-   * await nameManager.toggle(`toggle-name-example`, true);  // Enabled
-   * await nameManager.toggle(`toggle-name-example`, false); // Disabled
-   */
-  async toggle(label, targetState) {
-    try {
-      await this.#client.request({
-        method: "PUT",
-        url: `/${label}`,
-        data: {
-          enabled: targetState
-        },
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-};
-var nameManager_default = NameManager;
-
-// src/objectManager.js
-var import_client_s32 = require("@aws-sdk/client-s3");
-var import_lib_storage = require("@aws-sdk/lib-storage");
-
-// node_modules/@ipld/car/src/buffer-reader.js
-var import_fs = __toESM(require("fs"), 1);
-
-// node_modules/cborg/lib/is.js
-var typeofs = [
-  "string",
-  "number",
-  "bigint",
-  "symbol"
-];
-var objectTypeNames = [
-  "Function",
-  "Generator",
-  "AsyncGenerator",
-  "GeneratorFunction",
-  "AsyncGeneratorFunction",
-  "AsyncFunction",
-  "Observable",
-  "Array",
-  "Buffer",
-  "Object",
-  "RegExp",
-  "Date",
-  "Error",
-  "Map",
-  "Set",
-  "WeakMap",
-  "WeakSet",
-  "ArrayBuffer",
-  "SharedArrayBuffer",
-  "DataView",
-  "Promise",
-  "URL",
-  "HTMLElement",
-  "Int8Array",
-  "Uint8Array",
-  "Uint8ClampedArray",
-  "Int16Array",
-  "Uint16Array",
-  "Int32Array",
-  "Uint32Array",
-  "Float32Array",
-  "Float64Array",
-  "BigInt64Array",
-  "BigUint64Array"
-];
-function is(value) {
-  if (value === null) {
-    return "null";
-  }
-  if (value === void 0) {
-    return "undefined";
-  }
-  if (value === true || value === false) {
-    return "boolean";
-  }
-  const typeOf = typeof value;
-  if (typeofs.includes(typeOf)) {
-    return typeOf;
-  }
-  if (typeOf === "function") {
-    return "Function";
-  }
-  if (Array.isArray(value)) {
-    return "Array";
-  }
-  if (isBuffer(value)) {
-    return "Buffer";
-  }
-  const objectType = getObjectType(value);
-  if (objectType) {
-    return objectType;
-  }
-  return "Object";
-}
-function isBuffer(value) {
-  return value && value.constructor && value.constructor.isBuffer && value.constructor.isBuffer.call(null, value);
-}
-function getObjectType(value) {
-  const objectTypeName = Object.prototype.toString.call(value).slice(8, -1);
-  if (objectTypeNames.includes(objectTypeName)) {
-    return objectTypeName;
-  }
-  return void 0;
-}
-
-// node_modules/cborg/lib/token.js
-var Type = class {
-  /**
-   * @param {number} major
-   * @param {string} name
-   * @param {boolean} terminal
-   */
-  constructor(major, name4, terminal) {
-    this.major = major;
-    this.majorEncoded = major << 5;
-    this.name = name4;
-    this.terminal = terminal;
-  }
-  /* c8 ignore next 3 */
-  toString() {
-    return `Type[${this.major}].${this.name}`;
-  }
-  /**
-   * @param {Type} typ
-   * @returns {number}
-   */
-  compare(typ) {
-    return this.major < typ.major ? -1 : this.major > typ.major ? 1 : 0;
-  }
-};
-Type.uint = new Type(0, "uint", true);
-Type.negint = new Type(1, "negint", true);
-Type.bytes = new Type(2, "bytes", true);
-Type.string = new Type(3, "string", true);
-Type.array = new Type(4, "array", false);
-Type.map = new Type(5, "map", false);
-Type.tag = new Type(6, "tag", false);
-Type.float = new Type(7, "float", true);
-Type.false = new Type(7, "false", true);
-Type.true = new Type(7, "true", true);
-Type.null = new Type(7, "null", true);
-Type.undefined = new Type(7, "undefined", true);
-Type.break = new Type(7, "break", true);
-var Token = class {
-  /**
-   * @param {Type} type
-   * @param {any} [value]
-   * @param {number} [encodedLength]
-   */
-  constructor(type, value, encodedLength) {
-    this.type = type;
-    this.value = value;
-    this.encodedLength = encodedLength;
-    this.encodedBytes = void 0;
-    this.byteValue = void 0;
-  }
-  /* c8 ignore next 3 */
-  toString() {
-    return `Token[${this.type}].${this.value}`;
-  }
-};
-
-// node_modules/cborg/lib/byte-utils.js
-var useBuffer = globalThis.process && // @ts-ignore
-!globalThis.process.browser && // @ts-ignore
-globalThis.Buffer && // @ts-ignore
-typeof globalThis.Buffer.isBuffer === "function";
-var textDecoder = new TextDecoder();
-var textEncoder = new TextEncoder();
-function isBuffer2(buf2) {
-  return useBuffer && globalThis.Buffer.isBuffer(buf2);
-}
-function asU8A(buf2) {
-  if (!(buf2 instanceof Uint8Array)) {
-    return Uint8Array.from(buf2);
-  }
-  return isBuffer2(buf2) ? new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength) : buf2;
-}
-var toString = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    return end - start > 64 ? (
-      // eslint-disable-line operator-linebreak
-      // @ts-ignore
-      globalThis.Buffer.from(bytes.subarray(start, end)).toString("utf8")
-    ) : utf8Slice(bytes, start, end);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    return end - start > 64 ? textDecoder.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end);
-  }
-);
-var fromString = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {string} string
-   */
-  (string2) => {
-    return string2.length > 64 ? (
-      // eslint-disable-line operator-linebreak
-      // @ts-ignore
-      globalThis.Buffer.from(string2)
-    ) : utf8ToBytes(string2);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {string} string
-   */
-  (string2) => {
-    return string2.length > 64 ? textEncoder.encode(string2) : utf8ToBytes(string2);
-  }
-);
-var fromArray = (arr) => {
-  return Uint8Array.from(arr);
-};
-var slice = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    if (isBuffer2(bytes)) {
-      return new Uint8Array(bytes.subarray(start, end));
-    }
-    return bytes.slice(start, end);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    return bytes.slice(start, end);
-  }
-);
-var concat = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array[]} chunks
-   * @param {number} length
-   * @returns {Uint8Array}
-   */
-  (chunks, length4) => {
-    chunks = chunks.map((c) => c instanceof Uint8Array ? c : (
-      // eslint-disable-line operator-linebreak
-      // @ts-ignore
-      globalThis.Buffer.from(c)
-    ));
-    return asU8A(globalThis.Buffer.concat(chunks, length4));
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array[]} chunks
-   * @param {number} length
-   * @returns {Uint8Array}
-   */
-  (chunks, length4) => {
-    const out = new Uint8Array(length4);
-    let off = 0;
-    for (let b of chunks) {
-      if (off + b.length > out.length) {
-        b = b.subarray(0, out.length - off);
-      }
-      out.set(b, off);
-      off += b.length;
-    }
-    return out;
-  }
-);
-var alloc = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {number} size
-   * @returns {Uint8Array}
-   */
-  (size) => {
-    return globalThis.Buffer.allocUnsafe(size);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {number} size
-   * @returns {Uint8Array}
-   */
-  (size) => {
-    return new Uint8Array(size);
-  }
-);
-function compare(b1, b2) {
-  if (isBuffer2(b1) && isBuffer2(b2)) {
-    return b1.compare(b2);
-  }
-  for (let i = 0; i < b1.length; i++) {
-    if (b1[i] === b2[i]) {
-      continue;
-    }
-    return b1[i] < b2[i] ? -1 : 1;
-  }
-  return 0;
-}
-function utf8ToBytes(str) {
-  const out = [];
-  let p = 0;
-  for (let i = 0; i < str.length; i++) {
-    let c = str.charCodeAt(i);
-    if (c < 128) {
-      out[p++] = c;
-    } else if (c < 2048) {
-      out[p++] = c >> 6 | 192;
-      out[p++] = c & 63 | 128;
-    } else if ((c & 64512) === 55296 && i + 1 < str.length && (str.charCodeAt(i + 1) & 64512) === 56320) {
-      c = 65536 + ((c & 1023) << 10) + (str.charCodeAt(++i) & 1023);
-      out[p++] = c >> 18 | 240;
-      out[p++] = c >> 12 & 63 | 128;
-      out[p++] = c >> 6 & 63 | 128;
-      out[p++] = c & 63 | 128;
-    } else {
-      out[p++] = c >> 12 | 224;
-      out[p++] = c >> 6 & 63 | 128;
-      out[p++] = c & 63 | 128;
-    }
-  }
-  return out;
-}
-function utf8Slice(buf2, offset, end) {
-  const res = [];
-  while (offset < end) {
-    const firstByte = buf2[offset];
-    let codePoint = null;
-    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
-    if (offset + bytesPerSequence <= end) {
-      let secondByte, thirdByte, fourthByte, tempCodePoint;
-      switch (bytesPerSequence) {
-        case 1:
-          if (firstByte < 128) {
-            codePoint = firstByte;
-          }
-          break;
-        case 2:
-          secondByte = buf2[offset + 1];
-          if ((secondByte & 192) === 128) {
-            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
-            if (tempCodePoint > 127) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 3:
-          secondByte = buf2[offset + 1];
-          thirdByte = buf2[offset + 2];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
-            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 4:
-          secondByte = buf2[offset + 1];
-          thirdByte = buf2[offset + 2];
-          fourthByte = buf2[offset + 3];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
-            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
-              codePoint = tempCodePoint;
-            }
-          }
-      }
-    }
-    if (codePoint === null) {
-      codePoint = 65533;
-      bytesPerSequence = 1;
-    } else if (codePoint > 65535) {
-      codePoint -= 65536;
-      res.push(codePoint >>> 10 & 1023 | 55296);
-      codePoint = 56320 | codePoint & 1023;
-    }
-    res.push(codePoint);
-    offset += bytesPerSequence;
-  }
-  return decodeCodePointsArray(res);
-}
-var MAX_ARGUMENTS_LENGTH = 4096;
-function decodeCodePointsArray(codePoints) {
-  const len = codePoints.length;
-  if (len <= MAX_ARGUMENTS_LENGTH) {
-    return String.fromCharCode.apply(String, codePoints);
-  }
-  let res = "";
-  let i = 0;
-  while (i < len) {
-    res += String.fromCharCode.apply(
-      String,
-      codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
-    );
-  }
-  return res;
-}
-
-// node_modules/cborg/lib/bl.js
-var defaultChunkSize = 256;
-var Bl = class {
-  /**
-   * @param {number} [chunkSize]
-   */
-  constructor(chunkSize = defaultChunkSize) {
-    this.chunkSize = chunkSize;
-    this.cursor = 0;
-    this.maxCursor = -1;
-    this.chunks = [];
-    this._initReuseChunk = null;
-  }
-  reset() {
-    this.cursor = 0;
-    this.maxCursor = -1;
-    if (this.chunks.length) {
-      this.chunks = [];
-    }
-    if (this._initReuseChunk !== null) {
-      this.chunks.push(this._initReuseChunk);
-      this.maxCursor = this._initReuseChunk.length - 1;
-    }
-  }
-  /**
-   * @param {Uint8Array|number[]} bytes
-   */
-  push(bytes) {
-    let topChunk = this.chunks[this.chunks.length - 1];
-    const newMax = this.cursor + bytes.length;
-    if (newMax <= this.maxCursor + 1) {
-      const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;
-      topChunk.set(bytes, chunkPos);
-    } else {
-      if (topChunk) {
-        const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;
-        if (chunkPos < topChunk.length) {
-          this.chunks[this.chunks.length - 1] = topChunk.subarray(0, chunkPos);
-          this.maxCursor = this.cursor - 1;
-        }
-      }
-      if (bytes.length < 64 && bytes.length < this.chunkSize) {
-        topChunk = alloc(this.chunkSize);
-        this.chunks.push(topChunk);
-        this.maxCursor += topChunk.length;
-        if (this._initReuseChunk === null) {
-          this._initReuseChunk = topChunk;
-        }
-        topChunk.set(bytes, 0);
-      } else {
-        this.chunks.push(bytes);
-        this.maxCursor += bytes.length;
-      }
-    }
-    this.cursor += bytes.length;
-  }
-  /**
-   * @param {boolean} [reset]
-   * @returns {Uint8Array}
-   */
-  toBytes(reset = false) {
-    let byts;
-    if (this.chunks.length === 1) {
-      const chunk = this.chunks[0];
-      if (reset && this.cursor > chunk.length / 2) {
-        byts = this.cursor === chunk.length ? chunk : chunk.subarray(0, this.cursor);
-        this._initReuseChunk = null;
-        this.chunks = [];
-      } else {
-        byts = slice(chunk, 0, this.cursor);
-      }
-    } else {
-      byts = concat(this.chunks, this.cursor);
-    }
-    if (reset) {
-      this.reset();
-    }
-    return byts;
-  }
-};
-
-// node_modules/cborg/lib/common.js
-var decodeErrPrefix = "CBOR decode error:";
-var encodeErrPrefix = "CBOR encode error:";
-var uintMinorPrefixBytes = [];
-uintMinorPrefixBytes[23] = 1;
-uintMinorPrefixBytes[24] = 2;
-uintMinorPrefixBytes[25] = 3;
-uintMinorPrefixBytes[26] = 5;
-uintMinorPrefixBytes[27] = 9;
-function assertEnoughData(data, pos, need) {
-  if (data.length - pos < need) {
-    throw new Error(`${decodeErrPrefix} not enough data for type`);
-  }
-}
-
-// node_modules/cborg/lib/0uint.js
-var uintBoundaries = [24, 256, 65536, 4294967296, BigInt("18446744073709551616")];
-function readUint8(data, offset, options) {
-  assertEnoughData(data, offset, 1);
-  const value = data[offset];
-  if (options.strict === true && value < uintBoundaries[0]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  return value;
-}
-function readUint16(data, offset, options) {
-  assertEnoughData(data, offset, 2);
-  const value = data[offset] << 8 | data[offset + 1];
-  if (options.strict === true && value < uintBoundaries[1]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  return value;
-}
-function readUint32(data, offset, options) {
-  assertEnoughData(data, offset, 4);
-  const value = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];
-  if (options.strict === true && value < uintBoundaries[2]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  return value;
-}
-function readUint64(data, offset, options) {
-  assertEnoughData(data, offset, 8);
-  const hi = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];
-  const lo = data[offset + 4] * 16777216 + (data[offset + 5] << 16) + (data[offset + 6] << 8) + data[offset + 7];
-  const value = (BigInt(hi) << BigInt(32)) + BigInt(lo);
-  if (options.strict === true && value < uintBoundaries[3]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  if (value <= Number.MAX_SAFE_INTEGER) {
-    return Number(value);
-  }
-  if (options.allowBigInt === true) {
-    return value;
-  }
-  throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`);
-}
-function decodeUint8(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint8(data, pos + 1, options), 2);
-}
-function decodeUint16(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint16(data, pos + 1, options), 3);
-}
-function decodeUint32(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint32(data, pos + 1, options), 5);
-}
-function decodeUint64(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint64(data, pos + 1, options), 9);
-}
-function encodeUint(buf2, token) {
-  return encodeUintValue(buf2, 0, token.value);
-}
-function encodeUintValue(buf2, major, uint) {
-  if (uint < uintBoundaries[0]) {
-    const nuint = Number(uint);
-    buf2.push([major | nuint]);
-  } else if (uint < uintBoundaries[1]) {
-    const nuint = Number(uint);
-    buf2.push([major | 24, nuint]);
-  } else if (uint < uintBoundaries[2]) {
-    const nuint = Number(uint);
-    buf2.push([major | 25, nuint >>> 8, nuint & 255]);
-  } else if (uint < uintBoundaries[3]) {
-    const nuint = Number(uint);
-    buf2.push([major | 26, nuint >>> 24 & 255, nuint >>> 16 & 255, nuint >>> 8 & 255, nuint & 255]);
-  } else {
-    const buint = BigInt(uint);
-    if (buint < uintBoundaries[4]) {
-      const set = [major | 27, 0, 0, 0, 0, 0, 0, 0];
-      let lo = Number(buint & BigInt(4294967295));
-      let hi = Number(buint >> BigInt(32) & BigInt(4294967295));
-      set[8] = lo & 255;
-      lo = lo >> 8;
-      set[7] = lo & 255;
-      lo = lo >> 8;
-      set[6] = lo & 255;
-      lo = lo >> 8;
-      set[5] = lo & 255;
-      set[4] = hi & 255;
-      hi = hi >> 8;
-      set[3] = hi & 255;
-      hi = hi >> 8;
-      set[2] = hi & 255;
-      hi = hi >> 8;
-      set[1] = hi & 255;
-      buf2.push(set);
-    } else {
-      throw new Error(`${decodeErrPrefix} encountered BigInt larger than allowable range`);
-    }
-  }
-}
-encodeUint.encodedSize = function encodedSize(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-encodeUintValue.encodedSize = function encodedSize2(uint) {
-  if (uint < uintBoundaries[0]) {
-    return 1;
-  }
-  if (uint < uintBoundaries[1]) {
-    return 2;
-  }
-  if (uint < uintBoundaries[2]) {
-    return 3;
-  }
-  if (uint < uintBoundaries[3]) {
-    return 5;
-  }
-  return 9;
-};
-encodeUint.compareTokens = function compareTokens(tok1, tok2) {
-  return tok1.value < tok2.value ? -1 : tok1.value > tok2.value ? 1 : (
-    /* c8 ignore next */
-    0
-  );
-};
-
-// node_modules/cborg/lib/1negint.js
-function decodeNegint8(data, pos, _minor, options) {
-  return new Token(Type.negint, -1 - readUint8(data, pos + 1, options), 2);
-}
-function decodeNegint16(data, pos, _minor, options) {
-  return new Token(Type.negint, -1 - readUint16(data, pos + 1, options), 3);
-}
-function decodeNegint32(data, pos, _minor, options) {
-  return new Token(Type.negint, -1 - readUint32(data, pos + 1, options), 5);
-}
-var neg1b = BigInt(-1);
-var pos1b = BigInt(1);
-function decodeNegint64(data, pos, _minor, options) {
-  const int = readUint64(data, pos + 1, options);
-  if (typeof int !== "bigint") {
-    const value = -1 - int;
-    if (value >= Number.MIN_SAFE_INTEGER) {
-      return new Token(Type.negint, value, 9);
-    }
-  }
-  if (options.allowBigInt !== true) {
-    throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`);
-  }
-  return new Token(Type.negint, neg1b - BigInt(int), 9);
-}
-function encodeNegint(buf2, token) {
-  const negint = token.value;
-  const unsigned = typeof negint === "bigint" ? negint * neg1b - pos1b : negint * -1 - 1;
-  encodeUintValue(buf2, token.type.majorEncoded, unsigned);
-}
-encodeNegint.encodedSize = function encodedSize3(token) {
-  const negint = token.value;
-  const unsigned = typeof negint === "bigint" ? negint * neg1b - pos1b : negint * -1 - 1;
-  if (unsigned < uintBoundaries[0]) {
-    return 1;
-  }
-  if (unsigned < uintBoundaries[1]) {
-    return 2;
-  }
-  if (unsigned < uintBoundaries[2]) {
-    return 3;
-  }
-  if (unsigned < uintBoundaries[3]) {
-    return 5;
-  }
-  return 9;
-};
-encodeNegint.compareTokens = function compareTokens2(tok1, tok2) {
-  return tok1.value < tok2.value ? 1 : tok1.value > tok2.value ? -1 : (
-    /* c8 ignore next */
-    0
-  );
-};
-
-// node_modules/cborg/lib/2bytes.js
-function toToken(data, pos, prefix, length4) {
-  assertEnoughData(data, pos, prefix + length4);
-  const buf2 = slice(data, pos + prefix, pos + prefix + length4);
-  return new Token(Type.bytes, buf2, prefix + length4);
-}
-function decodeBytesCompact(data, pos, minor, _options) {
-  return toToken(data, pos, 1, minor);
-}
-function decodeBytes8(data, pos, _minor, options) {
-  return toToken(data, pos, 2, readUint8(data, pos + 1, options));
-}
-function decodeBytes16(data, pos, _minor, options) {
-  return toToken(data, pos, 3, readUint16(data, pos + 1, options));
-}
-function decodeBytes32(data, pos, _minor, options) {
-  return toToken(data, pos, 5, readUint32(data, pos + 1, options));
-}
-function decodeBytes64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer bytes lengths not supported`);
-  }
-  return toToken(data, pos, 9, l);
-}
-function tokenBytes(token) {
-  if (token.encodedBytes === void 0) {
-    token.encodedBytes = token.type === Type.string ? fromString(token.value) : token.value;
-  }
-  return token.encodedBytes;
-}
-function encodeBytes(buf2, token) {
-  const bytes = tokenBytes(token);
-  encodeUintValue(buf2, token.type.majorEncoded, bytes.length);
-  buf2.push(bytes);
-}
-encodeBytes.encodedSize = function encodedSize4(token) {
-  const bytes = tokenBytes(token);
-  return encodeUintValue.encodedSize(bytes.length) + bytes.length;
-};
-encodeBytes.compareTokens = function compareTokens3(tok1, tok2) {
-  return compareBytes(tokenBytes(tok1), tokenBytes(tok2));
-};
-function compareBytes(b1, b2) {
-  return b1.length < b2.length ? -1 : b1.length > b2.length ? 1 : compare(b1, b2);
-}
-
-// node_modules/cborg/lib/3string.js
-function toToken2(data, pos, prefix, length4, options) {
-  const totLength = prefix + length4;
-  assertEnoughData(data, pos, totLength);
-  const tok = new Token(Type.string, toString(data, pos + prefix, pos + totLength), totLength);
-  if (options.retainStringBytes === true) {
-    tok.byteValue = slice(data, pos + prefix, pos + totLength);
-  }
-  return tok;
-}
-function decodeStringCompact(data, pos, minor, options) {
-  return toToken2(data, pos, 1, minor, options);
-}
-function decodeString8(data, pos, _minor, options) {
-  return toToken2(data, pos, 2, readUint8(data, pos + 1, options), options);
-}
-function decodeString16(data, pos, _minor, options) {
-  return toToken2(data, pos, 3, readUint16(data, pos + 1, options), options);
-}
-function decodeString32(data, pos, _minor, options) {
-  return toToken2(data, pos, 5, readUint32(data, pos + 1, options), options);
-}
-function decodeString64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer string lengths not supported`);
-  }
-  return toToken2(data, pos, 9, l, options);
-}
-var encodeString = encodeBytes;
-
-// node_modules/cborg/lib/4array.js
-function toToken3(_data, _pos, prefix, length4) {
-  return new Token(Type.array, length4, prefix);
-}
-function decodeArrayCompact(data, pos, minor, _options) {
-  return toToken3(data, pos, 1, minor);
-}
-function decodeArray8(data, pos, _minor, options) {
-  return toToken3(data, pos, 2, readUint8(data, pos + 1, options));
-}
-function decodeArray16(data, pos, _minor, options) {
-  return toToken3(data, pos, 3, readUint16(data, pos + 1, options));
-}
-function decodeArray32(data, pos, _minor, options) {
-  return toToken3(data, pos, 5, readUint32(data, pos + 1, options));
-}
-function decodeArray64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer array lengths not supported`);
-  }
-  return toToken3(data, pos, 9, l);
-}
-function decodeArrayIndefinite(data, pos, _minor, options) {
-  if (options.allowIndefinite === false) {
-    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
-  }
-  return toToken3(data, pos, 1, Infinity);
-}
-function encodeArray(buf2, token) {
-  encodeUintValue(buf2, Type.array.majorEncoded, token.value);
-}
-encodeArray.compareTokens = encodeUint.compareTokens;
-encodeArray.encodedSize = function encodedSize5(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-
-// node_modules/cborg/lib/5map.js
-function toToken4(_data, _pos, prefix, length4) {
-  return new Token(Type.map, length4, prefix);
-}
-function decodeMapCompact(data, pos, minor, _options) {
-  return toToken4(data, pos, 1, minor);
-}
-function decodeMap8(data, pos, _minor, options) {
-  return toToken4(data, pos, 2, readUint8(data, pos + 1, options));
-}
-function decodeMap16(data, pos, _minor, options) {
-  return toToken4(data, pos, 3, readUint16(data, pos + 1, options));
-}
-function decodeMap32(data, pos, _minor, options) {
-  return toToken4(data, pos, 5, readUint32(data, pos + 1, options));
-}
-function decodeMap64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer map lengths not supported`);
-  }
-  return toToken4(data, pos, 9, l);
-}
-function decodeMapIndefinite(data, pos, _minor, options) {
-  if (options.allowIndefinite === false) {
-    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
-  }
-  return toToken4(data, pos, 1, Infinity);
-}
-function encodeMap(buf2, token) {
-  encodeUintValue(buf2, Type.map.majorEncoded, token.value);
-}
-encodeMap.compareTokens = encodeUint.compareTokens;
-encodeMap.encodedSize = function encodedSize6(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-
-// node_modules/cborg/lib/6tag.js
-function decodeTagCompact(_data, _pos, minor, _options) {
-  return new Token(Type.tag, minor, 1);
-}
-function decodeTag8(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint8(data, pos + 1, options), 2);
-}
-function decodeTag16(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint16(data, pos + 1, options), 3);
-}
-function decodeTag32(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint32(data, pos + 1, options), 5);
-}
-function decodeTag64(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint64(data, pos + 1, options), 9);
-}
-function encodeTag(buf2, token) {
-  encodeUintValue(buf2, Type.tag.majorEncoded, token.value);
-}
-encodeTag.compareTokens = encodeUint.compareTokens;
-encodeTag.encodedSize = function encodedSize7(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-
-// node_modules/cborg/lib/7float.js
-var MINOR_FALSE = 20;
-var MINOR_TRUE = 21;
-var MINOR_NULL = 22;
-var MINOR_UNDEFINED = 23;
-function decodeUndefined(_data, _pos, _minor, options) {
-  if (options.allowUndefined === false) {
-    throw new Error(`${decodeErrPrefix} undefined values are not supported`);
-  } else if (options.coerceUndefinedToNull === true) {
-    return new Token(Type.null, null, 1);
-  }
-  return new Token(Type.undefined, void 0, 1);
-}
-function decodeBreak(_data, _pos, _minor, options) {
-  if (options.allowIndefinite === false) {
-    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
-  }
-  return new Token(Type.break, void 0, 1);
-}
-function createToken(value, bytes, options) {
-  if (options) {
-    if (options.allowNaN === false && Number.isNaN(value)) {
-      throw new Error(`${decodeErrPrefix} NaN values are not supported`);
-    }
-    if (options.allowInfinity === false && (value === Infinity || value === -Infinity)) {
-      throw new Error(`${decodeErrPrefix} Infinity values are not supported`);
-    }
-  }
-  return new Token(Type.float, value, bytes);
-}
-function decodeFloat16(data, pos, _minor, options) {
-  return createToken(readFloat16(data, pos + 1), 3, options);
-}
-function decodeFloat32(data, pos, _minor, options) {
-  return createToken(readFloat32(data, pos + 1), 5, options);
-}
-function decodeFloat64(data, pos, _minor, options) {
-  return createToken(readFloat64(data, pos + 1), 9, options);
-}
-function encodeFloat(buf2, token, options) {
-  const float = token.value;
-  if (float === false) {
-    buf2.push([Type.float.majorEncoded | MINOR_FALSE]);
-  } else if (float === true) {
-    buf2.push([Type.float.majorEncoded | MINOR_TRUE]);
-  } else if (float === null) {
-    buf2.push([Type.float.majorEncoded | MINOR_NULL]);
-  } else if (float === void 0) {
-    buf2.push([Type.float.majorEncoded | MINOR_UNDEFINED]);
-  } else {
-    let decoded;
-    let success = false;
-    if (!options || options.float64 !== true) {
-      encodeFloat16(float);
-      decoded = readFloat16(ui8a, 1);
-      if (float === decoded || Number.isNaN(float)) {
-        ui8a[0] = 249;
-        buf2.push(ui8a.slice(0, 3));
-        success = true;
-      } else {
-        encodeFloat32(float);
-        decoded = readFloat32(ui8a, 1);
-        if (float === decoded) {
-          ui8a[0] = 250;
-          buf2.push(ui8a.slice(0, 5));
-          success = true;
-        }
-      }
-    }
-    if (!success) {
-      encodeFloat64(float);
-      decoded = readFloat64(ui8a, 1);
-      ui8a[0] = 251;
-      buf2.push(ui8a.slice(0, 9));
-    }
-  }
-}
-encodeFloat.encodedSize = function encodedSize8(token, options) {
-  const float = token.value;
-  if (float === false || float === true || float === null || float === void 0) {
-    return 1;
-  }
-  if (!options || options.float64 !== true) {
-    encodeFloat16(float);
-    let decoded = readFloat16(ui8a, 1);
-    if (float === decoded || Number.isNaN(float)) {
-      return 3;
-    }
-    encodeFloat32(float);
-    decoded = readFloat32(ui8a, 1);
-    if (float === decoded) {
-      return 5;
-    }
-  }
-  return 9;
-};
-var buffer = new ArrayBuffer(9);
-var dataView = new DataView(buffer, 1);
-var ui8a = new Uint8Array(buffer, 0);
-function encodeFloat16(inp) {
-  if (inp === Infinity) {
-    dataView.setUint16(0, 31744, false);
-  } else if (inp === -Infinity) {
-    dataView.setUint16(0, 64512, false);
-  } else if (Number.isNaN(inp)) {
-    dataView.setUint16(0, 32256, false);
-  } else {
-    dataView.setFloat32(0, inp);
-    const valu32 = dataView.getUint32(0);
-    const exponent = (valu32 & 2139095040) >> 23;
-    const mantissa = valu32 & 8388607;
-    if (exponent === 255) {
-      dataView.setUint16(0, 31744, false);
-    } else if (exponent === 0) {
-      dataView.setUint16(0, (inp & 2147483648) >> 16 | mantissa >> 13, false);
-    } else {
-      const logicalExponent = exponent - 127;
-      if (logicalExponent < -24) {
-        dataView.setUint16(0, 0);
-      } else if (logicalExponent < -14) {
-        dataView.setUint16(0, (valu32 & 2147483648) >> 16 | /* sign bit */
-        1 << 24 + logicalExponent, false);
-      } else {
-        dataView.setUint16(0, (valu32 & 2147483648) >> 16 | logicalExponent + 15 << 10 | mantissa >> 13, false);
-      }
-    }
-  }
-}
-function readFloat16(ui8a2, pos) {
-  if (ui8a2.length - pos < 2) {
-    throw new Error(`${decodeErrPrefix} not enough data for float16`);
-  }
-  const half = (ui8a2[pos] << 8) + ui8a2[pos + 1];
-  if (half === 31744) {
-    return Infinity;
-  }
-  if (half === 64512) {
-    return -Infinity;
-  }
-  if (half === 32256) {
-    return NaN;
-  }
-  const exp = half >> 10 & 31;
-  const mant = half & 1023;
-  let val;
-  if (exp === 0) {
-    val = mant * 2 ** -24;
-  } else if (exp !== 31) {
-    val = (mant + 1024) * 2 ** (exp - 25);
-  } else {
-    val = mant === 0 ? Infinity : NaN;
-  }
-  return half & 32768 ? -val : val;
-}
-function encodeFloat32(inp) {
-  dataView.setFloat32(0, inp, false);
-}
-function readFloat32(ui8a2, pos) {
-  if (ui8a2.length - pos < 4) {
-    throw new Error(`${decodeErrPrefix} not enough data for float32`);
-  }
-  const offset = (ui8a2.byteOffset || 0) + pos;
-  return new DataView(ui8a2.buffer, offset, 4).getFloat32(0, false);
-}
-function encodeFloat64(inp) {
-  dataView.setFloat64(0, inp, false);
-}
-function readFloat64(ui8a2, pos) {
-  if (ui8a2.length - pos < 8) {
-    throw new Error(`${decodeErrPrefix} not enough data for float64`);
-  }
-  const offset = (ui8a2.byteOffset || 0) + pos;
-  return new DataView(ui8a2.buffer, offset, 8).getFloat64(0, false);
-}
-encodeFloat.compareTokens = encodeUint.compareTokens;
-
-// node_modules/cborg/lib/jump.js
-function invalidMinor(data, pos, minor) {
-  throw new Error(`${decodeErrPrefix} encountered invalid minor (${minor}) for major ${data[pos] >>> 5}`);
-}
-function errorer(msg) {
-  return () => {
-    throw new Error(`${decodeErrPrefix} ${msg}`);
-  };
-}
-var jump = [];
-for (let i = 0; i <= 23; i++) {
-  jump[i] = invalidMinor;
-}
-jump[24] = decodeUint8;
-jump[25] = decodeUint16;
-jump[26] = decodeUint32;
-jump[27] = decodeUint64;
-jump[28] = invalidMinor;
-jump[29] = invalidMinor;
-jump[30] = invalidMinor;
-jump[31] = invalidMinor;
-for (let i = 32; i <= 55; i++) {
-  jump[i] = invalidMinor;
-}
-jump[56] = decodeNegint8;
-jump[57] = decodeNegint16;
-jump[58] = decodeNegint32;
-jump[59] = decodeNegint64;
-jump[60] = invalidMinor;
-jump[61] = invalidMinor;
-jump[62] = invalidMinor;
-jump[63] = invalidMinor;
-for (let i = 64; i <= 87; i++) {
-  jump[i] = decodeBytesCompact;
-}
-jump[88] = decodeBytes8;
-jump[89] = decodeBytes16;
-jump[90] = decodeBytes32;
-jump[91] = decodeBytes64;
-jump[92] = invalidMinor;
-jump[93] = invalidMinor;
-jump[94] = invalidMinor;
-jump[95] = errorer("indefinite length bytes/strings are not supported");
-for (let i = 96; i <= 119; i++) {
-  jump[i] = decodeStringCompact;
-}
-jump[120] = decodeString8;
-jump[121] = decodeString16;
-jump[122] = decodeString32;
-jump[123] = decodeString64;
-jump[124] = invalidMinor;
-jump[125] = invalidMinor;
-jump[126] = invalidMinor;
-jump[127] = errorer("indefinite length bytes/strings are not supported");
-for (let i = 128; i <= 151; i++) {
-  jump[i] = decodeArrayCompact;
-}
-jump[152] = decodeArray8;
-jump[153] = decodeArray16;
-jump[154] = decodeArray32;
-jump[155] = decodeArray64;
-jump[156] = invalidMinor;
-jump[157] = invalidMinor;
-jump[158] = invalidMinor;
-jump[159] = decodeArrayIndefinite;
-for (let i = 160; i <= 183; i++) {
-  jump[i] = decodeMapCompact;
-}
-jump[184] = decodeMap8;
-jump[185] = decodeMap16;
-jump[186] = decodeMap32;
-jump[187] = decodeMap64;
-jump[188] = invalidMinor;
-jump[189] = invalidMinor;
-jump[190] = invalidMinor;
-jump[191] = decodeMapIndefinite;
-for (let i = 192; i <= 215; i++) {
-  jump[i] = decodeTagCompact;
-}
-jump[216] = decodeTag8;
-jump[217] = decodeTag16;
-jump[218] = decodeTag32;
-jump[219] = decodeTag64;
-jump[220] = invalidMinor;
-jump[221] = invalidMinor;
-jump[222] = invalidMinor;
-jump[223] = invalidMinor;
-for (let i = 224; i <= 243; i++) {
-  jump[i] = errorer("simple values are not supported");
-}
-jump[244] = invalidMinor;
-jump[245] = invalidMinor;
-jump[246] = invalidMinor;
-jump[247] = decodeUndefined;
-jump[248] = errorer("simple values are not supported");
-jump[249] = decodeFloat16;
-jump[250] = decodeFloat32;
-jump[251] = decodeFloat64;
-jump[252] = invalidMinor;
-jump[253] = invalidMinor;
-jump[254] = invalidMinor;
-jump[255] = decodeBreak;
-var quick = [];
-for (let i = 0; i < 24; i++) {
-  quick[i] = new Token(Type.uint, i, 1);
-}
-for (let i = -1; i >= -24; i--) {
-  quick[31 - i] = new Token(Type.negint, i, 1);
-}
-quick[64] = new Token(Type.bytes, new Uint8Array(0), 1);
-quick[96] = new Token(Type.string, "", 1);
-quick[128] = new Token(Type.array, 0, 1);
-quick[160] = new Token(Type.map, 0, 1);
-quick[244] = new Token(Type.false, false, 1);
-quick[245] = new Token(Type.true, true, 1);
-quick[246] = new Token(Type.null, null, 1);
-function quickEncodeToken(token) {
-  switch (token.type) {
-    case Type.false:
-      return fromArray([244]);
-    case Type.true:
-      return fromArray([245]);
-    case Type.null:
-      return fromArray([246]);
-    case Type.bytes:
-      if (!token.value.length) {
-        return fromArray([64]);
-      }
-      return;
-    case Type.string:
-      if (token.value === "") {
-        return fromArray([96]);
-      }
-      return;
-    case Type.array:
-      if (token.value === 0) {
-        return fromArray([128]);
-      }
-      return;
-    case Type.map:
-      if (token.value === 0) {
-        return fromArray([160]);
-      }
-      return;
-    case Type.uint:
-      if (token.value < 24) {
-        return fromArray([Number(token.value)]);
-      }
-      return;
-    case Type.negint:
-      if (token.value >= -24) {
-        return fromArray([31 - Number(token.value)]);
-      }
-  }
-}
-
-// node_modules/cborg/lib/encode.js
-var defaultEncodeOptions = {
-  float64: false,
-  mapSorter,
-  quickEncodeToken
-};
-function makeCborEncoders() {
-  const encoders = [];
-  encoders[Type.uint.major] = encodeUint;
-  encoders[Type.negint.major] = encodeNegint;
-  encoders[Type.bytes.major] = encodeBytes;
-  encoders[Type.string.major] = encodeString;
-  encoders[Type.array.major] = encodeArray;
-  encoders[Type.map.major] = encodeMap;
-  encoders[Type.tag.major] = encodeTag;
-  encoders[Type.float.major] = encodeFloat;
-  return encoders;
-}
-var cborEncoders = makeCborEncoders();
-var buf = new Bl();
-var Ref = class _Ref {
-  /**
-   * @param {object|any[]} obj
-   * @param {Reference|undefined} parent
-   */
-  constructor(obj, parent) {
-    this.obj = obj;
-    this.parent = parent;
-  }
-  /**
-   * @param {object|any[]} obj
-   * @returns {boolean}
-   */
-  includes(obj) {
-    let p = this;
-    do {
-      if (p.obj === obj) {
-        return true;
-      }
-    } while (p = p.parent);
-    return false;
-  }
-  /**
-   * @param {Reference|undefined} stack
-   * @param {object|any[]} obj
-   * @returns {Reference}
-   */
-  static createCheck(stack, obj) {
-    if (stack && stack.includes(obj)) {
-      throw new Error(`${encodeErrPrefix} object contains circular references`);
-    }
-    return new _Ref(obj, stack);
-  }
-};
-var simpleTokens = {
-  null: new Token(Type.null, null),
-  undefined: new Token(Type.undefined, void 0),
-  true: new Token(Type.true, true),
-  false: new Token(Type.false, false),
-  emptyArray: new Token(Type.array, 0),
-  emptyMap: new Token(Type.map, 0)
-};
-var typeEncoders = {
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  number(obj, _typ, _options, _refStack) {
-    if (!Number.isInteger(obj) || !Number.isSafeInteger(obj)) {
-      return new Token(Type.float, obj);
-    } else if (obj >= 0) {
-      return new Token(Type.uint, obj);
-    } else {
-      return new Token(Type.negint, obj);
-    }
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  bigint(obj, _typ, _options, _refStack) {
-    if (obj >= BigInt(0)) {
-      return new Token(Type.uint, obj);
-    } else {
-      return new Token(Type.negint, obj);
-    }
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  Uint8Array(obj, _typ, _options, _refStack) {
-    return new Token(Type.bytes, obj);
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  string(obj, _typ, _options, _refStack) {
-    return new Token(Type.string, obj);
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  boolean(obj, _typ, _options, _refStack) {
-    return obj ? simpleTokens.true : simpleTokens.false;
-  },
-  /**
-   * @param {any} _obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  null(_obj, _typ, _options, _refStack) {
-    return simpleTokens.null;
-  },
-  /**
-   * @param {any} _obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  undefined(_obj, _typ, _options, _refStack) {
-    return simpleTokens.undefined;
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  ArrayBuffer(obj, _typ, _options, _refStack) {
-    return new Token(Type.bytes, new Uint8Array(obj));
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  DataView(obj, _typ, _options, _refStack) {
-    return new Token(Type.bytes, new Uint8Array(obj.buffer, obj.byteOffset, obj.byteLength));
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} options
-   * @param {Reference} [refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  Array(obj, _typ, options, refStack) {
-    if (!obj.length) {
-      if (options.addBreakTokens === true) {
-        return [simpleTokens.emptyArray, new Token(Type.break)];
-      }
-      return simpleTokens.emptyArray;
-    }
-    refStack = Ref.createCheck(refStack, obj);
-    const entries = [];
-    let i = 0;
-    for (const e of obj) {
-      entries[i++] = objectToTokens(e, options, refStack);
-    }
-    if (options.addBreakTokens) {
-      return [new Token(Type.array, obj.length), entries, new Token(Type.break)];
-    }
-    return [new Token(Type.array, obj.length), entries];
-  },
-  /**
-   * @param {any} obj
-   * @param {string} typ
-   * @param {EncodeOptions} options
-   * @param {Reference} [refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  Object(obj, typ, options, refStack) {
-    const isMap = typ !== "Object";
-    const keys = isMap ? obj.keys() : Object.keys(obj);
-    const length4 = isMap ? obj.size : keys.length;
-    if (!length4) {
-      if (options.addBreakTokens === true) {
-        return [simpleTokens.emptyMap, new Token(Type.break)];
-      }
-      return simpleTokens.emptyMap;
-    }
-    refStack = Ref.createCheck(refStack, obj);
-    const entries = [];
-    let i = 0;
-    for (const key of keys) {
-      entries[i++] = [
-        objectToTokens(key, options, refStack),
-        objectToTokens(isMap ? obj.get(key) : obj[key], options, refStack)
-      ];
-    }
-    sortMapEntries(entries, options);
-    if (options.addBreakTokens) {
-      return [new Token(Type.map, length4), entries, new Token(Type.break)];
-    }
-    return [new Token(Type.map, length4), entries];
-  }
-};
-typeEncoders.Map = typeEncoders.Object;
-typeEncoders.Buffer = typeEncoders.Uint8Array;
-for (const typ of "Uint8Clamped Uint16 Uint32 Int8 Int16 Int32 BigUint64 BigInt64 Float32 Float64".split(" ")) {
-  typeEncoders[`${typ}Array`] = typeEncoders.DataView;
-}
-function objectToTokens(obj, options = {}, refStack) {
-  const typ = is(obj);
-  const customTypeEncoder = options && options.typeEncoders && /** @type {OptionalTypeEncoder} */
-  options.typeEncoders[typ] || typeEncoders[typ];
-  if (typeof customTypeEncoder === "function") {
-    const tokens = customTypeEncoder(obj, typ, options, refStack);
-    if (tokens != null) {
-      return tokens;
-    }
-  }
-  const typeEncoder = typeEncoders[typ];
-  if (!typeEncoder) {
-    throw new Error(`${encodeErrPrefix} unsupported type: ${typ}`);
-  }
-  return typeEncoder(obj, typ, options, refStack);
-}
-function sortMapEntries(entries, options) {
-  if (options.mapSorter) {
-    entries.sort(options.mapSorter);
-  }
-}
-function mapSorter(e1, e2) {
-  const keyToken1 = Array.isArray(e1[0]) ? e1[0][0] : e1[0];
-  const keyToken2 = Array.isArray(e2[0]) ? e2[0][0] : e2[0];
-  if (keyToken1.type !== keyToken2.type) {
-    return keyToken1.type.compare(keyToken2.type);
-  }
-  const major = keyToken1.type.major;
-  const tcmp = cborEncoders[major].compareTokens(keyToken1, keyToken2);
-  if (tcmp === 0) {
-    console.warn("WARNING: complex key types used, CBOR key sorting guarantees are gone");
-  }
-  return tcmp;
-}
-function tokensToEncoded(buf2, tokens, encoders, options) {
-  if (Array.isArray(tokens)) {
-    for (const token of tokens) {
-      tokensToEncoded(buf2, token, encoders, options);
-    }
-  } else {
-    encoders[tokens.type.major](buf2, tokens, options);
-  }
-}
-function encodeCustom(data, encoders, options) {
-  const tokens = objectToTokens(data, options);
-  if (!Array.isArray(tokens) && options.quickEncodeToken) {
-    const quickBytes = options.quickEncodeToken(tokens);
-    if (quickBytes) {
-      return quickBytes;
-    }
-    const encoder = encoders[tokens.type.major];
-    if (encoder.encodedSize) {
-      const size = encoder.encodedSize(tokens, options);
-      const buf2 = new Bl(size);
-      encoder(buf2, tokens, options);
-      if (buf2.chunks.length !== 1) {
-        throw new Error(`Unexpected error: pre-calculated length for ${tokens} was wrong`);
-      }
-      return asU8A(buf2.chunks[0]);
-    }
-  }
-  buf.reset();
-  tokensToEncoded(buf, tokens, encoders, options);
-  return buf.toBytes(true);
-}
-function encode(data, options) {
-  options = Object.assign({}, defaultEncodeOptions, options);
-  return encodeCustom(data, cborEncoders, options);
-}
-
-// node_modules/cborg/lib/decode.js
-var defaultDecodeOptions = {
-  strict: false,
-  allowIndefinite: true,
-  allowUndefined: true,
-  allowBigInt: true
-};
-var Tokeniser = class {
-  /**
-   * @param {Uint8Array} data
-   * @param {DecodeOptions} options
-   */
-  constructor(data, options = {}) {
-    this._pos = 0;
-    this.data = data;
-    this.options = options;
-  }
-  pos() {
-    return this._pos;
-  }
-  done() {
-    return this._pos >= this.data.length;
-  }
-  next() {
-    const byt = this.data[this._pos];
-    let token = quick[byt];
-    if (token === void 0) {
-      const decoder = jump[byt];
-      if (!decoder) {
-        throw new Error(`${decodeErrPrefix} no decoder for major type ${byt >>> 5} (byte 0x${byt.toString(16).padStart(2, "0")})`);
-      }
-      const minor = byt & 31;
-      token = decoder(this.data, this._pos, minor, this.options);
-    }
-    this._pos += token.encodedLength;
-    return token;
-  }
-};
-var DONE = Symbol.for("DONE");
-var BREAK = Symbol.for("BREAK");
-function tokenToArray(token, tokeniser, options) {
-  const arr = [];
-  for (let i = 0; i < token.value; i++) {
-    const value = tokensToObject(tokeniser, options);
-    if (value === BREAK) {
-      if (token.value === Infinity) {
-        break;
-      }
-      throw new Error(`${decodeErrPrefix} got unexpected break to lengthed array`);
-    }
-    if (value === DONE) {
-      throw new Error(`${decodeErrPrefix} found array but not enough entries (got ${i}, expected ${token.value})`);
-    }
-    arr[i] = value;
-  }
-  return arr;
-}
-function tokenToMap(token, tokeniser, options) {
-  const useMaps = options.useMaps === true;
-  const obj = useMaps ? void 0 : {};
-  const m = useMaps ? /* @__PURE__ */ new Map() : void 0;
-  for (let i = 0; i < token.value; i++) {
-    const key = tokensToObject(tokeniser, options);
-    if (key === BREAK) {
-      if (token.value === Infinity) {
-        break;
-      }
-      throw new Error(`${decodeErrPrefix} got unexpected break to lengthed map`);
-    }
-    if (key === DONE) {
-      throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no key], expected ${token.value})`);
-    }
-    if (useMaps !== true && typeof key !== "string") {
-      throw new Error(`${decodeErrPrefix} non-string keys not supported (got ${typeof key})`);
-    }
-    if (options.rejectDuplicateMapKeys === true) {
-      if (useMaps && m.has(key) || !useMaps && key in obj) {
-        throw new Error(`${decodeErrPrefix} found repeat map key "${key}"`);
-      }
-    }
-    const value = tokensToObject(tokeniser, options);
-    if (value === DONE) {
-      throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no value], expected ${token.value})`);
-    }
-    if (useMaps) {
-      m.set(key, value);
-    } else {
-      obj[key] = value;
-    }
-  }
-  return useMaps ? m : obj;
-}
-function tokensToObject(tokeniser, options) {
-  if (tokeniser.done()) {
-    return DONE;
-  }
-  const token = tokeniser.next();
-  if (token.type === Type.break) {
-    return BREAK;
-  }
-  if (token.type.terminal) {
-    return token.value;
-  }
-  if (token.type === Type.array) {
-    return tokenToArray(token, tokeniser, options);
-  }
-  if (token.type === Type.map) {
-    return tokenToMap(token, tokeniser, options);
-  }
-  if (token.type === Type.tag) {
-    if (options.tags && typeof options.tags[token.value] === "function") {
-      const tagged = tokensToObject(tokeniser, options);
-      return options.tags[token.value](tagged);
-    }
-    throw new Error(`${decodeErrPrefix} tag not supported (${token.value})`);
-  }
-  throw new Error("unsupported");
-}
-function decodeFirst(data, options) {
-  if (!(data instanceof Uint8Array)) {
-    throw new Error(`${decodeErrPrefix} data to decode must be a Uint8Array`);
-  }
-  options = Object.assign({}, defaultDecodeOptions, options);
-  const tokeniser = options.tokenizer || new Tokeniser(data, options);
-  const decoded = tokensToObject(tokeniser, options);
-  if (decoded === DONE) {
-    throw new Error(`${decodeErrPrefix} did not find any content to decode`);
-  }
-  if (decoded === BREAK) {
-    throw new Error(`${decodeErrPrefix} got unexpected break`);
-  }
-  return [decoded, data.subarray(tokeniser.pos())];
-}
-function decode(data, options) {
-  const [decoded, remainder] = decodeFirst(data, options);
-  if (remainder.length > 0) {
-    throw new Error(`${decodeErrPrefix} too many terminals, data makes no sense`);
-  }
-  return decoded;
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bytes.js
-var empty = new Uint8Array(0);
-function equals(aa, bb) {
-  if (aa === bb)
-    return true;
-  if (aa.byteLength !== bb.byteLength) {
-    return false;
-  }
-  for (let ii = 0; ii < aa.byteLength; ii++) {
-    if (aa[ii] !== bb[ii]) {
-      return false;
-    }
-  }
-  return true;
-}
-function coerce(o) {
-  if (o instanceof Uint8Array && o.constructor.name === "Uint8Array")
-    return o;
-  if (o instanceof ArrayBuffer)
-    return new Uint8Array(o);
-  if (ArrayBuffer.isView(o)) {
-    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
-  }
-  throw new Error("Unknown type, must be binary type");
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/base-x.js
-function base(ALPHABET, name4) {
-  if (ALPHABET.length >= 255) {
-    throw new TypeError("Alphabet too long");
-  }
-  var BASE_MAP = new Uint8Array(256);
-  for (var j = 0; j < BASE_MAP.length; j++) {
-    BASE_MAP[j] = 255;
-  }
-  for (var i = 0; i < ALPHABET.length; i++) {
-    var x = ALPHABET.charAt(i);
-    var xc = x.charCodeAt(0);
-    if (BASE_MAP[xc] !== 255) {
-      throw new TypeError(x + " is ambiguous");
-    }
-    BASE_MAP[xc] = i;
-  }
-  var BASE = ALPHABET.length;
-  var LEADER = ALPHABET.charAt(0);
-  var FACTOR = Math.log(BASE) / Math.log(256);
-  var iFACTOR = Math.log(256) / Math.log(BASE);
-  function encode12(source) {
-    if (source instanceof Uint8Array)
-      ;
-    else if (ArrayBuffer.isView(source)) {
-      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
-    } else if (Array.isArray(source)) {
-      source = Uint8Array.from(source);
-    }
-    if (!(source instanceof Uint8Array)) {
-      throw new TypeError("Expected Uint8Array");
-    }
-    if (source.length === 0) {
-      return "";
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    var pbegin = 0;
-    var pend = source.length;
-    while (pbegin !== pend && source[pbegin] === 0) {
-      pbegin++;
-      zeroes++;
-    }
-    var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
-    var b58 = new Uint8Array(size);
-    while (pbegin !== pend) {
-      var carry = source[pbegin];
-      var i2 = 0;
-      for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) {
-        carry += 256 * b58[it1] >>> 0;
-        b58[it1] = carry % BASE >>> 0;
-        carry = carry / BASE >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      pbegin++;
-    }
-    var it2 = size - length4;
-    while (it2 !== size && b58[it2] === 0) {
-      it2++;
-    }
-    var str = LEADER.repeat(zeroes);
-    for (; it2 < size; ++it2) {
-      str += ALPHABET.charAt(b58[it2]);
-    }
-    return str;
-  }
-  function decodeUnsafe(source) {
-    if (typeof source !== "string") {
-      throw new TypeError("Expected String");
-    }
-    if (source.length === 0) {
-      return new Uint8Array();
-    }
-    var psz = 0;
-    if (source[psz] === " ") {
-      return;
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    while (source[psz] === LEADER) {
-      zeroes++;
-      psz++;
-    }
-    var size = (source.length - psz) * FACTOR + 1 >>> 0;
-    var b256 = new Uint8Array(size);
-    while (source[psz]) {
-      var carry = BASE_MAP[source.charCodeAt(psz)];
-      if (carry === 255) {
-        return;
-      }
-      var i2 = 0;
-      for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) {
-        carry += BASE * b256[it3] >>> 0;
-        b256[it3] = carry % 256 >>> 0;
-        carry = carry / 256 >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      psz++;
-    }
-    if (source[psz] === " ") {
-      return;
-    }
-    var it4 = size - length4;
-    while (it4 !== size && b256[it4] === 0) {
-      it4++;
-    }
-    var vch = new Uint8Array(zeroes + (size - it4));
-    var j2 = zeroes;
-    while (it4 !== size) {
-      vch[j2++] = b256[it4++];
-    }
-    return vch;
-  }
-  function decode15(string2) {
-    var buffer2 = decodeUnsafe(string2);
-    if (buffer2) {
-      return buffer2;
-    }
-    throw new Error(`Non-${name4} character`);
-  }
-  return {
-    encode: encode12,
-    decodeUnsafe,
-    decode: decode15
-  };
-}
-var src = base;
-var _brrp__multiformats_scope_baseX = src;
-var base_x_default = _brrp__multiformats_scope_baseX;
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base.js
-var Encoder = class {
-  name;
-  prefix;
-  baseEncode;
-  constructor(name4, prefix, baseEncode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-  }
-  encode(bytes) {
-    if (bytes instanceof Uint8Array) {
-      return `${this.prefix}${this.baseEncode(bytes)}`;
-    } else {
-      throw Error("Unknown type, must be binary type");
-    }
-  }
-};
-var Decoder = class {
-  name;
-  prefix;
-  baseDecode;
-  prefixCodePoint;
-  constructor(name4, prefix, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    if (prefix.codePointAt(0) === void 0) {
-      throw new Error("Invalid prefix character");
-    }
-    this.prefixCodePoint = prefix.codePointAt(0);
-    this.baseDecode = baseDecode;
-  }
-  decode(text) {
-    if (typeof text === "string") {
-      if (text.codePointAt(0) !== this.prefixCodePoint) {
-        throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
-      }
-      return this.baseDecode(text.slice(this.prefix.length));
-    } else {
-      throw Error("Can only multibase decode strings");
-    }
-  }
-  or(decoder) {
-    return or(this, decoder);
-  }
-};
-var ComposedDecoder = class {
-  decoders;
-  constructor(decoders) {
-    this.decoders = decoders;
-  }
-  or(decoder) {
-    return or(this, decoder);
-  }
-  decode(input) {
-    const prefix = input[0];
-    const decoder = this.decoders[prefix];
-    if (decoder != null) {
-      return decoder.decode(input);
-    } else {
-      throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
-    }
-  }
-};
-function or(left, right) {
-  return new ComposedDecoder({
-    ...left.decoders ?? { [left.prefix]: left },
-    ...right.decoders ?? { [right.prefix]: right }
-  });
-}
-var Codec = class {
-  name;
-  prefix;
-  baseEncode;
-  baseDecode;
-  encoder;
-  decoder;
-  constructor(name4, prefix, baseEncode, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-    this.baseDecode = baseDecode;
-    this.encoder = new Encoder(name4, prefix, baseEncode);
-    this.decoder = new Decoder(name4, prefix, baseDecode);
-  }
-  encode(input) {
-    return this.encoder.encode(input);
-  }
-  decode(input) {
-    return this.decoder.decode(input);
-  }
-};
-function from({ name: name4, prefix, encode: encode12, decode: decode15 }) {
-  return new Codec(name4, prefix, encode12, decode15);
-}
-function baseX({ name: name4, prefix, alphabet: alphabet2 }) {
-  const { encode: encode12, decode: decode15 } = base_x_default(alphabet2, name4);
-  return from({
-    prefix,
-    name: name4,
-    encode: encode12,
-    decode: (text) => coerce(decode15(text))
-  });
-}
-function decode2(string2, alphabet2, bitsPerChar, name4) {
-  const codes = {};
-  for (let i = 0; i < alphabet2.length; ++i) {
-    codes[alphabet2[i]] = i;
-  }
-  let end = string2.length;
-  while (string2[end - 1] === "=") {
-    --end;
-  }
-  const out = new Uint8Array(end * bitsPerChar / 8 | 0);
-  let bits = 0;
-  let buffer2 = 0;
-  let written = 0;
-  for (let i = 0; i < end; ++i) {
-    const value = codes[string2[i]];
-    if (value === void 0) {
-      throw new SyntaxError(`Non-${name4} character`);
-    }
-    buffer2 = buffer2 << bitsPerChar | value;
-    bits += bitsPerChar;
-    if (bits >= 8) {
-      bits -= 8;
-      out[written++] = 255 & buffer2 >> bits;
-    }
-  }
-  if (bits >= bitsPerChar || (255 & buffer2 << 8 - bits) !== 0) {
-    throw new SyntaxError("Unexpected end of data");
-  }
-  return out;
-}
-function encode2(data, alphabet2, bitsPerChar) {
-  const pad = alphabet2[alphabet2.length - 1] === "=";
-  const mask = (1 << bitsPerChar) - 1;
-  let out = "";
-  let bits = 0;
-  let buffer2 = 0;
-  for (let i = 0; i < data.length; ++i) {
-    buffer2 = buffer2 << 8 | data[i];
-    bits += 8;
-    while (bits > bitsPerChar) {
-      bits -= bitsPerChar;
-      out += alphabet2[mask & buffer2 >> bits];
-    }
-  }
-  if (bits !== 0) {
-    out += alphabet2[mask & buffer2 << bitsPerChar - bits];
-  }
-  if (pad) {
-    while ((out.length * bitsPerChar & 7) !== 0) {
-      out += "=";
-    }
-  }
-  return out;
-}
-function rfc4648({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) {
-  return from({
-    prefix,
-    name: name4,
-    encode(input) {
-      return encode2(input, alphabet2, bitsPerChar);
-    },
-    decode(input) {
-      return decode2(input, alphabet2, bitsPerChar, name4);
-    }
-  });
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base32.js
-var base32 = rfc4648({
-  prefix: "b",
-  name: "base32",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567",
-  bitsPerChar: 5
-});
-var base32upper = rfc4648({
-  prefix: "B",
-  name: "base32upper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
-  bitsPerChar: 5
-});
-var base32pad = rfc4648({
-  prefix: "c",
-  name: "base32pad",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
-  bitsPerChar: 5
-});
-var base32padupper = rfc4648({
-  prefix: "C",
-  name: "base32padupper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
-  bitsPerChar: 5
-});
-var base32hex = rfc4648({
-  prefix: "v",
-  name: "base32hex",
-  alphabet: "0123456789abcdefghijklmnopqrstuv",
-  bitsPerChar: 5
-});
-var base32hexupper = rfc4648({
-  prefix: "V",
-  name: "base32hexupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
-  bitsPerChar: 5
-});
-var base32hexpad = rfc4648({
-  prefix: "t",
-  name: "base32hexpad",
-  alphabet: "0123456789abcdefghijklmnopqrstuv=",
-  bitsPerChar: 5
-});
-var base32hexpadupper = rfc4648({
-  prefix: "T",
-  name: "base32hexpadupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
-  bitsPerChar: 5
-});
-var base32z = rfc4648({
-  prefix: "h",
-  name: "base32z",
-  alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
-  bitsPerChar: 5
-});
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base58.js
-var base58btc = baseX({
-  name: "base58btc",
-  prefix: "z",
-  alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
-});
-var base58flickr = baseX({
-  name: "base58flickr",
-  prefix: "Z",
-  alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
-});
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/varint.js
-var encode_1 = encode3;
-var MSB = 128;
-var REST = 127;
-var MSBALL = ~REST;
-var INT = Math.pow(2, 31);
-function encode3(num, out, offset) {
-  out = out || [];
-  offset = offset || 0;
-  var oldOffset = offset;
-  while (num >= INT) {
-    out[offset++] = num & 255 | MSB;
-    num /= 128;
-  }
-  while (num & MSBALL) {
-    out[offset++] = num & 255 | MSB;
-    num >>>= 7;
-  }
-  out[offset] = num | 0;
-  encode3.bytes = offset - oldOffset + 1;
-  return out;
-}
-var decode3 = read;
-var MSB$1 = 128;
-var REST$1 = 127;
-function read(buf2, offset) {
-  var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
-  do {
-    if (counter >= l) {
-      read.bytes = 0;
-      throw new RangeError("Could not decode varint");
-    }
-    b = buf2[counter++];
-    res += shift < 28 ? (b & REST$1) << shift : (b & REST$1) * Math.pow(2, shift);
-    shift += 7;
-  } while (b >= MSB$1);
-  read.bytes = counter - offset;
-  return res;
-}
-var N1 = Math.pow(2, 7);
-var N2 = Math.pow(2, 14);
-var N3 = Math.pow(2, 21);
-var N4 = Math.pow(2, 28);
-var N5 = Math.pow(2, 35);
-var N6 = Math.pow(2, 42);
-var N7 = Math.pow(2, 49);
-var N8 = Math.pow(2, 56);
-var N9 = Math.pow(2, 63);
-var length = function(value) {
-  return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 9 : 10;
-};
-var varint = {
-  encode: encode_1,
-  decode: decode3,
-  encodingLength: length
-};
-var _brrp_varint = varint;
-var varint_default = _brrp_varint;
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/varint.js
-function decode4(data, offset = 0) {
-  const code5 = varint_default.decode(data, offset);
-  return [code5, varint_default.decode.bytes];
-}
-function encodeTo(int, target, offset = 0) {
-  varint_default.encode(int, target, offset);
-  return target;
-}
-function encodingLength(int) {
-  return varint_default.encodingLength(int);
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/hashes/digest.js
-function create(code5, digest2) {
-  const size = digest2.byteLength;
-  const sizeOffset = encodingLength(code5);
-  const digestOffset = sizeOffset + encodingLength(size);
-  const bytes = new Uint8Array(digestOffset + size);
-  encodeTo(code5, bytes, 0);
-  encodeTo(size, bytes, sizeOffset);
-  bytes.set(digest2, digestOffset);
-  return new Digest(code5, size, digest2, bytes);
-}
-function decode5(multihash) {
-  const bytes = coerce(multihash);
-  const [code5, sizeOffset] = decode4(bytes);
-  const [size, digestOffset] = decode4(bytes.subarray(sizeOffset));
-  const digest2 = bytes.subarray(sizeOffset + digestOffset);
-  if (digest2.byteLength !== size) {
-    throw new Error("Incorrect length");
-  }
-  return new Digest(code5, size, digest2, bytes);
-}
-function equals2(a, b) {
-  if (a === b) {
-    return true;
-  } else {
-    const data = b;
-    return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals(a.bytes, data.bytes);
-  }
-}
-var Digest = class {
-  code;
-  size;
-  digest;
-  bytes;
-  /**
-   * Creates a multihash digest.
-   */
-  constructor(code5, size, digest2, bytes) {
-    this.code = code5;
-    this.size = size;
-    this.digest = digest2;
-    this.bytes = bytes;
-  }
-};
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/cid.js
-function format(link, base3) {
-  const { bytes, version } = link;
-  switch (version) {
-    case 0:
-      return toStringV0(bytes, baseCache(link), base3 ?? base58btc.encoder);
-    default:
-      return toStringV1(bytes, baseCache(link), base3 ?? base32.encoder);
-  }
-}
-var cache = /* @__PURE__ */ new WeakMap();
-function baseCache(cid) {
-  const baseCache3 = cache.get(cid);
-  if (baseCache3 == null) {
-    const baseCache4 = /* @__PURE__ */ new Map();
-    cache.set(cid, baseCache4);
-    return baseCache4;
-  }
-  return baseCache3;
-}
-var CID = class _CID {
-  code;
-  version;
-  multihash;
-  bytes;
-  "/";
-  /**
-   * @param version - Version of the CID
-   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param multihash - (Multi)hash of the of the content.
-   */
-  constructor(version, code5, multihash, bytes) {
-    this.code = code5;
-    this.version = version;
-    this.multihash = multihash;
-    this.bytes = bytes;
-    this["/"] = bytes;
-  }
-  /**
-   * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
-   * please either use `CID.asCID(cid)` or switch to new signalling mechanism
-   *
-   * @deprecated
-   */
-  get asCID() {
-    return this;
-  }
-  // ArrayBufferView
-  get byteOffset() {
-    return this.bytes.byteOffset;
-  }
-  // ArrayBufferView
-  get byteLength() {
-    return this.bytes.byteLength;
-  }
-  toV0() {
-    switch (this.version) {
-      case 0: {
-        return this;
-      }
-      case 1: {
-        const { code: code5, multihash } = this;
-        if (code5 !== DAG_PB_CODE) {
-          throw new Error("Cannot convert a non dag-pb CID to CIDv0");
-        }
-        if (multihash.code !== SHA_256_CODE) {
-          throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
-        }
-        return _CID.createV0(multihash);
-      }
-      default: {
-        throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`);
-      }
-    }
-  }
-  toV1() {
-    switch (this.version) {
-      case 0: {
-        const { code: code5, digest: digest2 } = this.multihash;
-        const multihash = create(code5, digest2);
-        return _CID.createV1(this.code, multihash);
-      }
-      case 1: {
-        return this;
-      }
-      default: {
-        throw Error(`Can not convert CID version ${this.version} to version 1. This is a bug please report`);
-      }
-    }
-  }
-  equals(other) {
-    return _CID.equals(this, other);
-  }
-  static equals(self, other) {
-    const unknown = other;
-    return unknown != null && self.code === unknown.code && self.version === unknown.version && equals2(self.multihash, unknown.multihash);
-  }
-  toString(base3) {
-    return format(this, base3);
-  }
-  toJSON() {
-    return { "/": format(this) };
-  }
-  link() {
-    return this;
-  }
-  [Symbol.toStringTag] = "CID";
-  // Legacy
-  [Symbol.for("nodejs.util.inspect.custom")]() {
-    return `CID(${this.toString()})`;
-  }
-  /**
-   * Takes any input `value` and returns a `CID` instance if it was
-   * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
-   * it will return value back. If `value` is not instance of this CID
-   * class, but is compatible CID it will return new instance of this
-   * `CID` class. Otherwise returns null.
-   *
-   * This allows two different incompatible versions of CID library to
-   * co-exist and interop as long as binary interface is compatible.
-   */
-  static asCID(input) {
-    if (input == null) {
-      return null;
-    }
-    const value = input;
-    if (value instanceof _CID) {
-      return value;
-    } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
-      const { version, code: code5, multihash, bytes } = value;
-      return new _CID(version, code5, multihash, bytes ?? encodeCID(version, code5, multihash.bytes));
-    } else if (value[cidSymbol] === true) {
-      const { version, multihash, code: code5 } = value;
-      const digest2 = decode5(multihash);
-      return _CID.create(version, code5, digest2);
-    } else {
-      return null;
-    }
-  }
-  /**
-   * @param version - Version of the CID
-   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param digest - (Multi)hash of the of the content.
-   */
-  static create(version, code5, digest2) {
-    if (typeof code5 !== "number") {
-      throw new Error("String codecs are no longer supported");
-    }
-    if (!(digest2.bytes instanceof Uint8Array)) {
-      throw new Error("Invalid digest");
-    }
-    switch (version) {
-      case 0: {
-        if (code5 !== DAG_PB_CODE) {
-          throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`);
-        } else {
-          return new _CID(version, code5, digest2, digest2.bytes);
-        }
-      }
-      case 1: {
-        const bytes = encodeCID(version, code5, digest2.bytes);
-        return new _CID(version, code5, digest2, bytes);
-      }
-      default: {
-        throw new Error("Invalid version");
-      }
-    }
-  }
-  /**
-   * Simplified version of `create` for CIDv0.
-   */
-  static createV0(digest2) {
-    return _CID.create(0, DAG_PB_CODE, digest2);
-  }
-  /**
-   * Simplified version of `create` for CIDv1.
-   *
-   * @param code - Content encoding format code.
-   * @param digest - Multihash of the content.
-   */
-  static createV1(code5, digest2) {
-    return _CID.create(1, code5, digest2);
-  }
-  /**
-   * Decoded a CID from its binary representation. The byte array must contain
-   * only the CID with no additional bytes.
-   *
-   * An error will be thrown if the bytes provided do not contain a valid
-   * binary representation of a CID.
-   */
-  static decode(bytes) {
-    const [cid, remainder] = _CID.decodeFirst(bytes);
-    if (remainder.length !== 0) {
-      throw new Error("Incorrect length");
-    }
-    return cid;
-  }
-  /**
-   * Decoded a CID from its binary representation at the beginning of a byte
-   * array.
-   *
-   * Returns an array with the first element containing the CID and the second
-   * element containing the remainder of the original byte array. The remainder
-   * will be a zero-length byte array if the provided bytes only contained a
-   * binary CID representation.
-   */
-  static decodeFirst(bytes) {
-    const specs = _CID.inspectBytes(bytes);
-    const prefixSize = specs.size - specs.multihashSize;
-    const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize));
-    if (multihashBytes.byteLength !== specs.multihashSize) {
-      throw new Error("Incorrect length");
-    }
-    const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize);
-    const digest2 = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes);
-    const cid = specs.version === 0 ? _CID.createV0(digest2) : _CID.createV1(specs.codec, digest2);
-    return [cid, bytes.subarray(specs.size)];
-  }
-  /**
-   * Inspect the initial bytes of a CID to determine its properties.
-   *
-   * Involves decoding up to 4 varints. Typically this will require only 4 to 6
-   * bytes but for larger multicodec code values and larger multihash digest
-   * lengths these varints can be quite large. It is recommended that at least
-   * 10 bytes be made available in the `initialBytes` argument for a complete
-   * inspection.
-   */
-  static inspectBytes(initialBytes) {
-    let offset = 0;
-    const next = () => {
-      const [i, length4] = decode4(initialBytes.subarray(offset));
-      offset += length4;
-      return i;
-    };
-    let version = next();
-    let codec = DAG_PB_CODE;
-    if (version === 18) {
-      version = 0;
-      offset = 0;
-    } else {
-      codec = next();
-    }
-    if (version !== 0 && version !== 1) {
-      throw new RangeError(`Invalid CID version ${version}`);
-    }
-    const prefixSize = offset;
-    const multihashCode = next();
-    const digestSize = next();
-    const size = offset + digestSize;
-    const multihashSize = size - prefixSize;
-    return { version, codec, multihashCode, digestSize, multihashSize, size };
-  }
-  /**
-   * Takes cid in a string representation and creates an instance. If `base`
-   * decoder is not provided will use a default from the configuration. It will
-   * throw an error if encoding of the CID is not compatible with supplied (or
-   * a default decoder).
-   */
-  static parse(source, base3) {
-    const [prefix, bytes] = parseCIDtoBytes(source, base3);
-    const cid = _CID.decode(bytes);
-    if (cid.version === 0 && source[0] !== "Q") {
-      throw Error("Version 0 CID string must not include multibase prefix");
-    }
-    baseCache(cid).set(prefix, source);
-    return cid;
-  }
-};
-function parseCIDtoBytes(source, base3) {
-  switch (source[0]) {
-    case "Q": {
-      const decoder = base3 ?? base58btc;
-      return [
-        base58btc.prefix,
-        decoder.decode(`${base58btc.prefix}${source}`)
-      ];
-    }
-    case base58btc.prefix: {
-      const decoder = base3 ?? base58btc;
-      return [base58btc.prefix, decoder.decode(source)];
-    }
-    case base32.prefix: {
-      const decoder = base3 ?? base32;
-      return [base32.prefix, decoder.decode(source)];
-    }
-    default: {
-      if (base3 == null) {
-        throw Error("To parse non base32 or base58btc encoded CID multibase decoder must be provided");
-      }
-      return [source[0], base3.decode(source)];
-    }
-  }
-}
-function toStringV0(bytes, cache3, base3) {
-  const { prefix } = base3;
-  if (prefix !== base58btc.prefix) {
-    throw Error(`Cannot string encode V0 in ${base3.name} encoding`);
-  }
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes).slice(1);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-}
-function toStringV1(bytes, cache3, base3) {
-  const { prefix } = base3;
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-}
-var DAG_PB_CODE = 112;
-var SHA_256_CODE = 18;
-function encodeCID(version, code5, multihash) {
-  const codeOffset = encodingLength(version);
-  const hashOffset = codeOffset + encodingLength(code5);
-  const bytes = new Uint8Array(hashOffset + multihash.byteLength);
-  encodeTo(version, bytes, 0);
-  encodeTo(code5, bytes, codeOffset);
-  bytes.set(multihash, hashOffset);
-  return bytes;
-}
-var cidSymbol = Symbol.for("@ipld/js-cid/CID");
-
-// node_modules/@ipld/dag-cbor/src/index.js
-var CID_CBOR_TAG = 42;
-function cidEncoder(obj) {
-  if (obj.asCID !== obj && obj["/"] !== obj.bytes) {
-    return null;
-  }
-  const cid = CID.asCID(obj);
-  if (!cid) {
-    return null;
-  }
-  const bytes = new Uint8Array(cid.bytes.byteLength + 1);
-  bytes.set(cid.bytes, 1);
-  return [
-    new Token(Type.tag, CID_CBOR_TAG),
-    new Token(Type.bytes, bytes)
-  ];
-}
-function undefinedEncoder() {
-  throw new Error("`undefined` is not supported by the IPLD Data Model and cannot be encoded");
-}
-function numberEncoder(num) {
-  if (Number.isNaN(num)) {
-    throw new Error("`NaN` is not supported by the IPLD Data Model and cannot be encoded");
-  }
-  if (num === Infinity || num === -Infinity) {
-    throw new Error("`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded");
-  }
-  return null;
-}
-var _encodeOptions = {
-  float64: true,
-  typeEncoders: {
-    Object: cidEncoder,
-    undefined: undefinedEncoder,
-    number: numberEncoder
-  }
-};
-var encodeOptions = {
-  ..._encodeOptions,
-  typeEncoders: {
-    ..._encodeOptions.typeEncoders
-  }
-};
-function cidDecoder(bytes) {
-  if (bytes[0] !== 0) {
-    throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
-  }
-  return CID.decode(bytes.subarray(1));
-}
-var _decodeOptions = {
-  allowIndefinite: false,
-  coerceUndefinedToNull: true,
-  allowNaN: false,
-  allowInfinity: false,
-  allowBigInt: true,
-  // this will lead to BigInt for ints outside of
-  // safe-integer range, which may surprise users
-  strict: true,
-  useMaps: false,
-  rejectDuplicateMapKeys: true,
-  /** @type {import('cborg').TagDecoder[]} */
-  tags: []
-};
-_decodeOptions.tags[CID_CBOR_TAG] = cidDecoder;
-var decodeOptions = {
-  ..._decodeOptions,
-  tags: _decodeOptions.tags.slice()
-};
-var code = 113;
-var encode4 = (node) => encode(node, _encodeOptions);
-var decode6 = (data) => decode(data, _decodeOptions);
-
-// node_modules/multiformats/src/bases/base32.js
-var base32_exports = {};
-__export(base32_exports, {
-  base32: () => base322,
-  base32hex: () => base32hex2,
-  base32hexpad: () => base32hexpad2,
-  base32hexpadupper: () => base32hexpadupper2,
-  base32hexupper: () => base32hexupper2,
-  base32pad: () => base32pad2,
-  base32padupper: () => base32padupper2,
-  base32upper: () => base32upper2,
-  base32z: () => base32z2
-});
-
-// node_modules/multiformats/vendor/base-x.js
-function base2(ALPHABET, name4) {
-  if (ALPHABET.length >= 255) {
-    throw new TypeError("Alphabet too long");
-  }
-  var BASE_MAP = new Uint8Array(256);
-  for (var j = 0; j < BASE_MAP.length; j++) {
-    BASE_MAP[j] = 255;
-  }
-  for (var i = 0; i < ALPHABET.length; i++) {
-    var x = ALPHABET.charAt(i);
-    var xc = x.charCodeAt(0);
-    if (BASE_MAP[xc] !== 255) {
-      throw new TypeError(x + " is ambiguous");
-    }
-    BASE_MAP[xc] = i;
-  }
-  var BASE = ALPHABET.length;
-  var LEADER = ALPHABET.charAt(0);
-  var FACTOR = Math.log(BASE) / Math.log(256);
-  var iFACTOR = Math.log(256) / Math.log(BASE);
-  function encode12(source) {
-    if (source instanceof Uint8Array)
-      ;
-    else if (ArrayBuffer.isView(source)) {
-      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
-    } else if (Array.isArray(source)) {
-      source = Uint8Array.from(source);
-    }
-    if (!(source instanceof Uint8Array)) {
-      throw new TypeError("Expected Uint8Array");
-    }
-    if (source.length === 0) {
-      return "";
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    var pbegin = 0;
-    var pend = source.length;
-    while (pbegin !== pend && source[pbegin] === 0) {
-      pbegin++;
-      zeroes++;
-    }
-    var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
-    var b58 = new Uint8Array(size);
-    while (pbegin !== pend) {
-      var carry = source[pbegin];
-      var i2 = 0;
-      for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) {
-        carry += 256 * b58[it1] >>> 0;
-        b58[it1] = carry % BASE >>> 0;
-        carry = carry / BASE >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      pbegin++;
-    }
-    var it2 = size - length4;
-    while (it2 !== size && b58[it2] === 0) {
-      it2++;
-    }
-    var str = LEADER.repeat(zeroes);
-    for (; it2 < size; ++it2) {
-      str += ALPHABET.charAt(b58[it2]);
-    }
-    return str;
-  }
-  function decodeUnsafe(source) {
-    if (typeof source !== "string") {
-      throw new TypeError("Expected String");
-    }
-    if (source.length === 0) {
-      return new Uint8Array();
-    }
-    var psz = 0;
-    if (source[psz] === " ") {
-      return;
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    while (source[psz] === LEADER) {
-      zeroes++;
-      psz++;
-    }
-    var size = (source.length - psz) * FACTOR + 1 >>> 0;
-    var b256 = new Uint8Array(size);
-    while (source[psz]) {
-      var carry = BASE_MAP[source.charCodeAt(psz)];
-      if (carry === 255) {
-        return;
-      }
-      var i2 = 0;
-      for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) {
-        carry += BASE * b256[it3] >>> 0;
-        b256[it3] = carry % 256 >>> 0;
-        carry = carry / 256 >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      psz++;
-    }
-    if (source[psz] === " ") {
-      return;
-    }
-    var it4 = size - length4;
-    while (it4 !== size && b256[it4] === 0) {
-      it4++;
-    }
-    var vch = new Uint8Array(zeroes + (size - it4));
-    var j2 = zeroes;
-    while (it4 !== size) {
-      vch[j2++] = b256[it4++];
-    }
-    return vch;
-  }
-  function decode15(string2) {
-    var buffer2 = decodeUnsafe(string2);
-    if (buffer2) {
-      return buffer2;
-    }
-    throw new Error(`Non-${name4} character`);
-  }
-  return {
-    encode: encode12,
-    decodeUnsafe,
-    decode: decode15
-  };
-}
-var src2 = base2;
-var _brrp__multiformats_scope_baseX2 = src2;
-var base_x_default2 = _brrp__multiformats_scope_baseX2;
-
-// node_modules/multiformats/src/bytes.js
-var bytes_exports2 = {};
-__export(bytes_exports2, {
-  coerce: () => coerce2,
-  empty: () => empty2,
-  equals: () => equals3,
-  fromHex: () => fromHex,
-  fromString: () => fromString2,
-  isBinary: () => isBinary,
-  toHex: () => toHex,
-  toString: () => toString2
-});
-var empty2 = new Uint8Array(0);
-var toHex = (d) => d.reduce((hex, byte) => hex + byte.toString(16).padStart(2, "0"), "");
-var fromHex = (hex) => {
-  const hexes = hex.match(/../g);
-  return hexes ? new Uint8Array(hexes.map((b) => parseInt(b, 16))) : empty2;
-};
-var equals3 = (aa, bb) => {
-  if (aa === bb)
-    return true;
-  if (aa.byteLength !== bb.byteLength) {
-    return false;
-  }
-  for (let ii = 0; ii < aa.byteLength; ii++) {
-    if (aa[ii] !== bb[ii]) {
-      return false;
-    }
-  }
-  return true;
-};
-var coerce2 = (o) => {
-  if (o instanceof Uint8Array && o.constructor.name === "Uint8Array")
-    return o;
-  if (o instanceof ArrayBuffer)
-    return new Uint8Array(o);
-  if (ArrayBuffer.isView(o)) {
-    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
-  }
-  throw new Error("Unknown type, must be binary type");
-};
-var isBinary = (o) => o instanceof ArrayBuffer || ArrayBuffer.isView(o);
-var fromString2 = (str) => new TextEncoder().encode(str);
-var toString2 = (b) => new TextDecoder().decode(b);
-
-// node_modules/multiformats/src/bases/base.js
-var Encoder2 = class {
-  /**
-   * @param {Base} name
-   * @param {Prefix} prefix
-   * @param {(bytes:Uint8Array) => string} baseEncode
-   */
-  constructor(name4, prefix, baseEncode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-  }
-  /**
-   * @param {Uint8Array} bytes
-   * @returns {API.Multibase}
-   */
-  encode(bytes) {
-    if (bytes instanceof Uint8Array) {
-      return `${this.prefix}${this.baseEncode(bytes)}`;
-    } else {
-      throw Error("Unknown type, must be binary type");
-    }
-  }
-};
-var Decoder2 = class {
-  /**
-   * @param {Base} name
-   * @param {Prefix} prefix
-   * @param {(text:string) => Uint8Array} baseDecode
-   */
-  constructor(name4, prefix, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    if (prefix.codePointAt(0) === void 0) {
-      throw new Error("Invalid prefix character");
-    }
-    this.prefixCodePoint = /** @type {number} */
-    prefix.codePointAt(0);
-    this.baseDecode = baseDecode;
-  }
-  /**
-   * @param {string} text
-   */
-  decode(text) {
-    if (typeof text === "string") {
-      if (text.codePointAt(0) !== this.prefixCodePoint) {
-        throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
-      }
-      return this.baseDecode(text.slice(this.prefix.length));
-    } else {
-      throw Error("Can only multibase decode strings");
-    }
-  }
-  /**
-   * @template {string} OtherPrefix
-   * @param {API.UnibaseDecoder|ComposedDecoder} decoder
-   * @returns {ComposedDecoder}
-   */
-  or(decoder) {
-    return or2(this, decoder);
-  }
-};
-var ComposedDecoder2 = class {
-  /**
-   * @param {Decoders} decoders
-   */
-  constructor(decoders) {
-    this.decoders = decoders;
-  }
-  /**
-   * @template {string} OtherPrefix
-   * @param {API.UnibaseDecoder|ComposedDecoder} decoder
-   * @returns {ComposedDecoder}
-   */
-  or(decoder) {
-    return or2(this, decoder);
-  }
-  /**
-   * @param {string} input
-   * @returns {Uint8Array}
-   */
-  decode(input) {
-    const prefix = (
-      /** @type {Prefix} */
-      input[0]
-    );
-    const decoder = this.decoders[prefix];
-    if (decoder) {
-      return decoder.decode(input);
-    } else {
-      throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
-    }
-  }
-};
-var or2 = (left, right) => new ComposedDecoder2(
-  /** @type {Decoders} */
-  {
-    ...left.decoders || { [
-      /** @type API.UnibaseDecoder */
-      left.prefix
-    ]: left },
-    ...right.decoders || { [
-      /** @type API.UnibaseDecoder */
-      right.prefix
-    ]: right }
-  }
-);
-var Codec2 = class {
-  /**
-   * @param {Base} name
-   * @param {Prefix} prefix
-   * @param {(bytes:Uint8Array) => string} baseEncode
-   * @param {(text:string) => Uint8Array} baseDecode
-   */
-  constructor(name4, prefix, baseEncode, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-    this.baseDecode = baseDecode;
-    this.encoder = new Encoder2(name4, prefix, baseEncode);
-    this.decoder = new Decoder2(name4, prefix, baseDecode);
-  }
-  /**
-   * @param {Uint8Array} input
-   */
-  encode(input) {
-    return this.encoder.encode(input);
-  }
-  /**
-   * @param {string} input
-   */
-  decode(input) {
-    return this.decoder.decode(input);
-  }
-};
-var from2 = ({ name: name4, prefix, encode: encode12, decode: decode15 }) => new Codec2(name4, prefix, encode12, decode15);
-var baseX2 = ({ prefix, name: name4, alphabet: alphabet2 }) => {
-  const { encode: encode12, decode: decode15 } = base_x_default2(alphabet2, name4);
-  return from2({
-    prefix,
-    name: name4,
-    encode: encode12,
-    /**
-     * @param {string} text
-     */
-    decode: (text) => coerce2(decode15(text))
-  });
-};
-var decode7 = (string2, alphabet2, bitsPerChar, name4) => {
-  const codes = {};
-  for (let i = 0; i < alphabet2.length; ++i) {
-    codes[alphabet2[i]] = i;
-  }
-  let end = string2.length;
-  while (string2[end - 1] === "=") {
-    --end;
-  }
-  const out = new Uint8Array(end * bitsPerChar / 8 | 0);
-  let bits = 0;
-  let buffer2 = 0;
-  let written = 0;
-  for (let i = 0; i < end; ++i) {
-    const value = codes[string2[i]];
-    if (value === void 0) {
-      throw new SyntaxError(`Non-${name4} character`);
-    }
-    buffer2 = buffer2 << bitsPerChar | value;
-    bits += bitsPerChar;
-    if (bits >= 8) {
-      bits -= 8;
-      out[written++] = 255 & buffer2 >> bits;
-    }
-  }
-  if (bits >= bitsPerChar || 255 & buffer2 << 8 - bits) {
-    throw new SyntaxError("Unexpected end of data");
-  }
-  return out;
-};
-var encode5 = (data, alphabet2, bitsPerChar) => {
-  const pad = alphabet2[alphabet2.length - 1] === "=";
-  const mask = (1 << bitsPerChar) - 1;
-  let out = "";
-  let bits = 0;
-  let buffer2 = 0;
-  for (let i = 0; i < data.length; ++i) {
-    buffer2 = buffer2 << 8 | data[i];
-    bits += 8;
-    while (bits > bitsPerChar) {
-      bits -= bitsPerChar;
-      out += alphabet2[mask & buffer2 >> bits];
-    }
-  }
-  if (bits) {
-    out += alphabet2[mask & buffer2 << bitsPerChar - bits];
-  }
-  if (pad) {
-    while (out.length * bitsPerChar & 7) {
-      out += "=";
-    }
-  }
-  return out;
-};
-var rfc46482 = ({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) => {
-  return from2({
-    prefix,
-    name: name4,
-    encode(input) {
-      return encode5(input, alphabet2, bitsPerChar);
-    },
-    decode(input) {
-      return decode7(input, alphabet2, bitsPerChar, name4);
-    }
-  });
-};
-
-// node_modules/multiformats/src/bases/base32.js
-var base322 = rfc46482({
-  prefix: "b",
-  name: "base32",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567",
-  bitsPerChar: 5
-});
-var base32upper2 = rfc46482({
-  prefix: "B",
-  name: "base32upper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
-  bitsPerChar: 5
-});
-var base32pad2 = rfc46482({
-  prefix: "c",
-  name: "base32pad",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
-  bitsPerChar: 5
-});
-var base32padupper2 = rfc46482({
-  prefix: "C",
-  name: "base32padupper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
-  bitsPerChar: 5
-});
-var base32hex2 = rfc46482({
-  prefix: "v",
-  name: "base32hex",
-  alphabet: "0123456789abcdefghijklmnopqrstuv",
-  bitsPerChar: 5
-});
-var base32hexupper2 = rfc46482({
-  prefix: "V",
-  name: "base32hexupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
-  bitsPerChar: 5
-});
-var base32hexpad2 = rfc46482({
-  prefix: "t",
-  name: "base32hexpad",
-  alphabet: "0123456789abcdefghijklmnopqrstuv=",
-  bitsPerChar: 5
-});
-var base32hexpadupper2 = rfc46482({
-  prefix: "T",
-  name: "base32hexpadupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
-  bitsPerChar: 5
-});
-var base32z2 = rfc46482({
-  prefix: "h",
-  name: "base32z",
-  alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
-  bitsPerChar: 5
-});
-
-// node_modules/multiformats/src/bases/base58.js
-var base58_exports = {};
-__export(base58_exports, {
-  base58btc: () => base58btc2,
-  base58flickr: () => base58flickr2
-});
-var base58btc2 = baseX2({
-  name: "base58btc",
-  prefix: "z",
-  alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
-});
-var base58flickr2 = baseX2({
-  name: "base58flickr",
-  prefix: "Z",
-  alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
-});
-
-// node_modules/multiformats/vendor/varint.js
-var encode_12 = encode6;
-var MSB2 = 128;
-var REST2 = 127;
-var MSBALL2 = ~REST2;
-var INT2 = Math.pow(2, 31);
-function encode6(num, out, offset) {
-  out = out || [];
-  offset = offset || 0;
-  var oldOffset = offset;
-  while (num >= INT2) {
-    out[offset++] = num & 255 | MSB2;
-    num /= 128;
-  }
-  while (num & MSBALL2) {
-    out[offset++] = num & 255 | MSB2;
-    num >>>= 7;
-  }
-  out[offset] = num | 0;
-  encode6.bytes = offset - oldOffset + 1;
-  return out;
-}
-var decode8 = read2;
-var MSB$12 = 128;
-var REST$12 = 127;
-function read2(buf2, offset) {
-  var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
-  do {
-    if (counter >= l) {
-      read2.bytes = 0;
-      throw new RangeError("Could not decode varint");
-    }
-    b = buf2[counter++];
-    res += shift < 28 ? (b & REST$12) << shift : (b & REST$12) * Math.pow(2, shift);
-    shift += 7;
-  } while (b >= MSB$12);
-  read2.bytes = counter - offset;
-  return res;
-}
-var N12 = Math.pow(2, 7);
-var N22 = Math.pow(2, 14);
-var N32 = Math.pow(2, 21);
-var N42 = Math.pow(2, 28);
-var N52 = Math.pow(2, 35);
-var N62 = Math.pow(2, 42);
-var N72 = Math.pow(2, 49);
-var N82 = Math.pow(2, 56);
-var N92 = Math.pow(2, 63);
-var length2 = function(value) {
-  return value < N12 ? 1 : value < N22 ? 2 : value < N32 ? 3 : value < N42 ? 4 : value < N52 ? 5 : value < N62 ? 6 : value < N72 ? 7 : value < N82 ? 8 : value < N92 ? 9 : 10;
-};
-var varint2 = {
-  encode: encode_12,
-  decode: decode8,
-  encodingLength: length2
-};
-var _brrp_varint2 = varint2;
-var varint_default2 = _brrp_varint2;
-
-// node_modules/multiformats/src/varint.js
-var decode9 = (data, offset = 0) => {
-  const code5 = varint_default2.decode(data, offset);
-  return [code5, varint_default2.decode.bytes];
-};
-var encodeTo2 = (int, target, offset = 0) => {
-  varint_default2.encode(int, target, offset);
-  return target;
-};
-var encodingLength2 = (int) => {
-  return varint_default2.encodingLength(int);
-};
-
-// node_modules/multiformats/src/hashes/digest.js
-var create2 = (code5, digest2) => {
-  const size = digest2.byteLength;
-  const sizeOffset = encodingLength2(code5);
-  const digestOffset = sizeOffset + encodingLength2(size);
-  const bytes = new Uint8Array(digestOffset + size);
-  encodeTo2(code5, bytes, 0);
-  encodeTo2(size, bytes, sizeOffset);
-  bytes.set(digest2, digestOffset);
-  return new Digest2(code5, size, digest2, bytes);
-};
-var decode10 = (multihash) => {
-  const bytes = coerce2(multihash);
-  const [code5, sizeOffset] = decode9(bytes);
-  const [size, digestOffset] = decode9(bytes.subarray(sizeOffset));
-  const digest2 = bytes.subarray(sizeOffset + digestOffset);
-  if (digest2.byteLength !== size) {
-    throw new Error("Incorrect length");
-  }
-  return new Digest2(code5, size, digest2, bytes);
-};
-var equals4 = (a, b) => {
-  if (a === b) {
-    return true;
-  } else {
-    const data = (
-      /** @type {{code?:unknown, size?:unknown, bytes?:unknown}} */
-      b
-    );
-    return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals3(a.bytes, data.bytes);
-  }
-};
-var Digest2 = class {
-  /**
-   * Creates a multihash digest.
-   *
-   * @param {Code} code
-   * @param {Size} size
-   * @param {Uint8Array} digest
-   * @param {Uint8Array} bytes
-   */
-  constructor(code5, size, digest2, bytes) {
-    this.code = code5;
-    this.size = size;
-    this.digest = digest2;
-    this.bytes = bytes;
-  }
-};
-
-// node_modules/multiformats/src/cid.js
-var format2 = (link, base3) => {
-  const { bytes, version } = link;
-  switch (version) {
-    case 0:
-      return toStringV02(
-        bytes,
-        baseCache2(link),
-        /** @type {API.MultibaseEncoder<"z">} */
-        base3 || base58btc2.encoder
-      );
-    default:
-      return toStringV12(
-        bytes,
-        baseCache2(link),
-        /** @type {API.MultibaseEncoder} */
-        base3 || base322.encoder
-      );
-  }
-};
-var cache2 = /* @__PURE__ */ new WeakMap();
-var baseCache2 = (cid) => {
-  const baseCache3 = cache2.get(cid);
-  if (baseCache3 == null) {
-    const baseCache4 = /* @__PURE__ */ new Map();
-    cache2.set(cid, baseCache4);
-    return baseCache4;
-  }
-  return baseCache3;
-};
-var CID2 = class _CID {
-  /**
-   * @param {Version} version - Version of the CID
-   * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param {API.MultihashDigest} multihash - (Multi)hash of the of the content.
-   * @param {Uint8Array} bytes
-   */
-  constructor(version, code5, multihash, bytes) {
-    this.code = code5;
-    this.version = version;
-    this.multihash = multihash;
-    this.bytes = bytes;
-    this["/"] = bytes;
-  }
-  /**
-   * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
-   * please either use `CID.asCID(cid)` or switch to new signalling mechanism
-   *
-   * @deprecated
-   */
-  get asCID() {
-    return this;
-  }
-  // ArrayBufferView
-  get byteOffset() {
-    return this.bytes.byteOffset;
-  }
-  // ArrayBufferView
-  get byteLength() {
-    return this.bytes.byteLength;
-  }
-  /**
-   * @returns {CID}
-   */
-  toV0() {
-    switch (this.version) {
-      case 0: {
-        return (
-          /** @type {CID} */
-          this
-        );
-      }
-      case 1: {
-        const { code: code5, multihash } = this;
-        if (code5 !== DAG_PB_CODE2) {
-          throw new Error("Cannot convert a non dag-pb CID to CIDv0");
-        }
-        if (multihash.code !== SHA_256_CODE2) {
-          throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
-        }
-        return (
-          /** @type {CID} */
-          _CID.createV0(
-            /** @type {API.MultihashDigest} */
-            multihash
-          )
-        );
-      }
-      default: {
-        throw Error(
-          `Can not convert CID version ${this.version} to version 0. This is a bug please report`
-        );
-      }
-    }
-  }
-  /**
-   * @returns {CID}
-   */
-  toV1() {
-    switch (this.version) {
-      case 0: {
-        const { code: code5, digest: digest2 } = this.multihash;
-        const multihash = create2(code5, digest2);
-        return (
-          /** @type {CID} */
-          _CID.createV1(this.code, multihash)
-        );
-      }
-      case 1: {
-        return (
-          /** @type {CID} */
-          this
-        );
-      }
-      default: {
-        throw Error(
-          `Can not convert CID version ${this.version} to version 1. This is a bug please report`
-        );
-      }
-    }
-  }
-  /**
-   * @param {unknown} other
-   * @returns {other is CID}
-   */
-  equals(other) {
-    return _CID.equals(this, other);
-  }
-  /**
-   * @template {unknown} Data
-   * @template {number} Format
-   * @template {number} Alg
-   * @template {API.Version} Version
-   * @param {API.Link} self
-   * @param {unknown} other
-   * @returns {other is CID}
-   */
-  static equals(self, other) {
-    const unknown = (
-      /** @type {{code?:unknown, version?:unknown, multihash?:unknown}} */
-      other
-    );
-    return unknown && self.code === unknown.code && self.version === unknown.version && equals4(self.multihash, unknown.multihash);
-  }
-  /**
-   * @param {API.MultibaseEncoder} [base]
-   * @returns {string}
-   */
-  toString(base3) {
-    return format2(this, base3);
-  }
-  /**
-   * @returns {API.LinkJSON}
-   */
-  toJSON() {
-    return { "/": format2(this) };
-  }
-  link() {
-    return this;
-  }
-  get [Symbol.toStringTag]() {
-    return "CID";
-  }
-  // Legacy
-  [Symbol.for("nodejs.util.inspect.custom")]() {
-    return `CID(${this.toString()})`;
-  }
-  /**
-   * Takes any input `value` and returns a `CID` instance if it was
-   * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
-   * it will return value back. If `value` is not instance of this CID
-   * class, but is compatible CID it will return new instance of this
-   * `CID` class. Otherwise returns null.
-   *
-   * This allows two different incompatible versions of CID library to
-   * co-exist and interop as long as binary interface is compatible.
-   *
-   * @template {unknown} Data
-   * @template {number} Format
-   * @template {number} Alg
-   * @template {API.Version} Version
-   * @template {unknown} U
-   * @param {API.Link|U} input
-   * @returns {CID|null}
-   */
-  static asCID(input) {
-    if (input == null) {
-      return null;
-    }
-    const value = (
-      /** @type {any} */
-      input
-    );
-    if (value instanceof _CID) {
-      return value;
-    } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
-      const { version, code: code5, multihash, bytes } = value;
-      return new _CID(
-        version,
-        code5,
-        /** @type {API.MultihashDigest} */
-        multihash,
-        bytes || encodeCID2(version, code5, multihash.bytes)
-      );
-    } else if (value[cidSymbol2] === true) {
-      const { version, multihash, code: code5 } = value;
-      const digest2 = (
-        /** @type {API.MultihashDigest} */
-        decode10(multihash)
-      );
-      return _CID.create(version, code5, digest2);
-    } else {
-      return null;
-    }
-  }
-  /**
-   *
-   * @template {unknown} Data
-   * @template {number} Format
-   * @template {number} Alg
-   * @template {API.Version} Version
-   * @param {Version} version - Version of the CID
-   * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param {API.MultihashDigest} digest - (Multi)hash of the of the content.
-   * @returns {CID}
-   */
-  static create(version, code5, digest2) {
-    if (typeof code5 !== "number") {
-      throw new Error("String codecs are no longer supported");
-    }
-    if (!(digest2.bytes instanceof Uint8Array)) {
-      throw new Error("Invalid digest");
-    }
-    switch (version) {
-      case 0: {
-        if (code5 !== DAG_PB_CODE2) {
-          throw new Error(
-            `Version 0 CID must use dag-pb (code: ${DAG_PB_CODE2}) block encoding`
-          );
-        } else {
-          return new _CID(version, code5, digest2, digest2.bytes);
-        }
-      }
-      case 1: {
-        const bytes = encodeCID2(version, code5, digest2.bytes);
-        return new _CID(version, code5, digest2, bytes);
-      }
-      default: {
-        throw new Error("Invalid version");
-      }
-    }
-  }
-  /**
-   * Simplified version of `create` for CIDv0.
-   *
-   * @template {unknown} [T=unknown]
-   * @param {API.MultihashDigest} digest - Multihash.
-   * @returns {CID}
-   */
-  static createV0(digest2) {
-    return _CID.create(0, DAG_PB_CODE2, digest2);
-  }
-  /**
-   * Simplified version of `create` for CIDv1.
-   *
-   * @template {unknown} Data
-   * @template {number} Code
-   * @template {number} Alg
-   * @param {Code} code - Content encoding format code.
-   * @param {API.MultihashDigest} digest - Miltihash of the content.
-   * @returns {CID}
-   */
-  static createV1(code5, digest2) {
-    return _CID.create(1, code5, digest2);
-  }
-  /**
-   * Decoded a CID from its binary representation. The byte array must contain
-   * only the CID with no additional bytes.
-   *
-   * An error will be thrown if the bytes provided do not contain a valid
-   * binary representation of a CID.
-   *
-   * @template {unknown} Data
-   * @template {number} Code
-   * @template {number} Alg
-   * @template {API.Version} Ver
-   * @param {API.ByteView>} bytes
-   * @returns {CID}
-   */
-  static decode(bytes) {
-    const [cid, remainder] = _CID.decodeFirst(bytes);
-    if (remainder.length) {
-      throw new Error("Incorrect length");
-    }
-    return cid;
-  }
-  /**
-   * Decoded a CID from its binary representation at the beginning of a byte
-   * array.
-   *
-   * Returns an array with the first element containing the CID and the second
-   * element containing the remainder of the original byte array. The remainder
-   * will be a zero-length byte array if the provided bytes only contained a
-   * binary CID representation.
-   *
-   * @template {unknown} T
-   * @template {number} C
-   * @template {number} A
-   * @template {API.Version} V
-   * @param {API.ByteView>} bytes
-   * @returns {[CID, Uint8Array]}
-   */
-  static decodeFirst(bytes) {
-    const specs = _CID.inspectBytes(bytes);
-    const prefixSize = specs.size - specs.multihashSize;
-    const multihashBytes = coerce2(
-      bytes.subarray(prefixSize, prefixSize + specs.multihashSize)
-    );
-    if (multihashBytes.byteLength !== specs.multihashSize) {
-      throw new Error("Incorrect length");
-    }
-    const digestBytes = multihashBytes.subarray(
-      specs.multihashSize - specs.digestSize
-    );
-    const digest2 = new Digest2(
-      specs.multihashCode,
-      specs.digestSize,
-      digestBytes,
-      multihashBytes
-    );
-    const cid = specs.version === 0 ? _CID.createV0(
-      /** @type {API.MultihashDigest} */
-      digest2
-    ) : _CID.createV1(specs.codec, digest2);
-    return [
-      /** @type {CID} */
-      cid,
-      bytes.subarray(specs.size)
-    ];
-  }
-  /**
-   * Inspect the initial bytes of a CID to determine its properties.
-   *
-   * Involves decoding up to 4 varints. Typically this will require only 4 to 6
-   * bytes but for larger multicodec code values and larger multihash digest
-   * lengths these varints can be quite large. It is recommended that at least
-   * 10 bytes be made available in the `initialBytes` argument for a complete
-   * inspection.
-   *
-   * @template {unknown} T
-   * @template {number} C
-   * @template {number} A
-   * @template {API.Version} V
-   * @param {API.ByteView>} initialBytes
-   * @returns {{ version:V, codec:C, multihashCode:A, digestSize:number, multihashSize:number, size:number }}
-   */
-  static inspectBytes(initialBytes) {
-    let offset = 0;
-    const next = () => {
-      const [i, length4] = decode9(initialBytes.subarray(offset));
-      offset += length4;
-      return i;
-    };
-    let version = (
-      /** @type {V} */
-      next()
-    );
-    let codec = (
-      /** @type {C} */
-      DAG_PB_CODE2
-    );
-    if (
-      /** @type {number} */
-      version === 18
-    ) {
-      version = /** @type {V} */
-      0;
-      offset = 0;
-    } else {
-      codec = /** @type {C} */
-      next();
-    }
-    if (version !== 0 && version !== 1) {
-      throw new RangeError(`Invalid CID version ${version}`);
-    }
-    const prefixSize = offset;
-    const multihashCode = (
-      /** @type {A} */
-      next()
-    );
-    const digestSize = next();
-    const size = offset + digestSize;
-    const multihashSize = size - prefixSize;
-    return { version, codec, multihashCode, digestSize, multihashSize, size };
-  }
-  /**
-   * Takes cid in a string representation and creates an instance. If `base`
-   * decoder is not provided will use a default from the configuration. It will
-   * throw an error if encoding of the CID is not compatible with supplied (or
-   * a default decoder).
-   *
-   * @template {string} Prefix
-   * @template {unknown} Data
-   * @template {number} Code
-   * @template {number} Alg
-   * @template {API.Version} Ver
-   * @param {API.ToString, Prefix>} source
-   * @param {API.MultibaseDecoder} [base]
-   * @returns {CID}
-   */
-  static parse(source, base3) {
-    const [prefix, bytes] = parseCIDtoBytes2(source, base3);
-    const cid = _CID.decode(bytes);
-    if (cid.version === 0 && source[0] !== "Q") {
-      throw Error("Version 0 CID string must not include multibase prefix");
-    }
-    baseCache2(cid).set(prefix, source);
-    return cid;
-  }
-};
-var parseCIDtoBytes2 = (source, base3) => {
-  switch (source[0]) {
-    case "Q": {
-      const decoder = base3 || base58btc2;
-      return [
-        /** @type {Prefix} */
-        base58btc2.prefix,
-        decoder.decode(`${base58btc2.prefix}${source}`)
-      ];
-    }
-    case base58btc2.prefix: {
-      const decoder = base3 || base58btc2;
-      return [
-        /** @type {Prefix} */
-        base58btc2.prefix,
-        decoder.decode(source)
-      ];
-    }
-    case base322.prefix: {
-      const decoder = base3 || base322;
-      return [
-        /** @type {Prefix} */
-        base322.prefix,
-        decoder.decode(source)
-      ];
-    }
-    default: {
-      if (base3 == null) {
-        throw Error(
-          "To parse non base32 or base58btc encoded CID multibase decoder must be provided"
-        );
-      }
-      return [
-        /** @type {Prefix} */
-        source[0],
-        base3.decode(source)
-      ];
-    }
-  }
-};
-var toStringV02 = (bytes, cache3, base3) => {
-  const { prefix } = base3;
-  if (prefix !== base58btc2.prefix) {
-    throw Error(`Cannot string encode V0 in ${base3.name} encoding`);
-  }
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes).slice(1);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-};
-var toStringV12 = (bytes, cache3, base3) => {
-  const { prefix } = base3;
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-};
-var DAG_PB_CODE2 = 112;
-var SHA_256_CODE2 = 18;
-var encodeCID2 = (version, code5, multihash) => {
-  const codeOffset = encodingLength2(version);
-  const hashOffset = codeOffset + encodingLength2(code5);
-  const bytes = new Uint8Array(hashOffset + multihash.byteLength);
-  encodeTo2(version, bytes, 0);
-  encodeTo2(code5, bytes, codeOffset);
-  bytes.set(multihash, hashOffset);
-  return bytes;
-};
-var cidSymbol2 = Symbol.for("@ipld/js-cid/CID");
-
-// node_modules/@ipld/car/src/decoder-common.js
-var import_varint3 = __toESM(require_varint(), 1);
-var V2_HEADER_LENGTH = (
-  /* characteristics */
-  16 + 8 + 8 + 8
-);
-function decodeVarint(bytes, seeker) {
-  if (!bytes.length) {
-    throw new Error("Unexpected end of data");
-  }
-  const i = import_varint3.default.decode(bytes);
-  seeker.seek(
-    /** @type {number} */
-    import_varint3.default.decode.bytes
-  );
-  return i;
-}
-function decodeV2Header(bytes) {
-  const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
-  let offset = 0;
-  const header = {
-    version: 2,
-    /** @type {[bigint, bigint]} */
-    characteristics: [
-      dv.getBigUint64(offset, true),
-      dv.getBigUint64(offset += 8, true)
-    ],
-    dataOffset: Number(dv.getBigUint64(offset += 8, true)),
-    dataSize: Number(dv.getBigUint64(offset += 8, true)),
-    indexOffset: Number(dv.getBigUint64(offset += 8, true))
-  };
-  return header;
-}
-
-// node_modules/@ipld/car/src/header-validator.js
-var Kinds = {
-  Null: (
-    /** @returns {undefined|null} */
-    (obj) => obj === null ? obj : void 0
-  ),
-  Int: (
-    /** @returns {undefined|number} */
-    (obj) => Number.isInteger(obj) ? obj : void 0
-  ),
-  Float: (
-    /** @returns {undefined|number} */
-    (obj) => typeof obj === "number" && Number.isFinite(obj) ? obj : void 0
-  ),
-  String: (
-    /** @returns {undefined|string} */
-    (obj) => typeof obj === "string" ? obj : void 0
-  ),
-  Bool: (
-    /** @returns {undefined|boolean} */
-    (obj) => typeof obj === "boolean" ? obj : void 0
-  ),
-  Bytes: (
-    /** @returns {undefined|Uint8Array} */
-    (obj) => obj instanceof Uint8Array ? obj : void 0
-  ),
-  Link: (
-    /** @returns {undefined|object} */
-    (obj) => obj !== null && typeof obj === "object" && obj.asCID === obj ? obj : void 0
-  ),
-  List: (
-    /** @returns {undefined|Array} */
-    (obj) => Array.isArray(obj) ? obj : void 0
-  ),
-  Map: (
-    /** @returns {undefined|object} */
-    (obj) => obj !== null && typeof obj === "object" && obj.asCID !== obj && !Array.isArray(obj) && !(obj instanceof Uint8Array) ? obj : void 0
-  )
-};
-var Types = {
-  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
-  "CarV1HeaderOrV2Pragma > roots (anon)": (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.List(obj) === void 0) {
-        return void 0;
-      }
-      for (let i = 0; i < obj.length; i++) {
-        let v = obj[i];
-        v = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
-        if (v === void 0) {
-          return void 0;
-        }
-        if (v !== obj[i]) {
-          const ret = obj.slice(0, i);
-          for (let j = i; j < obj.length; j++) {
-            let v2 = obj[j];
-            v2 = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
-            if (v2 === void 0) {
-              return void 0;
-            }
-            ret.push(v2);
-          }
-          return ret;
-        }
-      }
-      return obj;
-    }
-  ),
-  Int: Kinds.Int,
-  CarV1HeaderOrV2Pragma: (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.Map(obj) === void 0) {
-        return void 0;
-      }
-      const entries = Object.entries(obj);
-      let ret = obj;
-      let requiredCount = 1;
-      for (let i = 0; i < entries.length; i++) {
-        const [key, value] = entries[i];
-        switch (key) {
-          case "roots":
-            {
-              const v = Types["CarV1HeaderOrV2Pragma > roots (anon)"](obj[key]);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.roots = v;
-              }
-            }
-            break;
-          case "version":
-            {
-              requiredCount--;
-              const v = Types.Int(obj[key]);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.version = v;
-              }
-            }
-            break;
-          default:
-            return void 0;
-        }
-      }
-      if (requiredCount > 0) {
-        return void 0;
-      }
-      return ret;
-    }
-  )
-};
-var Reprs = {
-  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
-  "CarV1HeaderOrV2Pragma > roots (anon)": (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.List(obj) === void 0) {
-        return void 0;
-      }
-      for (let i = 0; i < obj.length; i++) {
-        let v = obj[i];
-        v = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
-        if (v === void 0) {
-          return void 0;
-        }
-        if (v !== obj[i]) {
-          const ret = obj.slice(0, i);
-          for (let j = i; j < obj.length; j++) {
-            let v2 = obj[j];
-            v2 = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
-            if (v2 === void 0) {
-              return void 0;
-            }
-            ret.push(v2);
-          }
-          return ret;
-        }
-      }
-      return obj;
-    }
-  ),
-  Int: Kinds.Int,
-  CarV1HeaderOrV2Pragma: (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.Map(obj) === void 0) {
-        return void 0;
-      }
-      const entries = Object.entries(obj);
-      let ret = obj;
-      let requiredCount = 1;
-      for (let i = 0; i < entries.length; i++) {
-        const [key, value] = entries[i];
-        switch (key) {
-          case "roots":
-            {
-              const v = Reprs["CarV1HeaderOrV2Pragma > roots (anon)"](value);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.roots = v;
-              }
-            }
-            break;
-          case "version":
-            {
-              requiredCount--;
-              const v = Reprs.Int(value);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.version = v;
-              }
-            }
-            break;
-          default:
-            return void 0;
-        }
-      }
-      if (requiredCount > 0) {
-        return void 0;
-      }
-      return ret;
-    }
-  )
-};
-var CarV1HeaderOrV2Pragma = {
-  toTyped: Types.CarV1HeaderOrV2Pragma,
-  toRepresentation: Reprs.CarV1HeaderOrV2Pragma
-};
-
-// node_modules/@ipld/car/src/buffer-reader.js
-var fsread = import_fs.default.readSync;
-
-// node_modules/cborg/lib/length.js
-var cborEncoders2 = makeCborEncoders();
-
-// node_modules/@ipld/car/src/buffer-writer.js
-var import_varint4 = __toESM(require_varint(), 1);
-var headerPreludeTokens = [
-  new Token(Type.map, 2),
-  new Token(Type.string, "version"),
-  new Token(Type.uint, 1),
-  new Token(Type.string, "roots")
-];
-var CID_TAG = new Token(Type.tag, 42);
-
-// node_modules/@ipld/car/src/decoder.js
-async function readHeader(reader, strictVersion) {
-  const length4 = decodeVarint(await reader.upTo(8), reader);
-  if (length4 === 0) {
-    throw new Error("Invalid CAR header (zero length)");
-  }
-  const header = await reader.exactly(length4, true);
-  const block = decode6(header);
-  if (CarV1HeaderOrV2Pragma.toTyped(block) === void 0) {
-    throw new Error("Invalid CAR header format");
-  }
-  if (block.version !== 1 && block.version !== 2 || strictVersion !== void 0 && block.version !== strictVersion) {
-    throw new Error(`Invalid CAR version: ${block.version}${strictVersion !== void 0 ? ` (expected ${strictVersion})` : ""}`);
-  }
-  if (block.version === 1) {
-    if (!Array.isArray(block.roots)) {
-      throw new Error("Invalid CAR header format");
-    }
-    return block;
-  }
-  if (block.roots !== void 0) {
-    throw new Error("Invalid CAR header format");
-  }
-  const v2Header = decodeV2Header(await reader.exactly(V2_HEADER_LENGTH, true));
-  reader.seek(v2Header.dataOffset - reader.pos);
-  const v1Header = await readHeader(reader, 1);
-  return Object.assign(v1Header, v2Header);
-}
-function bytesReader(bytes) {
-  let pos = 0;
-  return {
-    async upTo(length4) {
-      const out = bytes.subarray(pos, pos + Math.min(length4, bytes.length - pos));
-      return out;
-    },
-    async exactly(length4, seek = false) {
-      if (length4 > bytes.length - pos) {
-        throw new Error("Unexpected end of data");
-      }
-      const out = bytes.subarray(pos, pos + length4);
-      if (seek) {
-        pos += length4;
-      }
-      return out;
-    },
-    seek(length4) {
-      pos += length4;
-    },
-    get pos() {
-      return pos;
-    }
-  };
-}
-function chunkReader(readChunk) {
-  let pos = 0;
-  let have = 0;
-  let offset = 0;
-  let currentChunk = new Uint8Array(0);
-  const read4 = async (length4) => {
-    have = currentChunk.length - offset;
-    const bufa = [currentChunk.subarray(offset)];
-    while (have < length4) {
-      const chunk = await readChunk();
-      if (chunk == null) {
-        break;
-      }
-      if (have < 0) {
-        if (chunk.length > have) {
-          bufa.push(chunk.subarray(-have));
-        }
-      } else {
-        bufa.push(chunk);
-      }
-      have += chunk.length;
-    }
-    currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0));
-    let off = 0;
-    for (const b of bufa) {
-      currentChunk.set(b, off);
-      off += b.length;
-    }
-    offset = 0;
-  };
-  return {
-    async upTo(length4) {
-      if (currentChunk.length - offset < length4) {
-        await read4(length4);
-      }
-      return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length4));
-    },
-    async exactly(length4, seek = false) {
-      if (currentChunk.length - offset < length4) {
-        await read4(length4);
-      }
-      if (currentChunk.length - offset < length4) {
-        throw new Error("Unexpected end of data");
-      }
-      const out = currentChunk.subarray(offset, offset + length4);
-      if (seek) {
-        pos += length4;
-        offset += length4;
-      }
-      return out;
-    },
-    seek(length4) {
-      pos += length4;
-      offset += length4;
-    },
-    get pos() {
-      return pos;
-    }
-  };
-}
-
-// node_modules/@ipld/car/src/reader.js
-var import_fs2 = __toESM(require("fs"), 1);
-var import_util = require("util");
-var fsread2 = (0, import_util.promisify)(import_fs2.default.read);
-
-// node_modules/@ipld/car/src/writer.js
-var import_fs3 = __toESM(require("fs"), 1);
-var import_util2 = require("util");
-
-// node_modules/@ipld/car/src/encoder.js
-var import_varint5 = __toESM(require_varint(), 1);
-function createHeader(roots) {
-  const headerBytes = encode4({ version: 1, roots });
-  const varintBytes = import_varint5.default.encode(headerBytes.length);
-  const header = new Uint8Array(varintBytes.length + headerBytes.length);
-  header.set(varintBytes, 0);
-  header.set(headerBytes, varintBytes.length);
-  return header;
-}
-function createEncoder(writer) {
-  return {
-    /**
-     * @param {CID[]} roots
-     * @returns {Promise}
-     */
-    async setRoots(roots) {
-      const bytes = createHeader(roots);
-      await writer.write(bytes);
-    },
-    /**
-     * @param {Block} block
-     * @returns {Promise}
-     */
-    async writeBlock(block) {
-      const { cid, bytes } = block;
-      await writer.write(new Uint8Array(import_varint5.default.encode(cid.bytes.length + bytes.length)));
-      await writer.write(cid.bytes);
-      if (bytes.length) {
-        await writer.write(bytes);
-      }
-    },
-    /**
-     * @returns {Promise}
-     */
-    async close() {
-      await writer.end();
-    }
-  };
-}
-
-// node_modules/@ipld/car/src/iterator-channel.js
-function noop() {
-}
-function create3() {
-  const chunkQueue = [];
-  let drainer = null;
-  let drainerResolver = noop;
-  let ended = false;
-  let outWait = null;
-  let outWaitResolver = noop;
-  const makeDrainer = () => {
-    if (!drainer) {
-      drainer = new Promise((resolve6) => {
-        drainerResolver = () => {
-          drainer = null;
-          drainerResolver = noop;
-          resolve6();
-        };
-      });
-    }
-    return drainer;
-  };
-  const writer = {
-    /**
-     * @param {T} chunk
-     * @returns {Promise}
-     */
-    write(chunk) {
-      chunkQueue.push(chunk);
-      const drainer2 = makeDrainer();
-      outWaitResolver();
-      return drainer2;
-    },
-    async end() {
-      ended = true;
-      const drainer2 = makeDrainer();
-      outWaitResolver();
-      await drainer2;
-    }
-  };
-  const iterator = {
-    /** @returns {Promise>} */
-    async next() {
-      const chunk = chunkQueue.shift();
-      if (chunk) {
-        if (chunkQueue.length === 0) {
-          drainerResolver();
-        }
-        return { done: false, value: chunk };
-      }
-      if (ended) {
-        drainerResolver();
-        return { done: true, value: void 0 };
-      }
-      if (!outWait) {
-        outWait = new Promise((resolve6) => {
-          outWaitResolver = () => {
-            outWait = null;
-            outWaitResolver = noop;
-            return resolve6(iterator.next());
-          };
-        });
-      }
-      return outWait;
-    }
-  };
-  return { writer, iterator };
-}
-
-// node_modules/@ipld/car/src/writer-browser.js
-var CarWriter = class _CarWriter {
-  /**
-   * @param {CID[]} roots
-   * @param {CarEncoder} encoder
-   */
-  constructor(roots, encoder) {
-    this._encoder = encoder;
-    this._mutex = encoder.setRoots(roots);
-    this._ended = false;
-  }
-  /**
-   * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
-   *
-   * @function
-   * @memberof CarWriter
-   * @instance
-   * @async
-   * @param {Block} block - A `{ cid:CID, bytes:Uint8Array }` pair.
-   * @returns {Promise} The returned promise will only resolve once the
-   * bytes this block generates are written to the `out` iterable.
-   */
-  async put(block) {
-    if (!(block.bytes instanceof Uint8Array) || !block.cid) {
-      throw new TypeError("Can only write {cid, bytes} objects");
-    }
-    if (this._ended) {
-      throw new Error("Already closed");
-    }
-    const cid = CID2.asCID(block.cid);
-    if (!cid) {
-      throw new TypeError("Can only write {cid, bytes} objects");
-    }
-    this._mutex = this._mutex.then(() => this._encoder.writeBlock({ cid, bytes: block.bytes }));
-    return this._mutex;
-  }
-  /**
-   * Finalise the CAR archive and signal that the `out` iterable should end once
-   * any remaining bytes are written.
-   *
-   * @function
-   * @memberof CarWriter
-   * @instance
-   * @async
-   * @returns {Promise}
-   */
-  async close() {
-    if (this._ended) {
-      throw new Error("Already closed");
-    }
-    await this._mutex;
-    this._ended = true;
-    return this._encoder.close();
-  }
-  /**
-   * Create a new CAR writer "channel" which consists of a
-   * `{ writer:CarWriter, out:AsyncIterable }` pair.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {CID[] | CID | void} roots
-   * @returns {WriterChannel} The channel takes the form of
-   * `{ writer:CarWriter, out:AsyncIterable }`.
-   */
-  static create(roots) {
-    roots = toRoots(roots);
-    const { encoder, iterator } = encodeWriter();
-    const writer = new _CarWriter(roots, encoder);
-    const out = new CarWriterOut(iterator);
-    return { writer, out };
-  }
-  /**
-   * Create a new CAR appender "channel" which consists of a
-   * `{ writer:CarWriter, out:AsyncIterable }` pair.
-   * This appender does not consider roots and does not produce a CAR header.
-   * It is designed to append blocks to an _existing_ CAR archive. It is
-   * expected that `out` will be concatenated onto the end of an existing
-   * archive that already has a properly formatted header.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @returns {WriterChannel} The channel takes the form of
-   * `{ writer:CarWriter, out:AsyncIterable }`.
-   */
-  static createAppender() {
-    const { encoder, iterator } = encodeWriter();
-    encoder.setRoots = () => Promise.resolve();
-    const writer = new _CarWriter([], encoder);
-    const out = new CarWriterOut(iterator);
-    return { writer, out };
-  }
-  /**
-   * Update the list of roots in the header of an existing CAR as represented
-   * in a Uint8Array.
-   *
-   * This operation is an _overwrite_, the total length of the CAR will not be
-   * modified. A rejection will occur if the new header will not be the same
-   * length as the existing header, in which case the CAR will not be modified.
-   * It is the responsibility of the user to ensure that the roots being
-   * replaced encode as the same length as the new roots.
-   *
-   * The byte array passed in an argument will be modified and also returned
-   * upon successful modification.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {Uint8Array} bytes
-   * @param {CID[]} roots - A new list of roots to replace the existing list in
-   * the CAR header. The new header must take up the same number of bytes as the
-   * existing header, so the roots should collectively be the same byte length
-   * as the existing roots.
-   * @returns {Promise}
-   */
-  static async updateRootsInBytes(bytes, roots) {
-    const reader = bytesReader(bytes);
-    await readHeader(reader);
-    const newHeader = createHeader(roots);
-    if (Number(reader.pos) !== newHeader.length) {
-      throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${reader.pos} bytes, new header is ${newHeader.length} bytes)`);
-    }
-    bytes.set(newHeader, 0);
-    return bytes;
-  }
-};
-var CarWriterOut = class {
-  /**
-   * @param {AsyncIterator} iterator
-   */
-  constructor(iterator) {
-    this._iterator = iterator;
-  }
-  [Symbol.asyncIterator]() {
-    if (this._iterating) {
-      throw new Error("Multiple iterator not supported");
-    }
-    this._iterating = true;
-    return this._iterator;
-  }
-};
-function encodeWriter() {
-  const iw = create3();
-  const { writer, iterator } = iw;
-  const encoder = createEncoder(writer);
-  return { encoder, iterator };
-}
-function toRoots(roots) {
-  if (roots === void 0) {
-    return [];
-  }
-  if (!Array.isArray(roots)) {
-    const cid = CID2.asCID(roots);
-    if (!cid) {
-      throw new TypeError("roots must be a single CID or an array of CIDs");
-    }
-    return [cid];
-  }
-  const _roots = [];
-  for (const root of roots) {
-    const _root = CID2.asCID(root);
-    if (!_root) {
-      throw new TypeError("roots must be a single CID or an array of CIDs");
-    }
-    _roots.push(_root);
-  }
-  return _roots;
-}
-
-// node_modules/@ipld/car/src/writer.js
-var fsread3 = (0, import_util2.promisify)(import_fs3.default.read);
-var fswrite = (0, import_util2.promisify)(import_fs3.default.write);
-var CarWriter2 = class extends CarWriter {
-  /**
-   * Update the list of roots in the header of an existing CAR file. The first
-   * argument must be a file descriptor for CAR file that is open in read and
-   * write mode (not append), e.g. `fs.open` or `fs.promises.open` with `'r+'`
-   * mode.
-   *
-   * This operation is an _overwrite_, the total length of the CAR will not be
-   * modified. A rejection will occur if the new header will not be the same
-   * length as the existing header, in which case the CAR will not be modified.
-   * It is the responsibility of the user to ensure that the roots being
-   * replaced encode as the same length as the new roots.
-   *
-   * This function is **only available in Node.js** and not a browser
-   * environment.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {fs.promises.FileHandle | number} fd - A file descriptor from the
-   * Node.js `fs` module. Either an integer, from `fs.open()` or a `FileHandle`
-   * from `fs.promises.open()`.
-   * @param {CID[]} roots - A new list of roots to replace the existing list in
-   * the CAR header. The new header must take up the same number of bytes as the
-   * existing header, so the roots should collectively be the same byte length
-   * as the existing roots.
-   * @returns {Promise}
-   */
-  static async updateRootsInFile(fd, roots) {
-    const chunkSize = 256;
-    let bytes;
-    let offset = 0;
-    let readChunk;
-    if (typeof fd === "number") {
-      readChunk = async () => (await fsread3(fd, bytes, 0, chunkSize, offset)).bytesRead;
-    } else if (typeof fd === "object" && typeof fd.read === "function") {
-      readChunk = async () => (await fd.read(bytes, 0, chunkSize, offset)).bytesRead;
-    } else {
-      throw new TypeError("Bad fd");
-    }
-    const fdReader = chunkReader(async () => {
-      bytes = new Uint8Array(chunkSize);
-      const read4 = await readChunk();
-      offset += read4;
-      return read4 < chunkSize ? bytes.subarray(0, read4) : bytes;
-    });
-    await readHeader(fdReader);
-    const newHeader = createHeader(roots);
-    if (fdReader.pos !== newHeader.length) {
-      throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${fdReader.pos} bytes, new header is ${newHeader.length} bytes)`);
-    }
-    if (typeof fd === "number") {
-      await fswrite(fd, newHeader, 0, newHeader.length, 0);
-    } else if (typeof fd === "object" && typeof fd.read === "function") {
-      await fd.write(newHeader, 0, newHeader.length, 0);
-    }
-  }
-};
-
-// node_modules/it-drain/dist/src/index.js
-function isAsyncIterable(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function drain(source) {
-  if (isAsyncIterable(source)) {
-    return (async () => {
-      for await (const _ of source) {
-      }
-    })();
-  } else {
-    for (const _ of source) {
-    }
-  }
-}
-var src_default = drain;
-
-// node_modules/it-peekable/dist/src/index.js
-function peekable(iterable) {
-  const [iterator, symbol2] = iterable[Symbol.asyncIterator] != null ? [iterable[Symbol.asyncIterator](), Symbol.asyncIterator] : [iterable[Symbol.iterator](), Symbol.iterator];
-  const queue = [];
-  return {
-    peek: () => {
-      return iterator.next();
-    },
-    push: (value) => {
-      queue.push(value);
-    },
-    next: () => {
-      if (queue.length > 0) {
-        return {
-          done: false,
-          value: queue.shift()
-        };
-      }
-      return iterator.next();
-    },
-    [symbol2]() {
-      return this;
-    }
-  };
-}
-var src_default2 = peekable;
-
-// node_modules/it-map/dist/src/index.js
-function isAsyncIterable2(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function map(source, func) {
-  if (isAsyncIterable2(source)) {
-    return async function* () {
-      for await (const val of source) {
-        yield func(val);
-      }
-    }();
-  }
-  const peekable2 = src_default2(source);
-  const { value, done } = peekable2.next();
-  if (done === true) {
-    return function* () {
-    }();
-  }
-  const res = func(value);
-  if (typeof res.then === "function") {
-    return async function* () {
-      yield await res;
-      for await (const val of peekable2) {
-        yield func(val);
-      }
-    }();
-  }
-  const fn = func;
-  return function* () {
-    yield res;
-    for (const val of peekable2) {
-      yield fn(val);
-    }
-  }();
-}
-var src_default3 = map;
-
-// node_modules/p-defer/index.js
-function pDefer() {
-  const deferred = {};
-  deferred.promise = new Promise((resolve6, reject) => {
-    deferred.resolve = resolve6;
-    deferred.reject = reject;
-  });
-  return deferred;
-}
-
-// node_modules/eventemitter3/index.mjs
-var import_index = __toESM(require_eventemitter3(), 1);
-
-// node_modules/p-timeout/index.js
-var TimeoutError = class extends Error {
-  constructor(message2) {
-    super(message2);
-    this.name = "TimeoutError";
-  }
-};
-var AbortError = class extends Error {
-  constructor(message2) {
-    super();
-    this.name = "AbortError";
-    this.message = message2;
-  }
-};
-var getDOMException = (errorMessage) => globalThis.DOMException === void 0 ? new AbortError(errorMessage) : new DOMException(errorMessage);
-var getAbortedReason = (signal) => {
-  const reason = signal.reason === void 0 ? getDOMException("This operation was aborted.") : signal.reason;
-  return reason instanceof Error ? reason : getDOMException(reason);
-};
-function pTimeout(promise, milliseconds, fallback, options) {
-  let timer;
-  const cancelablePromise = new Promise((resolve6, reject) => {
-    if (typeof milliseconds !== "number" || Math.sign(milliseconds) !== 1) {
-      throw new TypeError(`Expected \`milliseconds\` to be a positive number, got \`${milliseconds}\``);
-    }
-    if (milliseconds === Number.POSITIVE_INFINITY) {
-      resolve6(promise);
-      return;
-    }
-    options = {
-      customTimers: { setTimeout, clearTimeout },
-      ...options
-    };
-    if (options.signal) {
-      const { signal } = options;
-      if (signal.aborted) {
-        reject(getAbortedReason(signal));
-      }
-      signal.addEventListener("abort", () => {
-        reject(getAbortedReason(signal));
-      });
-    }
-    timer = options.customTimers.setTimeout.call(void 0, () => {
-      if (typeof fallback === "function") {
-        try {
-          resolve6(fallback());
-        } catch (error) {
-          reject(error);
-        }
-        return;
-      }
-      const message2 = typeof fallback === "string" ? fallback : `Promise timed out after ${milliseconds} milliseconds`;
-      const timeoutError = fallback instanceof Error ? fallback : new TimeoutError(message2);
-      if (typeof promise.cancel === "function") {
-        promise.cancel();
-      }
-      reject(timeoutError);
-    }, milliseconds);
-    (async () => {
-      try {
-        resolve6(await promise);
-      } catch (error) {
-        reject(error);
-      } finally {
-        options.customTimers.clearTimeout.call(void 0, timer);
-      }
-    })();
-  });
-  cancelablePromise.clear = () => {
-    clearTimeout(timer);
-    timer = void 0;
-  };
-  return cancelablePromise;
-}
-
-// node_modules/p-queue/dist/lower-bound.js
-function lowerBound(array, value, comparator) {
-  let first2 = 0;
-  let count = array.length;
-  while (count > 0) {
-    const step = Math.trunc(count / 2);
-    let it = first2 + step;
-    if (comparator(array[it], value) <= 0) {
-      first2 = ++it;
-      count -= step + 1;
-    } else {
-      count = step;
-    }
-  }
-  return first2;
-}
-
-// node_modules/p-queue/dist/priority-queue.js
-var __classPrivateFieldGet = function(receiver, state, kind, f) {
-  if (kind === "a" && !f)
-    throw new TypeError("Private accessor was defined without a getter");
-  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
-    throw new TypeError("Cannot read private member from an object whose class did not declare it");
-  return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
-};
-var _PriorityQueue_queue;
-var PriorityQueue = class {
-  constructor() {
-    _PriorityQueue_queue.set(this, []);
-  }
-  enqueue(run, options) {
-    options = {
-      priority: 0,
-      ...options
-    };
-    const element = {
-      priority: options.priority,
-      run
-    };
-    if (this.size && __classPrivateFieldGet(this, _PriorityQueue_queue, "f")[this.size - 1].priority >= options.priority) {
-      __classPrivateFieldGet(this, _PriorityQueue_queue, "f").push(element);
-      return;
-    }
-    const index = lowerBound(__classPrivateFieldGet(this, _PriorityQueue_queue, "f"), element, (a, b) => b.priority - a.priority);
-    __classPrivateFieldGet(this, _PriorityQueue_queue, "f").splice(index, 0, element);
-  }
-  dequeue() {
-    const item = __classPrivateFieldGet(this, _PriorityQueue_queue, "f").shift();
-    return item === null || item === void 0 ? void 0 : item.run;
-  }
-  filter(options) {
-    return __classPrivateFieldGet(this, _PriorityQueue_queue, "f").filter((element) => element.priority === options.priority).map((element) => element.run);
-  }
-  get size() {
-    return __classPrivateFieldGet(this, _PriorityQueue_queue, "f").length;
-  }
-};
-_PriorityQueue_queue = /* @__PURE__ */ new WeakMap();
-var priority_queue_default = PriorityQueue;
-
-// node_modules/p-queue/dist/index.js
-var __classPrivateFieldSet = function(receiver, state, value, kind, f) {
-  if (kind === "m")
-    throw new TypeError("Private method is not writable");
-  if (kind === "a" && !f)
-    throw new TypeError("Private accessor was defined without a setter");
-  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
-    throw new TypeError("Cannot write private member to an object whose class did not declare it");
-  return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value;
-};
-var __classPrivateFieldGet2 = function(receiver, state, kind, f) {
-  if (kind === "a" && !f)
-    throw new TypeError("Private accessor was defined without a getter");
-  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
-    throw new TypeError("Cannot read private member from an object whose class did not declare it");
-  return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
-};
-var _PQueue_instances;
-var _PQueue_carryoverConcurrencyCount;
-var _PQueue_isIntervalIgnored;
-var _PQueue_intervalCount;
-var _PQueue_intervalCap;
-var _PQueue_interval;
-var _PQueue_intervalEnd;
-var _PQueue_intervalId;
-var _PQueue_timeoutId;
-var _PQueue_queue;
-var _PQueue_queueClass;
-var _PQueue_pending;
-var _PQueue_concurrency;
-var _PQueue_isPaused;
-var _PQueue_throwOnTimeout;
-var _PQueue_doesIntervalAllowAnother_get;
-var _PQueue_doesConcurrentAllowAnother_get;
-var _PQueue_next;
-var _PQueue_onResumeInterval;
-var _PQueue_isIntervalPaused_get;
-var _PQueue_tryToStartAnother;
-var _PQueue_initializeIntervalIfNeeded;
-var _PQueue_onInterval;
-var _PQueue_processQueue;
-var _PQueue_throwOnAbort;
-var _PQueue_onEvent;
-var AbortError2 = class extends Error {
-};
-var PQueue = class extends import_index.default {
-  // TODO: The `throwOnTimeout` option should affect the return types of `add()` and `addAll()`
-  constructor(options) {
-    var _a, _b, _c, _d;
-    super();
-    _PQueue_instances.add(this);
-    _PQueue_carryoverConcurrencyCount.set(this, void 0);
-    _PQueue_isIntervalIgnored.set(this, void 0);
-    _PQueue_intervalCount.set(this, 0);
-    _PQueue_intervalCap.set(this, void 0);
-    _PQueue_interval.set(this, void 0);
-    _PQueue_intervalEnd.set(this, 0);
-    _PQueue_intervalId.set(this, void 0);
-    _PQueue_timeoutId.set(this, void 0);
-    _PQueue_queue.set(this, void 0);
-    _PQueue_queueClass.set(this, void 0);
-    _PQueue_pending.set(this, 0);
-    _PQueue_concurrency.set(this, void 0);
-    _PQueue_isPaused.set(this, void 0);
-    _PQueue_throwOnTimeout.set(this, void 0);
-    Object.defineProperty(this, "timeout", {
-      enumerable: true,
-      configurable: true,
-      writable: true,
-      value: void 0
-    });
-    options = {
-      carryoverConcurrencyCount: false,
-      intervalCap: Number.POSITIVE_INFINITY,
-      interval: 0,
-      concurrency: Number.POSITIVE_INFINITY,
-      autoStart: true,
-      queueClass: priority_queue_default,
-      ...options
-    };
-    if (!(typeof options.intervalCap === "number" && options.intervalCap >= 1)) {
-      throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${(_b = (_a = options.intervalCap) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : ""}\` (${typeof options.intervalCap})`);
-    }
-    if (options.interval === void 0 || !(Number.isFinite(options.interval) && options.interval >= 0)) {
-      throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${(_d = (_c = options.interval) === null || _c === void 0 ? void 0 : _c.toString()) !== null && _d !== void 0 ? _d : ""}\` (${typeof options.interval})`);
-    }
-    __classPrivateFieldSet(this, _PQueue_carryoverConcurrencyCount, options.carryoverConcurrencyCount, "f");
-    __classPrivateFieldSet(this, _PQueue_isIntervalIgnored, options.intervalCap === Number.POSITIVE_INFINITY || options.interval === 0, "f");
-    __classPrivateFieldSet(this, _PQueue_intervalCap, options.intervalCap, "f");
-    __classPrivateFieldSet(this, _PQueue_interval, options.interval, "f");
-    __classPrivateFieldSet(this, _PQueue_queue, new options.queueClass(), "f");
-    __classPrivateFieldSet(this, _PQueue_queueClass, options.queueClass, "f");
-    this.concurrency = options.concurrency;
-    this.timeout = options.timeout;
-    __classPrivateFieldSet(this, _PQueue_throwOnTimeout, options.throwOnTimeout === true, "f");
-    __classPrivateFieldSet(this, _PQueue_isPaused, options.autoStart === false, "f");
-  }
-  get concurrency() {
-    return __classPrivateFieldGet2(this, _PQueue_concurrency, "f");
-  }
-  set concurrency(newConcurrency) {
-    if (!(typeof newConcurrency === "number" && newConcurrency >= 1)) {
-      throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`);
-    }
-    __classPrivateFieldSet(this, _PQueue_concurrency, newConcurrency, "f");
-    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
-  }
-  async add(function_, options = {}) {
-    options = {
-      timeout: this.timeout,
-      throwOnTimeout: __classPrivateFieldGet2(this, _PQueue_throwOnTimeout, "f"),
-      ...options
-    };
-    return new Promise((resolve6, reject) => {
-      __classPrivateFieldGet2(this, _PQueue_queue, "f").enqueue(async () => {
-        var _a;
-        var _b, _c;
-        __classPrivateFieldSet(this, _PQueue_pending, (_b = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _b++, _b), "f");
-        __classPrivateFieldSet(this, _PQueue_intervalCount, (_c = __classPrivateFieldGet2(this, _PQueue_intervalCount, "f"), _c++, _c), "f");
-        try {
-          if ((_a = options.signal) === null || _a === void 0 ? void 0 : _a.aborted) {
-            throw new AbortError2("The task was aborted.");
-          }
-          let operation = function_({ signal: options.signal });
-          if (options.timeout) {
-            operation = pTimeout(Promise.resolve(operation), options.timeout);
-          }
-          if (options.signal) {
-            operation = Promise.race([operation, __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_throwOnAbort).call(this, options.signal)]);
-          }
-          const result = await operation;
-          resolve6(result);
-          this.emit("completed", result);
-        } catch (error) {
-          if (error instanceof TimeoutError && !options.throwOnTimeout) {
-            resolve6();
-            return;
-          }
-          reject(error);
-          this.emit("error", error);
-        } finally {
-          __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_next).call(this);
-        }
-      }, options);
-      this.emit("add");
-      __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this);
-    });
-  }
-  async addAll(functions, options) {
-    return Promise.all(functions.map(async (function_) => this.add(function_, options)));
-  }
-  /**
-  Start (or resume) executing enqueued tasks within concurrency limit. No need to call this if queue is not paused (via `options.autoStart = false` or by `.pause()` method.)
-  */
-  start() {
-    if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) {
-      return this;
-    }
-    __classPrivateFieldSet(this, _PQueue_isPaused, false, "f");
-    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
-    return this;
-  }
-  /**
-  Put queue execution on hold.
-  */
-  pause() {
-    __classPrivateFieldSet(this, _PQueue_isPaused, true, "f");
-  }
-  /**
-  Clear the queue.
-  */
-  clear() {
-    __classPrivateFieldSet(this, _PQueue_queue, new (__classPrivateFieldGet2(this, _PQueue_queueClass, "f"))(), "f");
-  }
-  /**
-      Can be called multiple times. Useful if you for example add additional items at a later time.
-  
-      @returns A promise that settles when the queue becomes empty.
-      */
-  async onEmpty() {
-    if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
-      return;
-    }
-    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "empty");
-  }
-  /**
-      @returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`.
-  
-      If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item.
-  
-      Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation.
-      */
-  async onSizeLessThan(limit) {
-    if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit) {
-      return;
-    }
-    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "next", () => __classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit);
-  }
-  /**
-      The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
-  
-      @returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`.
-      */
-  async onIdle() {
-    if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
-      return;
-    }
-    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "idle");
-  }
-  /**
-  Size of the queue, the number of queued items waiting to run.
-  */
-  get size() {
-    return __classPrivateFieldGet2(this, _PQueue_queue, "f").size;
-  }
-  /**
-      Size of the queue, filtered by the given options.
-  
-      For example, this can be used to find the number of items remaining in the queue with a specific priority level.
-      */
-  sizeBy(options) {
-    return __classPrivateFieldGet2(this, _PQueue_queue, "f").filter(options).length;
-  }
-  /**
-  Number of running items (no longer in the queue).
-  */
-  get pending() {
-    return __classPrivateFieldGet2(this, _PQueue_pending, "f");
-  }
-  /**
-  Whether the queue is currently paused.
-  */
-  get isPaused() {
-    return __classPrivateFieldGet2(this, _PQueue_isPaused, "f");
-  }
-};
-_PQueue_carryoverConcurrencyCount = /* @__PURE__ */ new WeakMap(), _PQueue_isIntervalIgnored = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCount = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCap = /* @__PURE__ */ new WeakMap(), _PQueue_interval = /* @__PURE__ */ new WeakMap(), _PQueue_intervalEnd = /* @__PURE__ */ new WeakMap(), _PQueue_intervalId = /* @__PURE__ */ new WeakMap(), _PQueue_timeoutId = /* @__PURE__ */ new WeakMap(), _PQueue_queue = /* @__PURE__ */ new WeakMap(), _PQueue_queueClass = /* @__PURE__ */ new WeakMap(), _PQueue_pending = /* @__PURE__ */ new WeakMap(), _PQueue_concurrency = /* @__PURE__ */ new WeakMap(), _PQueue_isPaused = /* @__PURE__ */ new WeakMap(), _PQueue_throwOnTimeout = /* @__PURE__ */ new WeakMap(), _PQueue_instances = /* @__PURE__ */ new WeakSet(), _PQueue_doesIntervalAllowAnother_get = function _PQueue_doesIntervalAllowAnother_get2() {
-  return __classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalCount, "f") < __classPrivateFieldGet2(this, _PQueue_intervalCap, "f");
-}, _PQueue_doesConcurrentAllowAnother_get = function _PQueue_doesConcurrentAllowAnother_get2() {
-  return __classPrivateFieldGet2(this, _PQueue_pending, "f") < __classPrivateFieldGet2(this, _PQueue_concurrency, "f");
-}, _PQueue_next = function _PQueue_next2() {
-  var _a;
-  __classPrivateFieldSet(this, _PQueue_pending, (_a = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _a--, _a), "f");
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this);
-  this.emit("next");
-}, _PQueue_onResumeInterval = function _PQueue_onResumeInterval2() {
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this);
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this);
-  __classPrivateFieldSet(this, _PQueue_timeoutId, void 0, "f");
-}, _PQueue_isIntervalPaused_get = function _PQueue_isIntervalPaused_get2() {
-  const now = Date.now();
-  if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f") === void 0) {
-    const delay = __classPrivateFieldGet2(this, _PQueue_intervalEnd, "f") - now;
-    if (delay < 0) {
-      __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? __classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f");
-    } else {
-      if (__classPrivateFieldGet2(this, _PQueue_timeoutId, "f") === void 0) {
-        __classPrivateFieldSet(this, _PQueue_timeoutId, setTimeout(() => {
-          __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onResumeInterval).call(this);
-        }, delay), "f");
-      }
-      return true;
-    }
-  }
-  return false;
-}, _PQueue_tryToStartAnother = function _PQueue_tryToStartAnother2() {
-  if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
-    if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f")) {
-      clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f"));
-    }
-    __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f");
-    this.emit("empty");
-    if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0) {
-      this.emit("idle");
-    }
-    return false;
-  }
-  if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) {
-    const canInitializeInterval = !__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_isIntervalPaused_get);
-    if (__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesIntervalAllowAnother_get) && __classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesConcurrentAllowAnother_get)) {
-      const job = __classPrivateFieldGet2(this, _PQueue_queue, "f").dequeue();
-      if (!job) {
-        return false;
-      }
-      this.emit("active");
-      job();
-      if (canInitializeInterval) {
-        __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this);
-      }
-      return true;
-    }
-  }
-  return false;
-}, _PQueue_initializeIntervalIfNeeded = function _PQueue_initializeIntervalIfNeeded2() {
-  if (__classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalId, "f") !== void 0) {
-    return;
-  }
-  __classPrivateFieldSet(this, _PQueue_intervalId, setInterval(() => {
-    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this);
-  }, __classPrivateFieldGet2(this, _PQueue_interval, "f")), "f");
-  __classPrivateFieldSet(this, _PQueue_intervalEnd, Date.now() + __classPrivateFieldGet2(this, _PQueue_interval, "f"), "f");
-}, _PQueue_onInterval = function _PQueue_onInterval2() {
-  if (__classPrivateFieldGet2(this, _PQueue_intervalCount, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_intervalId, "f")) {
-    clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f"));
-    __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f");
-  }
-  __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? __classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f");
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
-}, _PQueue_processQueue = function _PQueue_processQueue2() {
-  while (__classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this)) {
-  }
-}, _PQueue_throwOnAbort = async function _PQueue_throwOnAbort2(signal) {
-  return new Promise((_resolve, reject) => {
-    signal.addEventListener("abort", () => {
-      reject(new AbortError2("The task was aborted."));
-    }, { once: true });
-  });
-}, _PQueue_onEvent = async function _PQueue_onEvent2(event, filter3) {
-  return new Promise((resolve6) => {
-    const listener = () => {
-      if (filter3 && !filter3()) {
-        return;
-      }
-      this.off(event, listener);
-      resolve6();
-    };
-    this.on(event, listener);
-  });
-};
-var dist_default = PQueue;
-
-// node_modules/@ipld/dag-pb/src/index.js
-var src_exports2 = {};
-__export(src_exports2, {
-  code: () => code2,
-  createLink: () => createLink,
-  createNode: () => createNode,
-  decode: () => decode11,
-  encode: () => encode7,
-  name: () => name,
-  prepare: () => prepare,
-  validate: () => validate
-});
-
-// node_modules/@ipld/dag-pb/src/pb-decode.js
-var textDecoder2 = new TextDecoder();
-function decodeVarint2(bytes, offset) {
-  let v = 0;
-  for (let shift = 0; ; shift += 7) {
-    if (shift >= 64) {
-      throw new Error("protobuf: varint overflow");
-    }
-    if (offset >= bytes.length) {
-      throw new Error("protobuf: unexpected end of data");
-    }
-    const b = bytes[offset++];
-    v += shift < 28 ? (b & 127) << shift : (b & 127) * 2 ** shift;
-    if (b < 128) {
-      break;
-    }
-  }
-  return [v, offset];
-}
-function decodeBytes(bytes, offset) {
-  let byteLen;
-  [byteLen, offset] = decodeVarint2(bytes, offset);
-  const postOffset = offset + byteLen;
-  if (byteLen < 0 || postOffset < 0) {
-    throw new Error("protobuf: invalid length");
-  }
-  if (postOffset > bytes.length) {
-    throw new Error("protobuf: unexpected end of data");
-  }
-  return [bytes.subarray(offset, postOffset), postOffset];
-}
-function decodeKey(bytes, index) {
-  let wire;
-  [wire, index] = decodeVarint2(bytes, index);
-  return [wire & 7, wire >> 3, index];
-}
-function decodeLink(bytes) {
-  const link = {};
-  const l = bytes.length;
-  let index = 0;
-  while (index < l) {
-    let wireType, fieldNum;
-    [wireType, fieldNum, index] = decodeKey(bytes, index);
-    if (fieldNum === 1) {
-      if (link.Hash) {
-        throw new Error("protobuf: (PBLink) duplicate Hash section");
-      }
-      if (wireType !== 2) {
-        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Hash`);
-      }
-      if (link.Name !== void 0) {
-        throw new Error("protobuf: (PBLink) invalid order, found Name before Hash");
-      }
-      if (link.Tsize !== void 0) {
-        throw new Error("protobuf: (PBLink) invalid order, found Tsize before Hash");
-      }
-      [link.Hash, index] = decodeBytes(bytes, index);
-    } else if (fieldNum === 2) {
-      if (link.Name !== void 0) {
-        throw new Error("protobuf: (PBLink) duplicate Name section");
-      }
-      if (wireType !== 2) {
-        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Name`);
-      }
-      if (link.Tsize !== void 0) {
-        throw new Error("protobuf: (PBLink) invalid order, found Tsize before Name");
-      }
-      let byts;
-      [byts, index] = decodeBytes(bytes, index);
-      link.Name = textDecoder2.decode(byts);
-    } else if (fieldNum === 3) {
-      if (link.Tsize !== void 0) {
-        throw new Error("protobuf: (PBLink) duplicate Tsize section");
-      }
-      if (wireType !== 0) {
-        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Tsize`);
-      }
-      [link.Tsize, index] = decodeVarint2(bytes, index);
-    } else {
-      throw new Error(`protobuf: (PBLink) invalid fieldNumber, expected 1, 2 or 3, got ${fieldNum}`);
-    }
-  }
-  if (index > l) {
-    throw new Error("protobuf: (PBLink) unexpected end of data");
-  }
-  return link;
-}
-function decodeNode(bytes) {
-  const l = bytes.length;
-  let index = 0;
-  let links = void 0;
-  let linksBeforeData = false;
-  let data = void 0;
-  while (index < l) {
-    let wireType, fieldNum;
-    [wireType, fieldNum, index] = decodeKey(bytes, index);
-    if (wireType !== 2) {
-      throw new Error(`protobuf: (PBNode) invalid wireType, expected 2, got ${wireType}`);
-    }
-    if (fieldNum === 1) {
-      if (data) {
-        throw new Error("protobuf: (PBNode) duplicate Data section");
-      }
-      [data, index] = decodeBytes(bytes, index);
-      if (links) {
-        linksBeforeData = true;
-      }
-    } else if (fieldNum === 2) {
-      if (linksBeforeData) {
-        throw new Error("protobuf: (PBNode) duplicate Links section");
-      } else if (!links) {
-        links = [];
-      }
-      let byts;
-      [byts, index] = decodeBytes(bytes, index);
-      links.push(decodeLink(byts));
-    } else {
-      throw new Error(`protobuf: (PBNode) invalid fieldNumber, expected 1 or 2, got ${fieldNum}`);
-    }
-  }
-  if (index > l) {
-    throw new Error("protobuf: (PBNode) unexpected end of data");
-  }
-  const node = {};
-  if (data) {
-    node.Data = data;
-  }
-  node.Links = links || [];
-  return node;
-}
-
-// node_modules/@ipld/dag-pb/src/pb-encode.js
-var textEncoder2 = new TextEncoder();
-var maxInt32 = 2 ** 32;
-var maxUInt32 = 2 ** 31;
-function encodeLink(link, bytes) {
-  let i = bytes.length;
-  if (typeof link.Tsize === "number") {
-    if (link.Tsize < 0) {
-      throw new Error("Tsize cannot be negative");
-    }
-    if (!Number.isSafeInteger(link.Tsize)) {
-      throw new Error("Tsize too large for encoding");
-    }
-    i = encodeVarint(bytes, i, link.Tsize) - 1;
-    bytes[i] = 24;
-  }
-  if (typeof link.Name === "string") {
-    const nameBytes = textEncoder2.encode(link.Name);
-    i -= nameBytes.length;
-    bytes.set(nameBytes, i);
-    i = encodeVarint(bytes, i, nameBytes.length) - 1;
-    bytes[i] = 18;
-  }
-  if (link.Hash) {
-    i -= link.Hash.length;
-    bytes.set(link.Hash, i);
-    i = encodeVarint(bytes, i, link.Hash.length) - 1;
-    bytes[i] = 10;
-  }
-  return bytes.length - i;
-}
-function encodeNode(node) {
-  const size = sizeNode(node);
-  const bytes = new Uint8Array(size);
-  let i = size;
-  if (node.Data) {
-    i -= node.Data.length;
-    bytes.set(node.Data, i);
-    i = encodeVarint(bytes, i, node.Data.length) - 1;
-    bytes[i] = 10;
-  }
-  if (node.Links) {
-    for (let index = node.Links.length - 1; index >= 0; index--) {
-      const size2 = encodeLink(node.Links[index], bytes.subarray(0, i));
-      i -= size2;
-      i = encodeVarint(bytes, i, size2) - 1;
-      bytes[i] = 18;
-    }
-  }
-  return bytes;
-}
-function sizeLink(link) {
-  let n = 0;
-  if (link.Hash) {
-    const l = link.Hash.length;
-    n += 1 + l + sov(l);
-  }
-  if (typeof link.Name === "string") {
-    const l = textEncoder2.encode(link.Name).length;
-    n += 1 + l + sov(l);
-  }
-  if (typeof link.Tsize === "number") {
-    n += 1 + sov(link.Tsize);
-  }
-  return n;
-}
-function sizeNode(node) {
-  let n = 0;
-  if (node.Data) {
-    const l = node.Data.length;
-    n += 1 + l + sov(l);
-  }
-  if (node.Links) {
-    for (const link of node.Links) {
-      const l = sizeLink(link);
-      n += 1 + l + sov(l);
-    }
-  }
-  return n;
-}
-function encodeVarint(bytes, offset, v) {
-  offset -= sov(v);
-  const base3 = offset;
-  while (v >= maxUInt32) {
-    bytes[offset++] = v & 127 | 128;
-    v /= 128;
-  }
-  while (v >= 128) {
-    bytes[offset++] = v & 127 | 128;
-    v >>>= 7;
-  }
-  bytes[offset] = v;
-  return base3;
-}
-function sov(x) {
-  if (x % 2 === 0) {
-    x++;
-  }
-  return Math.floor((len64(x) + 6) / 7);
-}
-function len64(x) {
-  let n = 0;
-  if (x >= maxInt32) {
-    x = Math.floor(x / maxInt32);
-    n = 32;
-  }
-  if (x >= 1 << 16) {
-    x >>>= 16;
-    n += 16;
-  }
-  if (x >= 1 << 8) {
-    x >>>= 8;
-    n += 8;
-  }
-  return n + len8tab[x];
-}
-var len8tab = [
-  0,
-  1,
-  2,
-  2,
-  3,
-  3,
-  3,
-  3,
-  4,
-  4,
-  4,
-  4,
-  4,
-  4,
-  4,
-  4,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8
-];
-
-// node_modules/@ipld/dag-pb/src/util.js
-var pbNodeProperties = ["Data", "Links"];
-var pbLinkProperties = ["Hash", "Name", "Tsize"];
-var textEncoder3 = new TextEncoder();
-function linkComparator(a, b) {
-  if (a === b) {
-    return 0;
-  }
-  const abuf = a.Name ? textEncoder3.encode(a.Name) : [];
-  const bbuf = b.Name ? textEncoder3.encode(b.Name) : [];
-  let x = abuf.length;
-  let y = bbuf.length;
-  for (let i = 0, len = Math.min(x, y); i < len; ++i) {
-    if (abuf[i] !== bbuf[i]) {
-      x = abuf[i];
-      y = bbuf[i];
-      break;
-    }
-  }
-  return x < y ? -1 : y < x ? 1 : 0;
-}
-function hasOnlyProperties(node, properties) {
-  return !Object.keys(node).some((p) => !properties.includes(p));
-}
-function asLink(link) {
-  if (typeof link.asCID === "object") {
-    const Hash = CID2.asCID(link);
-    if (!Hash) {
-      throw new TypeError("Invalid DAG-PB form");
-    }
-    return { Hash };
-  }
-  if (typeof link !== "object" || Array.isArray(link)) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  const pbl = {};
-  if (link.Hash) {
-    let cid = CID2.asCID(link.Hash);
-    try {
-      if (!cid) {
-        if (typeof link.Hash === "string") {
-          cid = CID2.parse(link.Hash);
-        } else if (link.Hash instanceof Uint8Array) {
-          cid = CID2.decode(link.Hash);
-        }
-      }
-    } catch (e) {
-      throw new TypeError(`Invalid DAG-PB form: ${e.message}`);
-    }
-    if (cid) {
-      pbl.Hash = cid;
-    }
-  }
-  if (!pbl.Hash) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  if (typeof link.Name === "string") {
-    pbl.Name = link.Name;
-  }
-  if (typeof link.Tsize === "number") {
-    pbl.Tsize = link.Tsize;
-  }
-  return pbl;
-}
-function prepare(node) {
-  if (node instanceof Uint8Array || typeof node === "string") {
-    node = { Data: node };
-  }
-  if (typeof node !== "object" || Array.isArray(node)) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  const pbn = {};
-  if (node.Data !== void 0) {
-    if (typeof node.Data === "string") {
-      pbn.Data = textEncoder3.encode(node.Data);
-    } else if (node.Data instanceof Uint8Array) {
-      pbn.Data = node.Data;
-    } else {
-      throw new TypeError("Invalid DAG-PB form");
-    }
-  }
-  if (node.Links !== void 0) {
-    if (Array.isArray(node.Links)) {
-      pbn.Links = node.Links.map(asLink);
-      pbn.Links.sort(linkComparator);
-    } else {
-      throw new TypeError("Invalid DAG-PB form");
-    }
-  } else {
-    pbn.Links = [];
-  }
-  return pbn;
-}
-function validate(node) {
-  if (!node || typeof node !== "object" || Array.isArray(node) || node instanceof Uint8Array || node["/"] && node["/"] === node.bytes) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  if (!hasOnlyProperties(node, pbNodeProperties)) {
-    throw new TypeError("Invalid DAG-PB form (extraneous properties)");
-  }
-  if (node.Data !== void 0 && !(node.Data instanceof Uint8Array)) {
-    throw new TypeError("Invalid DAG-PB form (Data must be bytes)");
-  }
-  if (!Array.isArray(node.Links)) {
-    throw new TypeError("Invalid DAG-PB form (Links must be a list)");
-  }
-  for (let i = 0; i < node.Links.length; i++) {
-    const link = node.Links[i];
-    if (!link || typeof link !== "object" || Array.isArray(link) || link instanceof Uint8Array || link["/"] && link["/"] === link.bytes) {
-      throw new TypeError("Invalid DAG-PB form (bad link)");
-    }
-    if (!hasOnlyProperties(link, pbLinkProperties)) {
-      throw new TypeError("Invalid DAG-PB form (extraneous properties on link)");
-    }
-    if (link.Hash === void 0) {
-      throw new TypeError("Invalid DAG-PB form (link must have a Hash)");
-    }
-    if (link.Hash == null || !link.Hash["/"] || link.Hash["/"] !== link.Hash.bytes) {
-      throw new TypeError("Invalid DAG-PB form (link Hash must be a CID)");
-    }
-    if (link.Name !== void 0 && typeof link.Name !== "string") {
-      throw new TypeError("Invalid DAG-PB form (link Name must be a string)");
-    }
-    if (link.Tsize !== void 0) {
-      if (typeof link.Tsize !== "number" || link.Tsize % 1 !== 0) {
-        throw new TypeError("Invalid DAG-PB form (link Tsize must be an integer)");
-      }
-      if (link.Tsize < 0) {
-        throw new TypeError("Invalid DAG-PB form (link Tsize cannot be negative)");
-      }
-    }
-    if (i > 0 && linkComparator(link, node.Links[i - 1]) === -1) {
-      throw new TypeError("Invalid DAG-PB form (links must be sorted by Name bytes)");
-    }
-  }
-}
-function createNode(data, links = []) {
-  return prepare({ Data: data, Links: links });
-}
-function createLink(name4, size, cid) {
-  return asLink({ Hash: cid, Name: name4, Tsize: size });
-}
-
-// node_modules/@ipld/dag-pb/src/index.js
-var name = "dag-pb";
-var code2 = 112;
-function encode7(node) {
-  validate(node);
-  const pbn = {};
-  if (node.Links) {
-    pbn.Links = node.Links.map((l) => {
-      const link = {};
-      if (l.Hash) {
-        link.Hash = l.Hash.bytes;
-      }
-      if (l.Name !== void 0) {
-        link.Name = l.Name;
-      }
-      if (l.Tsize !== void 0) {
-        link.Tsize = l.Tsize;
-      }
-      return link;
-    });
-  }
-  if (node.Data) {
-    pbn.Data = node.Data;
-  }
-  return encodeNode(pbn);
-}
-function decode11(bytes) {
-  const pbn = decodeNode(bytes);
-  const node = {};
-  if (pbn.Data) {
-    node.Data = pbn.Data;
-  }
-  if (pbn.Links) {
-    node.Links = pbn.Links.map((l) => {
-      const link = {};
-      try {
-        link.Hash = CID2.decode(l.Hash);
-      } catch (e) {
-      }
-      if (!link.Hash) {
-        throw new Error("Invalid Hash field found in link, expected CID");
-      }
-      if (l.Name !== void 0) {
-        link.Name = l.Name;
-      }
-      if (l.Tsize !== void 0) {
-        link.Tsize = l.Tsize;
-      }
-      return link;
-    });
-  }
-  return node;
-}
-
-// node_modules/cborg/lib/json/encode.js
-var JSONEncoder = class extends Array {
-  constructor() {
-    super();
-    this.inRecursive = [];
-  }
-  /**
-   * @param {Bl} buf
-   */
-  prefix(buf2) {
-    const recurs = this.inRecursive[this.inRecursive.length - 1];
-    if (recurs) {
-      if (recurs.type === Type.array) {
-        recurs.elements++;
-        if (recurs.elements !== 1) {
-          buf2.push([44]);
-        }
-      }
-      if (recurs.type === Type.map) {
-        recurs.elements++;
-        if (recurs.elements !== 1) {
-          if (recurs.elements % 2 === 1) {
-            buf2.push([44]);
-          } else {
-            buf2.push([58]);
-          }
-        }
-      }
-    }
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.uint.major](buf2, token) {
-    this.prefix(buf2);
-    const is2 = String(token.value);
-    const isa = [];
-    for (let i = 0; i < is2.length; i++) {
-      isa[i] = is2.charCodeAt(i);
-    }
-    buf2.push(isa);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.negint.major](buf2, token) {
-    this[Type.uint.major](buf2, token);
-  }
-  /**
-   * @param {Bl} _buf
-   * @param {Token} _token
-   */
-  [Type.bytes.major](_buf, _token) {
-    throw new Error(`${encodeErrPrefix} unsupported type: Uint8Array`);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.string.major](buf2, token) {
-    this.prefix(buf2);
-    const byts = fromString(JSON.stringify(token.value));
-    buf2.push(byts.length > 32 ? asU8A(byts) : byts);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} _token
-   */
-  [Type.array.major](buf2, _token) {
-    this.prefix(buf2);
-    this.inRecursive.push({ type: Type.array, elements: 0 });
-    buf2.push([91]);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} _token
-   */
-  [Type.map.major](buf2, _token) {
-    this.prefix(buf2);
-    this.inRecursive.push({ type: Type.map, elements: 0 });
-    buf2.push([123]);
-  }
-  /**
-   * @param {Bl} _buf
-   * @param {Token} _token
-   */
-  [Type.tag.major](_buf, _token) {
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.float.major](buf2, token) {
-    if (token.type.name === "break") {
-      const recurs = this.inRecursive.pop();
-      if (recurs) {
-        if (recurs.type === Type.array) {
-          buf2.push([93]);
-        } else if (recurs.type === Type.map) {
-          buf2.push([125]);
-        } else {
-          throw new Error("Unexpected recursive type; this should not happen!");
-        }
-        return;
-      }
-      throw new Error("Unexpected break; this should not happen!");
-    }
-    if (token.value === void 0) {
-      throw new Error(`${encodeErrPrefix} unsupported type: undefined`);
-    }
-    this.prefix(buf2);
-    if (token.type.name === "true") {
-      buf2.push([116, 114, 117, 101]);
-      return;
-    } else if (token.type.name === "false") {
-      buf2.push([102, 97, 108, 115, 101]);
-      return;
-    } else if (token.type.name === "null") {
-      buf2.push([110, 117, 108, 108]);
-      return;
-    }
-    const is2 = String(token.value);
-    const isa = [];
-    let dp = false;
-    for (let i = 0; i < is2.length; i++) {
-      isa[i] = is2.charCodeAt(i);
-      if (!dp && (isa[i] === 46 || isa[i] === 101 || isa[i] === 69)) {
-        dp = true;
-      }
-    }
-    if (!dp) {
-      isa.push(46);
-      isa.push(48);
-    }
-    buf2.push(isa);
-  }
-};
-
-// node_modules/cborg/lib/json/decode.js
-var Tokenizer = class {
-  /**
-   * @param {Uint8Array} data
-   * @param {DecodeOptions} options
-   */
-  constructor(data, options = {}) {
-    this._pos = 0;
-    this.data = data;
-    this.options = options;
-    this.modeStack = ["value"];
-    this.lastToken = "";
-  }
-  pos() {
-    return this._pos;
-  }
-  /**
-   * @returns {boolean}
-   */
-  done() {
-    return this._pos >= this.data.length;
-  }
-  /**
-   * @returns {number}
-   */
-  ch() {
-    return this.data[this._pos];
-  }
-  /**
-   * @returns {string}
-   */
-  currentMode() {
-    return this.modeStack[this.modeStack.length - 1];
-  }
-  skipWhitespace() {
-    let c = this.ch();
-    while (c === 32 || c === 9 || c === 13 || c === 10) {
-      c = this.data[++this._pos];
-    }
-  }
-  /**
-   * @param {number[]} str
-   */
-  expect(str) {
-    if (this.data.length - this._pos < str.length) {
-      throw new Error(`${decodeErrPrefix} unexpected end of input at position ${this._pos}`);
-    }
-    for (let i = 0; i < str.length; i++) {
-      if (this.data[this._pos++] !== str[i]) {
-        throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}, expected to find '${String.fromCharCode(...str)}'`);
-      }
-    }
-  }
-  parseNumber() {
-    const startPos = this._pos;
-    let negative = false;
-    let float = false;
-    const swallow = (chars) => {
-      while (!this.done()) {
-        const ch = this.ch();
-        if (chars.includes(ch)) {
-          this._pos++;
-        } else {
-          break;
-        }
-      }
-    };
-    if (this.ch() === 45) {
-      negative = true;
-      this._pos++;
-    }
-    if (this.ch() === 48) {
-      this._pos++;
-      if (this.ch() === 46) {
-        this._pos++;
-        float = true;
-      } else {
-        return new Token(Type.uint, 0, this._pos - startPos);
-      }
-    }
-    swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
-    if (negative && this._pos === startPos + 1) {
-      throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`);
-    }
-    if (!this.done() && this.ch() === 46) {
-      if (float) {
-        throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`);
-      }
-      float = true;
-      this._pos++;
-      swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
-    }
-    if (!this.done() && (this.ch() === 101 || this.ch() === 69)) {
-      float = true;
-      this._pos++;
-      if (!this.done() && (this.ch() === 43 || this.ch() === 45)) {
-        this._pos++;
-      }
-      swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
-    }
-    const numStr = String.fromCharCode.apply(null, this.data.subarray(startPos, this._pos));
-    const num = parseFloat(numStr);
-    if (float) {
-      return new Token(Type.float, num, this._pos - startPos);
-    }
-    if (this.options.allowBigInt !== true || Number.isSafeInteger(num)) {
-      return new Token(num >= 0 ? Type.uint : Type.negint, num, this._pos - startPos);
-    }
-    return new Token(num >= 0 ? Type.uint : Type.negint, BigInt(numStr), this._pos - startPos);
-  }
-  /**
-   * @returns {Token}
-   */
-  parseString() {
-    if (this.ch() !== 34) {
-      throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}; this shouldn't happen`);
-    }
-    this._pos++;
-    for (let i = this._pos, l = 0; i < this.data.length && l < 65536; i++, l++) {
-      const ch = this.data[i];
-      if (ch === 92 || ch < 32 || ch >= 128) {
-        break;
-      }
-      if (ch === 34) {
-        const str = String.fromCharCode.apply(null, this.data.subarray(this._pos, i));
-        this._pos = i + 1;
-        return new Token(Type.string, str, l);
-      }
-    }
-    const startPos = this._pos;
-    const chars = [];
-    const readu4 = () => {
-      if (this._pos + 4 >= this.data.length) {
-        throw new Error(`${decodeErrPrefix} unexpected end of unicode escape sequence at position ${this._pos}`);
-      }
-      let u4 = 0;
-      for (let i = 0; i < 4; i++) {
-        let ch = this.ch();
-        if (ch >= 48 && ch <= 57) {
-          ch -= 48;
-        } else if (ch >= 97 && ch <= 102) {
-          ch = ch - 97 + 10;
-        } else if (ch >= 65 && ch <= 70) {
-          ch = ch - 65 + 10;
-        } else {
-          throw new Error(`${decodeErrPrefix} unexpected unicode escape character at position ${this._pos}`);
-        }
-        u4 = u4 * 16 + ch;
-        this._pos++;
-      }
-      return u4;
-    };
-    const readUtf8Char = () => {
-      const firstByte = this.ch();
-      let codePoint = null;
-      let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
-      if (this._pos + bytesPerSequence > this.data.length) {
-        throw new Error(`${decodeErrPrefix} unexpected unicode sequence at position ${this._pos}`);
-      }
-      let secondByte, thirdByte, fourthByte, tempCodePoint;
-      switch (bytesPerSequence) {
-        case 1:
-          if (firstByte < 128) {
-            codePoint = firstByte;
-          }
-          break;
-        case 2:
-          secondByte = this.data[this._pos + 1];
-          if ((secondByte & 192) === 128) {
-            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
-            if (tempCodePoint > 127) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 3:
-          secondByte = this.data[this._pos + 1];
-          thirdByte = this.data[this._pos + 2];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
-            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 4:
-          secondByte = this.data[this._pos + 1];
-          thirdByte = this.data[this._pos + 2];
-          fourthByte = this.data[this._pos + 3];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
-            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
-              codePoint = tempCodePoint;
-            }
-          }
-      }
-      if (codePoint === null) {
-        codePoint = 65533;
-        bytesPerSequence = 1;
-      } else if (codePoint > 65535) {
-        codePoint -= 65536;
-        chars.push(codePoint >>> 10 & 1023 | 55296);
-        codePoint = 56320 | codePoint & 1023;
-      }
-      chars.push(codePoint);
-      this._pos += bytesPerSequence;
-    };
-    while (!this.done()) {
-      const ch = this.ch();
-      let ch1;
-      switch (ch) {
-        case 92:
-          this._pos++;
-          if (this.done()) {
-            throw new Error(`${decodeErrPrefix} unexpected string termination at position ${this._pos}`);
-          }
-          ch1 = this.ch();
-          this._pos++;
-          switch (ch1) {
-            case 34:
-            case 39:
-            case 92:
-            case 47:
-              chars.push(ch1);
-              break;
-            case 98:
-              chars.push(8);
-              break;
-            case 116:
-              chars.push(9);
-              break;
-            case 110:
-              chars.push(10);
-              break;
-            case 102:
-              chars.push(12);
-              break;
-            case 114:
-              chars.push(13);
-              break;
-            case 117:
-              chars.push(readu4());
-              break;
-            default:
-              throw new Error(`${decodeErrPrefix} unexpected string escape character at position ${this._pos}`);
-          }
-          break;
-        case 34:
-          this._pos++;
-          return new Token(Type.string, decodeCodePointsArray(chars), this._pos - startPos);
-        default:
-          if (ch < 32) {
-            throw new Error(`${decodeErrPrefix} invalid control character at position ${this._pos}`);
-          } else if (ch < 128) {
-            chars.push(ch);
-            this._pos++;
-          } else {
-            readUtf8Char();
-          }
-      }
-    }
-    throw new Error(`${decodeErrPrefix} unexpected end of string at position ${this._pos}`);
-  }
-  /**
-   * @returns {Token}
-   */
-  parseValue() {
-    switch (this.ch()) {
-      case 123:
-        this.modeStack.push("obj-start");
-        this._pos++;
-        return new Token(Type.map, Infinity, 1);
-      case 91:
-        this.modeStack.push("array-start");
-        this._pos++;
-        return new Token(Type.array, Infinity, 1);
-      case 34: {
-        return this.parseString();
-      }
-      case 110:
-        this.expect([110, 117, 108, 108]);
-        return new Token(Type.null, null, 4);
-      case 102:
-        this.expect([102, 97, 108, 115, 101]);
-        return new Token(Type.false, false, 5);
-      case 116:
-        this.expect([116, 114, 117, 101]);
-        return new Token(Type.true, true, 4);
-      case 45:
-      case 48:
-      case 49:
-      case 50:
-      case 51:
-      case 52:
-      case 53:
-      case 54:
-      case 55:
-      case 56:
-      case 57:
-        return this.parseNumber();
-      default:
-        throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}`);
-    }
-  }
-  /**
-   * @returns {Token}
-   */
-  next() {
-    this.skipWhitespace();
-    switch (this.currentMode()) {
-      case "value":
-        this.modeStack.pop();
-        return this.parseValue();
-      case "array-value": {
-        this.modeStack.pop();
-        if (this.ch() === 93) {
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        if (this.ch() !== 44) {
-          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting array delimiter but found '${String.fromCharCode(this.ch())}'`);
-        }
-        this._pos++;
-        this.modeStack.push("array-value");
-        this.skipWhitespace();
-        return this.parseValue();
-      }
-      case "array-start": {
-        this.modeStack.pop();
-        if (this.ch() === 93) {
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        this.modeStack.push("array-value");
-        this.skipWhitespace();
-        return this.parseValue();
-      }
-      case "obj-key":
-        if (this.ch() === 125) {
-          this.modeStack.pop();
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        if (this.ch() !== 44) {
-          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting object delimiter but found '${String.fromCharCode(this.ch())}'`);
-        }
-        this._pos++;
-        this.skipWhitespace();
-      case "obj-start": {
-        this.modeStack.pop();
-        if (this.ch() === 125) {
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        const token = this.parseString();
-        this.skipWhitespace();
-        if (this.ch() !== 58) {
-          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting key/value delimiter ':' but found '${String.fromCharCode(this.ch())}'`);
-        }
-        this._pos++;
-        this.modeStack.push("obj-value");
-        return token;
-      }
-      case "obj-value": {
-        this.modeStack.pop();
-        this.modeStack.push("obj-key");
-        this.skipWhitespace();
-        return this.parseValue();
-      }
-      default:
-        throw new Error(`${decodeErrPrefix} unexpected parse state at position ${this._pos}; this shouldn't happen`);
-    }
-  }
-};
-function decode12(data, options) {
-  options = Object.assign({ tokenizer: new Tokenizer(data, options) }, options);
-  return decode(data, options);
-}
-
-// node_modules/multiformats/src/bases/base64.js
-var base64_exports = {};
-__export(base64_exports, {
-  base64: () => base64,
-  base64pad: () => base64pad,
-  base64url: () => base64url,
-  base64urlpad: () => base64urlpad
-});
-var base64 = rfc46482({
-  prefix: "m",
-  name: "base64",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
-  bitsPerChar: 6
-});
-var base64pad = rfc46482({
-  prefix: "M",
-  name: "base64pad",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
-  bitsPerChar: 6
-});
-var base64url = rfc46482({
-  prefix: "u",
-  name: "base64url",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_",
-  bitsPerChar: 6
-});
-var base64urlpad = rfc46482({
-  prefix: "U",
-  name: "base64urlpad",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=",
-  bitsPerChar: 6
-});
-
-// node_modules/multiformats/src/codecs/raw.js
-var raw_exports = {};
-__export(raw_exports, {
-  code: () => code3,
-  decode: () => decode13,
-  encode: () => encode9,
-  name: () => name2
-});
-var name2 = "raw";
-var code3 = 85;
-var encode9 = (node) => coerce2(node);
-var decode13 = (data) => coerce2(data);
-
-// node_modules/@helia/car/dist/src/utils/dag-walkers.js
-var dagPbWalker = {
-  codec: code2,
-  async *walk(block) {
-    const node = decode11(block);
-    yield* node.Links.map((l) => l.Hash);
-  }
-};
-var rawWalker = {
-  codec: code3,
-  async *walk() {
-  }
-};
-var CID_TAG2 = 42;
-var cborWalker = {
-  codec: 113,
-  async *walk(block) {
-    const cids = [];
-    const tags = [];
-    tags[CID_TAG2] = (bytes) => {
-      if (bytes[0] !== 0) {
-        throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
-      }
-      const cid = CID2.decode(bytes.subarray(1));
-      cids.push(cid);
-      return cid;
-    };
-    decode(block, {
-      tags
-    });
-    yield* cids;
-  }
-};
-var DagJsonTokenizer = class extends Tokenizer {
-  tokenBuffer;
-  constructor(data, options) {
-    super(data, options);
-    this.tokenBuffer = [];
-  }
-  done() {
-    return this.tokenBuffer.length === 0 && super.done();
-  }
-  _next() {
-    if (this.tokenBuffer.length > 0) {
-      return this.tokenBuffer.pop();
-    }
-    return super.next();
-  }
-  /**
-   * Implements rules outlined in https://github.com/ipld/specs/pull/356
-   */
-  next() {
-    const token = this._next();
-    if (token.type === Type.map) {
-      const keyToken = this._next();
-      if (keyToken.type === Type.string && keyToken.value === "/") {
-        const valueToken = this._next();
-        if (valueToken.type === Type.string) {
-          const breakToken = this._next();
-          if (breakToken.type !== Type.break) {
-            throw new Error("Invalid encoded CID form");
-          }
-          this.tokenBuffer.push(valueToken);
-          return new Token(Type.tag, 42, 0);
-        }
-        if (valueToken.type === Type.map) {
-          const innerKeyToken = this._next();
-          if (innerKeyToken.type === Type.string && innerKeyToken.value === "bytes") {
-            const innerValueToken = this._next();
-            if (innerValueToken.type === Type.string) {
-              for (let i = 0; i < 2; i++) {
-                const breakToken = this._next();
-                if (breakToken.type !== Type.break) {
-                  throw new Error("Invalid encoded Bytes form");
-                }
-              }
-              const bytes = base64.decode(`m${innerValueToken.value}`);
-              return new Token(Type.bytes, bytes, innerValueToken.value.length);
-            }
-            this.tokenBuffer.push(innerValueToken);
-          }
-          this.tokenBuffer.push(innerKeyToken);
-        }
-        this.tokenBuffer.push(valueToken);
-      }
-      this.tokenBuffer.push(keyToken);
-    }
-    return token;
-  }
-};
-var jsonWalker = {
-  codec: 297,
-  async *walk(block) {
-    const cids = [];
-    const tags = [];
-    tags[CID_TAG2] = (string2) => {
-      const cid = CID2.parse(string2);
-      cids.push(cid);
-      return cid;
-    };
-    decode12(block, {
-      tags,
-      tokenizer: new DagJsonTokenizer(block, {
-        tags,
-        allowIndefinite: true,
-        allowUndefined: true,
-        allowNaN: true,
-        allowInfinity: true,
-        allowBigInt: true,
-        strict: false,
-        rejectDuplicateMapKeys: false
-      })
-    });
-    yield* cids;
-  }
-};
-
-// node_modules/@helia/car/dist/src/index.js
-var DEFAULT_DAG_WALKERS = [
-  rawWalker,
-  dagPbWalker,
-  cborWalker,
-  jsonWalker
-];
-var DAG_WALK_QUEUE_CONCURRENCY = 1;
-var DefaultCar = class {
-  components;
-  dagWalkers;
-  constructor(components, init) {
-    this.components = components;
-    this.dagWalkers = {};
-    [...DEFAULT_DAG_WALKERS, ...init.dagWalkers ?? []].forEach((dagWalker) => {
-      this.dagWalkers[dagWalker.codec] = dagWalker;
-    });
-  }
-  async import(reader, options) {
-    await src_default(this.components.blockstore.putMany(src_default3(reader.blocks(), ({ cid, bytes }) => ({ cid, block: bytes })), options));
-  }
-  async export(root, writer, options) {
-    const deferred = pDefer();
-    const roots = Array.isArray(root) ? root : [root];
-    const queue = new dist_default({
-      concurrency: DAG_WALK_QUEUE_CONCURRENCY
-    });
-    queue.on("idle", () => {
-      deferred.resolve();
-    });
-    queue.on("error", (err) => {
-      deferred.resolve(err);
-    });
-    for (const root2 of roots) {
-      void queue.add(async () => {
-        await this.#walkDag(root2, queue, async (cid, bytes) => {
-          await writer.put({ cid, bytes });
-        }, options);
-      });
-    }
-    try {
-      await deferred.promise;
-    } finally {
-      await writer.close();
-    }
-  }
-  /**
-   * Walk the DAG behind the passed CID, ensure all blocks are present in the blockstore
-   * and update the pin count for them
-   */
-  async #walkDag(cid, queue, withBlock, options) {
-    const dagWalker = this.dagWalkers[cid.code];
-    if (dagWalker == null) {
-      throw new Error(`No dag walker found for cid codec ${cid.code}`);
-    }
-    const block = await this.components.blockstore.get(cid, options);
-    await withBlock(cid, block);
-    for await (const cid2 of dagWalker.walk(block)) {
-      void queue.add(async () => {
-        await this.#walkDag(cid2, queue, withBlock, options);
-      });
-    }
-  }
-};
-function car(helia, init = {}) {
-  return new DefaultCar(helia, init);
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/index.js
-var import_err_code4 = __toESM(require_err_code(), 1);
-
-// node_modules/it-first/dist/src/index.js
-function isAsyncIterable3(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function first(source) {
-  if (isAsyncIterable3(source)) {
-    return (async () => {
-      for await (const entry of source) {
-        return entry;
-      }
-      return void 0;
-    })();
-  }
-  for (const entry of source) {
-    return entry;
-  }
-  return void 0;
-}
-var src_default4 = first;
-
-// node_modules/it-batch/dist/src/index.js
-function isAsyncIterable4(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function batch(source, size = 1) {
-  size = Number(size);
-  if (isAsyncIterable4(source)) {
-    return async function* () {
-      let things = [];
-      if (size < 1) {
-        size = 1;
-      }
-      if (size !== Math.round(size)) {
-        throw new Error("Batch size must be an integer");
-      }
-      for await (const thing of source) {
-        things.push(thing);
-        while (things.length >= size) {
-          yield things.slice(0, size);
-          things = things.slice(size);
-        }
-      }
-      while (things.length > 0) {
-        yield things.slice(0, size);
-        things = things.slice(size);
-      }
-    }();
-  }
-  return function* () {
-    let things = [];
-    if (size < 1) {
-      size = 1;
-    }
-    if (size !== Math.round(size)) {
-      throw new Error("Batch size must be an integer");
-    }
-    for (const thing of source) {
-      things.push(thing);
-      while (things.length >= size) {
-        yield things.slice(0, size);
-        things = things.slice(size);
-      }
-    }
-    while (things.length > 0) {
-      yield things.slice(0, size);
-      things = things.slice(size);
-    }
-  }();
-}
-var src_default5 = batch;
-
-// node_modules/it-parallel-batch/dist/src/index.js
-async function* parallelBatch(source, size = 1) {
-  for await (const tasks of src_default5(source, size)) {
-    const things = tasks.map(async (p) => {
-      return p().then((value) => ({ ok: true, value }), (err) => ({ ok: false, err }));
-    });
-    for (let i = 0; i < things.length; i++) {
-      const result = await things[i];
-      if (result.ok) {
-        yield result.value;
-      } else {
-        throw result.err;
-      }
-    }
-  }
-}
-
-// node_modules/uint8arrays/dist/src/util/as-uint8array.js
-function asUint8Array(buf2) {
-  if (globalThis.Buffer != null) {
-    return new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-  }
-  return buf2;
-}
-
-// node_modules/uint8arrays/dist/src/alloc.js
-function alloc2(size = 0) {
-  var _a;
-  if (((_a = globalThis.Buffer) == null ? void 0 : _a.alloc) != null) {
-    return asUint8Array(globalThis.Buffer.alloc(size));
-  }
-  return new Uint8Array(size);
-}
-function allocUnsafe(size = 0) {
-  var _a;
-  if (((_a = globalThis.Buffer) == null ? void 0 : _a.allocUnsafe) != null) {
-    return asUint8Array(globalThis.Buffer.allocUnsafe(size));
-  }
-  return new Uint8Array(size);
-}
-
-// node_modules/uint8arrays/dist/src/concat.js
-function concat2(arrays, length4) {
-  if (length4 == null) {
-    length4 = arrays.reduce((acc, curr) => acc + curr.length, 0);
-  }
-  const output = allocUnsafe(length4);
-  let offset = 0;
-  for (const arr of arrays) {
-    output.set(arr, offset);
-    offset += arr.length;
-  }
-  return asUint8Array(output);
-}
-
-// node_modules/uint8arrays/dist/src/equals.js
-function equals5(a, b) {
-  if (a === b) {
-    return true;
-  }
-  if (a.byteLength !== b.byteLength) {
-    return false;
-  }
-  for (let i = 0; i < a.byteLength; i++) {
-    if (a[i] !== b[i]) {
-      return false;
-    }
-  }
-  return true;
-}
-
-// node_modules/uint8arraylist/dist/src/index.js
-var symbol = Symbol.for("@achingbrain/uint8arraylist");
-function findBufAndOffset(bufs, index) {
-  if (index == null || index < 0) {
-    throw new RangeError("index is out of bounds");
-  }
-  let offset = 0;
-  for (const buf2 of bufs) {
-    const bufEnd = offset + buf2.byteLength;
-    if (index < bufEnd) {
-      return {
-        buf: buf2,
-        index: index - offset
-      };
-    }
-    offset = bufEnd;
-  }
-  throw new RangeError("index is out of bounds");
-}
-function isUint8ArrayList(value) {
-  return Boolean(value == null ? void 0 : value[symbol]);
-}
-var Uint8ArrayList = class _Uint8ArrayList {
-  bufs;
-  length;
-  [symbol] = true;
-  constructor(...data) {
-    this.bufs = [];
-    this.length = 0;
-    if (data.length > 0) {
-      this.appendAll(data);
-    }
-  }
-  *[Symbol.iterator]() {
-    yield* this.bufs;
-  }
-  get byteLength() {
-    return this.length;
-  }
-  /**
-   * Add one or more `bufs` to the end of this Uint8ArrayList
-   */
-  append(...bufs) {
-    this.appendAll(bufs);
-  }
-  /**
-   * Add all `bufs` to the end of this Uint8ArrayList
-   */
-  appendAll(bufs) {
-    let length4 = 0;
-    for (const buf2 of bufs) {
-      if (buf2 instanceof Uint8Array) {
-        length4 += buf2.byteLength;
-        this.bufs.push(buf2);
-      } else if (isUint8ArrayList(buf2)) {
-        length4 += buf2.byteLength;
-        this.bufs.push(...buf2.bufs);
-      } else {
-        throw new Error("Could not append value, must be an Uint8Array or a Uint8ArrayList");
-      }
-    }
-    this.length += length4;
-  }
-  /**
-   * Add one or more `bufs` to the start of this Uint8ArrayList
-   */
-  prepend(...bufs) {
-    this.prependAll(bufs);
-  }
-  /**
-   * Add all `bufs` to the start of this Uint8ArrayList
-   */
-  prependAll(bufs) {
-    let length4 = 0;
-    for (const buf2 of bufs.reverse()) {
-      if (buf2 instanceof Uint8Array) {
-        length4 += buf2.byteLength;
-        this.bufs.unshift(buf2);
-      } else if (isUint8ArrayList(buf2)) {
-        length4 += buf2.byteLength;
-        this.bufs.unshift(...buf2.bufs);
-      } else {
-        throw new Error("Could not prepend value, must be an Uint8Array or a Uint8ArrayList");
-      }
-    }
-    this.length += length4;
-  }
-  /**
-   * Read the value at `index`
-   */
-  get(index) {
-    const res = findBufAndOffset(this.bufs, index);
-    return res.buf[res.index];
-  }
-  /**
-   * Set the value at `index` to `value`
-   */
-  set(index, value) {
-    const res = findBufAndOffset(this.bufs, index);
-    res.buf[res.index] = value;
-  }
-  /**
-   * Copy bytes from `buf` to the index specified by `offset`
-   */
-  write(buf2, offset = 0) {
-    if (buf2 instanceof Uint8Array) {
-      for (let i = 0; i < buf2.length; i++) {
-        this.set(offset + i, buf2[i]);
-      }
-    } else if (isUint8ArrayList(buf2)) {
-      for (let i = 0; i < buf2.length; i++) {
-        this.set(offset + i, buf2.get(i));
-      }
-    } else {
-      throw new Error("Could not write value, must be an Uint8Array or a Uint8ArrayList");
-    }
-  }
-  /**
-   * Remove bytes from the front of the pool
-   */
-  consume(bytes) {
-    bytes = Math.trunc(bytes);
-    if (Number.isNaN(bytes) || bytes <= 0) {
-      return;
-    }
-    if (bytes === this.byteLength) {
-      this.bufs = [];
-      this.length = 0;
-      return;
-    }
-    while (this.bufs.length > 0) {
-      if (bytes >= this.bufs[0].byteLength) {
-        bytes -= this.bufs[0].byteLength;
-        this.length -= this.bufs[0].byteLength;
-        this.bufs.shift();
-      } else {
-        this.bufs[0] = this.bufs[0].subarray(bytes);
-        this.length -= bytes;
-        break;
-      }
-    }
-  }
-  /**
-   * Extracts a section of an array and returns a new array.
-   *
-   * This is a copy operation as it is with Uint8Arrays and Arrays
-   * - note this is different to the behaviour of Node Buffers.
-   */
-  slice(beginInclusive, endExclusive) {
-    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
-    return concat2(bufs, length4);
-  }
-  /**
-   * Returns a alloc from the given start and end element index.
-   *
-   * In the best case where the data extracted comes from a single Uint8Array
-   * internally this is a no-copy operation otherwise it is a copy operation.
-   */
-  subarray(beginInclusive, endExclusive) {
-    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
-    if (bufs.length === 1) {
-      return bufs[0];
-    }
-    return concat2(bufs, length4);
-  }
-  /**
-   * Returns a allocList from the given start and end element index.
-   *
-   * This is a no-copy operation.
-   */
-  sublist(beginInclusive, endExclusive) {
-    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
-    const list = new _Uint8ArrayList();
-    list.length = length4;
-    list.bufs = [...bufs];
-    return list;
-  }
-  _subList(beginInclusive, endExclusive) {
-    beginInclusive = beginInclusive ?? 0;
-    endExclusive = endExclusive ?? this.length;
-    if (beginInclusive < 0) {
-      beginInclusive = this.length + beginInclusive;
-    }
-    if (endExclusive < 0) {
-      endExclusive = this.length + endExclusive;
-    }
-    if (beginInclusive < 0 || endExclusive > this.length) {
-      throw new RangeError("index is out of bounds");
-    }
-    if (beginInclusive === endExclusive) {
-      return { bufs: [], length: 0 };
-    }
-    if (beginInclusive === 0 && endExclusive === this.length) {
-      return { bufs: this.bufs, length: this.length };
-    }
-    const bufs = [];
-    let offset = 0;
-    for (let i = 0; i < this.bufs.length; i++) {
-      const buf2 = this.bufs[i];
-      const bufStart = offset;
-      const bufEnd = bufStart + buf2.byteLength;
-      offset = bufEnd;
-      if (beginInclusive >= bufEnd) {
-        continue;
-      }
-      const sliceStartInBuf = beginInclusive >= bufStart && beginInclusive < bufEnd;
-      const sliceEndsInBuf = endExclusive > bufStart && endExclusive <= bufEnd;
-      if (sliceStartInBuf && sliceEndsInBuf) {
-        if (beginInclusive === bufStart && endExclusive === bufEnd) {
-          bufs.push(buf2);
-          break;
-        }
-        const start = beginInclusive - bufStart;
-        bufs.push(buf2.subarray(start, start + (endExclusive - beginInclusive)));
-        break;
-      }
-      if (sliceStartInBuf) {
-        if (beginInclusive === 0) {
-          bufs.push(buf2);
-          continue;
-        }
-        bufs.push(buf2.subarray(beginInclusive - bufStart));
-        continue;
-      }
-      if (sliceEndsInBuf) {
-        if (endExclusive === bufEnd) {
-          bufs.push(buf2);
-          break;
-        }
-        bufs.push(buf2.subarray(0, endExclusive - bufStart));
-        break;
-      }
-      bufs.push(buf2);
-    }
-    return { bufs, length: endExclusive - beginInclusive };
-  }
-  indexOf(search, offset = 0) {
-    if (!isUint8ArrayList(search) && !(search instanceof Uint8Array)) {
-      throw new TypeError('The "value" argument must be a Uint8ArrayList or Uint8Array');
-    }
-    const needle = search instanceof Uint8Array ? search : search.subarray();
-    offset = Number(offset ?? 0);
-    if (isNaN(offset)) {
-      offset = 0;
-    }
-    if (offset < 0) {
-      offset = this.length + offset;
-    }
-    if (offset < 0) {
-      offset = 0;
-    }
-    if (search.length === 0) {
-      return offset > this.length ? this.length : offset;
-    }
-    const M = needle.byteLength;
-    if (M === 0) {
-      throw new TypeError("search must be at least 1 byte long");
-    }
-    const radix = 256;
-    const rightmostPositions = new Int32Array(radix);
-    for (let c = 0; c < radix; c++) {
-      rightmostPositions[c] = -1;
-    }
-    for (let j = 0; j < M; j++) {
-      rightmostPositions[needle[j]] = j;
-    }
-    const right = rightmostPositions;
-    const lastIndex = this.byteLength - needle.byteLength;
-    const lastPatIndex = needle.byteLength - 1;
-    let skip;
-    for (let i = offset; i <= lastIndex; i += skip) {
-      skip = 0;
-      for (let j = lastPatIndex; j >= 0; j--) {
-        const char = this.get(i + j);
-        if (needle[j] !== char) {
-          skip = Math.max(1, j - right[char]);
-          break;
-        }
-      }
-      if (skip === 0) {
-        return i;
-      }
-    }
-    return -1;
-  }
-  getInt8(byteOffset) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getInt8(0);
-  }
-  setInt8(byteOffset, value) {
-    const buf2 = allocUnsafe(1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setInt8(0, value);
-    this.write(buf2, byteOffset);
-  }
-  getInt16(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getInt16(0, littleEndian);
-  }
-  setInt16(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setInt16(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getInt32(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getInt32(0, littleEndian);
-  }
-  setInt32(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setInt32(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getBigInt64(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getBigInt64(0, littleEndian);
-  }
-  setBigInt64(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setBigInt64(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getUint8(byteOffset) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getUint8(0);
-  }
-  setUint8(byteOffset, value) {
-    const buf2 = allocUnsafe(1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setUint8(0, value);
-    this.write(buf2, byteOffset);
-  }
-  getUint16(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getUint16(0, littleEndian);
-  }
-  setUint16(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setUint16(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getUint32(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getUint32(0, littleEndian);
-  }
-  setUint32(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setUint32(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getBigUint64(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getBigUint64(0, littleEndian);
-  }
-  setBigUint64(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setBigUint64(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getFloat32(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getFloat32(0, littleEndian);
-  }
-  setFloat32(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setFloat32(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getFloat64(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getFloat64(0, littleEndian);
-  }
-  setFloat64(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setFloat64(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  equals(other) {
-    if (other == null) {
-      return false;
-    }
-    if (!(other instanceof _Uint8ArrayList)) {
-      return false;
-    }
-    if (other.bufs.length !== this.bufs.length) {
-      return false;
-    }
-    for (let i = 0; i < this.bufs.length; i++) {
-      if (!equals5(this.bufs[i], other.bufs[i])) {
-        return false;
-      }
-    }
-    return true;
-  }
-  /**
-   * Create a Uint8ArrayList from a pre-existing list of Uint8Arrays.  Use this
-   * method if you know the total size of all the Uint8Arrays ahead of time.
-   */
-  static fromUint8Arrays(bufs, length4) {
-    const list = new _Uint8ArrayList();
-    list.bufs = bufs;
-    if (length4 == null) {
-      length4 = bufs.reduce((acc, curr) => acc + curr.byteLength, 0);
-    }
-    list.length = length4;
-    return list;
-  }
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/chunker/fixed-size.js
-var DEFAULT_CHUNK_SIZE = 262144;
-var fixedSize = (options = {}) => {
-  const chunkSize = options.chunkSize ?? DEFAULT_CHUNK_SIZE;
-  return async function* fixedSizeChunker(source) {
-    let list = new Uint8ArrayList();
-    let currentLength = 0;
-    let emitted = false;
-    for await (const buffer2 of source) {
-      list.append(buffer2);
-      currentLength += buffer2.length;
-      while (currentLength >= chunkSize) {
-        yield list.slice(0, chunkSize);
-        emitted = true;
-        if (chunkSize === list.length) {
-          list = new Uint8ArrayList();
-          currentLength = 0;
-        } else {
-          const newBl = new Uint8ArrayList();
-          newBl.append(list.sublist(chunkSize));
-          list = newBl;
-          currentLength -= chunkSize;
-        }
-      }
-    }
-    if (!emitted || currentLength > 0) {
-      yield list.subarray(0, currentLength);
-    }
-  };
-};
-
-// node_modules/ipfs-unixfs/dist/src/index.js
-var import_err_code = __toESM(require_err_code(), 1);
-
-// node_modules/protons-runtime/dist/src/utils/float.js
-var f32 = new Float32Array([-0]);
-var f8b = new Uint8Array(f32.buffer);
-function writeFloatLE(val, buf2, pos) {
-  f32[0] = val;
-  buf2[pos] = f8b[0];
-  buf2[pos + 1] = f8b[1];
-  buf2[pos + 2] = f8b[2];
-  buf2[pos + 3] = f8b[3];
-}
-function readFloatLE(buf2, pos) {
-  f8b[0] = buf2[pos];
-  f8b[1] = buf2[pos + 1];
-  f8b[2] = buf2[pos + 2];
-  f8b[3] = buf2[pos + 3];
-  return f32[0];
-}
-var f64 = new Float64Array([-0]);
-var d8b = new Uint8Array(f64.buffer);
-function writeDoubleLE(val, buf2, pos) {
-  f64[0] = val;
-  buf2[pos] = d8b[0];
-  buf2[pos + 1] = d8b[1];
-  buf2[pos + 2] = d8b[2];
-  buf2[pos + 3] = d8b[3];
-  buf2[pos + 4] = d8b[4];
-  buf2[pos + 5] = d8b[5];
-  buf2[pos + 6] = d8b[6];
-  buf2[pos + 7] = d8b[7];
-}
-function readDoubleLE(buf2, pos) {
-  d8b[0] = buf2[pos];
-  d8b[1] = buf2[pos + 1];
-  d8b[2] = buf2[pos + 2];
-  d8b[3] = buf2[pos + 3];
-  d8b[4] = buf2[pos + 4];
-  d8b[5] = buf2[pos + 5];
-  d8b[6] = buf2[pos + 6];
-  d8b[7] = buf2[pos + 7];
-  return f64[0];
-}
-
-// node_modules/protons-runtime/dist/src/utils/longbits.js
-var MAX_SAFE_NUMBER_INTEGER = BigInt(Number.MAX_SAFE_INTEGER);
-var MIN_SAFE_NUMBER_INTEGER = BigInt(Number.MIN_SAFE_INTEGER);
-var LongBits = class _LongBits {
-  lo;
-  hi;
-  constructor(lo, hi) {
-    this.lo = lo | 0;
-    this.hi = hi | 0;
-  }
-  /**
-   * Converts this long bits to a possibly unsafe JavaScript number
-   */
-  toNumber(unsigned = false) {
-    if (!unsigned && this.hi >>> 31 > 0) {
-      const lo = ~this.lo + 1 >>> 0;
-      let hi = ~this.hi >>> 0;
-      if (lo === 0) {
-        hi = hi + 1 >>> 0;
-      }
-      return -(lo + hi * 4294967296);
-    }
-    return this.lo + this.hi * 4294967296;
-  }
-  /**
-   * Converts this long bits to a bigint
-   */
-  toBigInt(unsigned = false) {
-    if (unsigned) {
-      return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
-    }
-    if (this.hi >>> 31 !== 0) {
-      const lo = ~this.lo + 1 >>> 0;
-      let hi = ~this.hi >>> 0;
-      if (lo === 0) {
-        hi = hi + 1 >>> 0;
-      }
-      return -(BigInt(lo) + (BigInt(hi) << 32n));
-    }
-    return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
-  }
-  /**
-   * Converts this long bits to a string
-   */
-  toString(unsigned = false) {
-    return this.toBigInt(unsigned).toString();
-  }
-  /**
-   * Zig-zag encodes this long bits
-   */
-  zzEncode() {
-    const mask = this.hi >> 31;
-    this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;
-    this.lo = (this.lo << 1 ^ mask) >>> 0;
-    return this;
-  }
-  /**
-   * Zig-zag decodes this long bits
-   */
-  zzDecode() {
-    const mask = -(this.lo & 1);
-    this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;
-    this.hi = (this.hi >>> 1 ^ mask) >>> 0;
-    return this;
-  }
-  /**
-   * Calculates the length of this longbits when encoded as a varint.
-   */
-  length() {
-    const part0 = this.lo;
-    const part1 = (this.lo >>> 28 | this.hi << 4) >>> 0;
-    const part2 = this.hi >>> 24;
-    return part2 === 0 ? part1 === 0 ? part0 < 16384 ? part0 < 128 ? 1 : 2 : part0 < 2097152 ? 3 : 4 : part1 < 16384 ? part1 < 128 ? 5 : 6 : part1 < 2097152 ? 7 : 8 : part2 < 128 ? 9 : 10;
-  }
-  /**
-   * Constructs new long bits from the specified number
-   */
-  static fromBigInt(value) {
-    if (value === 0n) {
-      return zero;
-    }
-    if (value < MAX_SAFE_NUMBER_INTEGER && value > MIN_SAFE_NUMBER_INTEGER) {
-      return this.fromNumber(Number(value));
-    }
-    const negative = value < 0n;
-    if (negative) {
-      value = -value;
-    }
-    let hi = value >> 32n;
-    let lo = value - (hi << 32n);
-    if (negative) {
-      hi = ~hi | 0n;
-      lo = ~lo | 0n;
-      if (++lo > TWO_32) {
-        lo = 0n;
-        if (++hi > TWO_32) {
-          hi = 0n;
-        }
-      }
-    }
-    return new _LongBits(Number(lo), Number(hi));
-  }
-  /**
-   * Constructs new long bits from the specified number
-   */
-  static fromNumber(value) {
-    if (value === 0) {
-      return zero;
-    }
-    const sign = value < 0;
-    if (sign) {
-      value = -value;
-    }
-    let lo = value >>> 0;
-    let hi = (value - lo) / 4294967296 >>> 0;
-    if (sign) {
-      hi = ~hi >>> 0;
-      lo = ~lo >>> 0;
-      if (++lo > 4294967295) {
-        lo = 0;
-        if (++hi > 4294967295) {
-          hi = 0;
-        }
-      }
-    }
-    return new _LongBits(lo, hi);
-  }
-  /**
-   * Constructs new long bits from a number, long or string
-   */
-  static from(value) {
-    if (typeof value === "number") {
-      return _LongBits.fromNumber(value);
-    }
-    if (typeof value === "bigint") {
-      return _LongBits.fromBigInt(value);
-    }
-    if (typeof value === "string") {
-      return _LongBits.fromBigInt(BigInt(value));
-    }
-    return value.low != null || value.high != null ? new _LongBits(value.low >>> 0, value.high >>> 0) : zero;
-  }
-};
-var zero = new LongBits(0, 0);
-zero.toBigInt = function() {
-  return 0n;
-};
-zero.zzEncode = zero.zzDecode = function() {
-  return this;
-};
-zero.length = function() {
-  return 1;
-};
-var TWO_32 = 4294967296n;
-
-// node_modules/protons-runtime/dist/src/utils/utf8.js
-function length3(string2) {
-  let len = 0;
-  let c = 0;
-  for (let i = 0; i < string2.length; ++i) {
-    c = string2.charCodeAt(i);
-    if (c < 128) {
-      len += 1;
-    } else if (c < 2048) {
-      len += 2;
-    } else if ((c & 64512) === 55296 && (string2.charCodeAt(i + 1) & 64512) === 56320) {
-      ++i;
-      len += 4;
-    } else {
-      len += 3;
-    }
-  }
-  return len;
-}
-function read3(buffer2, start, end) {
-  const len = end - start;
-  if (len < 1) {
-    return "";
-  }
-  let parts;
-  const chunk = [];
-  let i = 0;
-  let t;
-  while (start < end) {
-    t = buffer2[start++];
-    if (t < 128) {
-      chunk[i++] = t;
-    } else if (t > 191 && t < 224) {
-      chunk[i++] = (t & 31) << 6 | buffer2[start++] & 63;
-    } else if (t > 239 && t < 365) {
-      t = ((t & 7) << 18 | (buffer2[start++] & 63) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63) - 65536;
-      chunk[i++] = 55296 + (t >> 10);
-      chunk[i++] = 56320 + (t & 1023);
-    } else {
-      chunk[i++] = (t & 15) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63;
-    }
-    if (i > 8191) {
-      (parts ?? (parts = [])).push(String.fromCharCode.apply(String, chunk));
-      i = 0;
-    }
-  }
-  if (parts != null) {
-    if (i > 0) {
-      parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));
-    }
-    return parts.join("");
-  }
-  return String.fromCharCode.apply(String, chunk.slice(0, i));
-}
-function write(string2, buffer2, offset) {
-  const start = offset;
-  let c1;
-  let c2;
-  for (let i = 0; i < string2.length; ++i) {
-    c1 = string2.charCodeAt(i);
-    if (c1 < 128) {
-      buffer2[offset++] = c1;
-    } else if (c1 < 2048) {
-      buffer2[offset++] = c1 >> 6 | 192;
-      buffer2[offset++] = c1 & 63 | 128;
-    } else if ((c1 & 64512) === 55296 && ((c2 = string2.charCodeAt(i + 1)) & 64512) === 56320) {
-      c1 = 65536 + ((c1 & 1023) << 10) + (c2 & 1023);
-      ++i;
-      buffer2[offset++] = c1 >> 18 | 240;
-      buffer2[offset++] = c1 >> 12 & 63 | 128;
-      buffer2[offset++] = c1 >> 6 & 63 | 128;
-      buffer2[offset++] = c1 & 63 | 128;
-    } else {
-      buffer2[offset++] = c1 >> 12 | 224;
-      buffer2[offset++] = c1 >> 6 & 63 | 128;
-      buffer2[offset++] = c1 & 63 | 128;
-    }
-  }
-  return offset - start;
-}
-
-// node_modules/protons-runtime/dist/src/utils/reader.js
-function indexOutOfRange(reader, writeLength) {
-  return RangeError(`index out of range: ${reader.pos} + ${writeLength ?? 1} > ${reader.len}`);
-}
-function readFixed32End(buf2, end) {
-  return (buf2[end - 4] | buf2[end - 3] << 8 | buf2[end - 2] << 16 | buf2[end - 1] << 24) >>> 0;
-}
-var Uint8ArrayReader = class {
-  buf;
-  pos;
-  len;
-  _slice = Uint8Array.prototype.subarray;
-  constructor(buffer2) {
-    this.buf = buffer2;
-    this.pos = 0;
-    this.len = buffer2.length;
-  }
-  /**
-   * Reads a varint as an unsigned 32 bit value
-   */
-  uint32() {
-    let value = 4294967295;
-    value = (this.buf[this.pos] & 127) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 127) << 7) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 127) << 14) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 127) << 21) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 15) << 28) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    if ((this.pos += 5) > this.len) {
-      this.pos = this.len;
-      throw indexOutOfRange(this, 10);
-    }
-    return value;
-  }
-  /**
-   * Reads a varint as a signed 32 bit value
-   */
-  int32() {
-    return this.uint32() | 0;
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 32 bit value
-   */
-  sint32() {
-    const value = this.uint32();
-    return value >>> 1 ^ -(value & 1) | 0;
-  }
-  /**
-   * Reads a varint as a boolean
-   */
-  bool() {
-    return this.uint32() !== 0;
-  }
-  /**
-   * Reads fixed 32 bits as an unsigned 32 bit integer
-   */
-  fixed32() {
-    if (this.pos + 4 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const res = readFixed32End(this.buf, this.pos += 4);
-    return res;
-  }
-  /**
-   * Reads fixed 32 bits as a signed 32 bit integer
-   */
-  sfixed32() {
-    if (this.pos + 4 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const res = readFixed32End(this.buf, this.pos += 4) | 0;
-    return res;
-  }
-  /**
-   * Reads a float (32 bit) as a number
-   */
-  float() {
-    if (this.pos + 4 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const value = readFloatLE(this.buf, this.pos);
-    this.pos += 4;
-    return value;
-  }
-  /**
-   * Reads a double (64 bit float) as a number
-   */
-  double() {
-    if (this.pos + 8 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const value = readDoubleLE(this.buf, this.pos);
-    this.pos += 8;
-    return value;
-  }
-  /**
-   * Reads a sequence of bytes preceded by its length as a varint
-   */
-  bytes() {
-    const length4 = this.uint32();
-    const start = this.pos;
-    const end = this.pos + length4;
-    if (end > this.len) {
-      throw indexOutOfRange(this, length4);
-    }
-    this.pos += length4;
-    return start === end ? new Uint8Array(0) : this.buf.subarray(start, end);
-  }
-  /**
-   * Reads a string preceded by its byte length as a varint
-   */
-  string() {
-    const bytes = this.bytes();
-    return read3(bytes, 0, bytes.length);
-  }
-  /**
-   * Skips the specified number of bytes if specified, otherwise skips a varint
-   */
-  skip(length4) {
-    if (typeof length4 === "number") {
-      if (this.pos + length4 > this.len) {
-        throw indexOutOfRange(this, length4);
-      }
-      this.pos += length4;
-    } else {
-      do {
-        if (this.pos >= this.len) {
-          throw indexOutOfRange(this);
-        }
-      } while ((this.buf[this.pos++] & 128) !== 0);
-    }
-    return this;
-  }
-  /**
-   * Skips the next element of the specified wire type
-   */
-  skipType(wireType) {
-    switch (wireType) {
-      case 0:
-        this.skip();
-        break;
-      case 1:
-        this.skip(8);
-        break;
-      case 2:
-        this.skip(this.uint32());
-        break;
-      case 3:
-        while ((wireType = this.uint32() & 7) !== 4) {
-          this.skipType(wireType);
-        }
-        break;
-      case 5:
-        this.skip(4);
-        break;
-      default:
-        throw Error(`invalid wire type ${wireType} at offset ${this.pos}`);
-    }
-    return this;
-  }
-  readLongVarint() {
-    const bits = new LongBits(0, 0);
-    let i = 0;
-    if (this.len - this.pos > 4) {
-      for (; i < 4; ++i) {
-        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-      bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;
-      bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;
-      if (this.buf[this.pos++] < 128) {
-        return bits;
-      }
-      i = 0;
-    } else {
-      for (; i < 3; ++i) {
-        if (this.pos >= this.len) {
-          throw indexOutOfRange(this);
-        }
-        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-      bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;
-      return bits;
-    }
-    if (this.len - this.pos > 4) {
-      for (; i < 5; ++i) {
-        bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-    } else {
-      for (; i < 5; ++i) {
-        if (this.pos >= this.len) {
-          throw indexOutOfRange(this);
-        }
-        bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-    }
-    throw Error("invalid varint encoding");
-  }
-  readFixed64() {
-    if (this.pos + 8 > this.len) {
-      throw indexOutOfRange(this, 8);
-    }
-    const lo = readFixed32End(this.buf, this.pos += 4);
-    const hi = readFixed32End(this.buf, this.pos += 4);
-    return new LongBits(lo, hi);
-  }
-  /**
-   * Reads a varint as a signed 64 bit value
-   */
-  int64() {
-    return this.readLongVarint().toBigInt();
-  }
-  /**
-   * Reads a varint as a signed 64 bit value returned as a possibly unsafe
-   * JavaScript number
-   */
-  int64Number() {
-    return this.readLongVarint().toNumber();
-  }
-  /**
-   * Reads a varint as a signed 64 bit value returned as a string
-   */
-  int64String() {
-    return this.readLongVarint().toString();
-  }
-  /**
-   * Reads a varint as an unsigned 64 bit value
-   */
-  uint64() {
-    return this.readLongVarint().toBigInt(true);
-  }
-  /**
-   * Reads a varint as an unsigned 64 bit value returned as a possibly unsafe
-   * JavaScript number
-   */
-  uint64Number() {
-    return this.readLongVarint().toNumber(true);
-  }
-  /**
-   * Reads a varint as an unsigned 64 bit value returned as a string
-   */
-  uint64String() {
-    return this.readLongVarint().toString(true);
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 64 bit value
-   */
-  sint64() {
-    return this.readLongVarint().zzDecode().toBigInt();
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 64 bit value returned as a
-   * possibly unsafe JavaScript number
-   */
-  sint64Number() {
-    return this.readLongVarint().zzDecode().toNumber();
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 64 bit value returned as a
-   * string
-   */
-  sint64String() {
-    return this.readLongVarint().zzDecode().toString();
-  }
-  /**
-   * Reads fixed 64 bits
-   */
-  fixed64() {
-    return this.readFixed64().toBigInt();
-  }
-  /**
-   * Reads fixed 64 bits returned as a possibly unsafe JavaScript number
-   */
-  fixed64Number() {
-    return this.readFixed64().toNumber();
-  }
-  /**
-   * Reads fixed 64 bits returned as a string
-   */
-  fixed64String() {
-    return this.readFixed64().toString();
-  }
-  /**
-   * Reads zig-zag encoded fixed 64 bits
-   */
-  sfixed64() {
-    return this.readFixed64().toBigInt();
-  }
-  /**
-   * Reads zig-zag encoded fixed 64 bits returned as a possibly unsafe
-   * JavaScript number
-   */
-  sfixed64Number() {
-    return this.readFixed64().toNumber();
-  }
-  /**
-   * Reads zig-zag encoded fixed 64 bits returned as a string
-   */
-  sfixed64String() {
-    return this.readFixed64().toString();
-  }
-};
-function createReader(buf2) {
-  return new Uint8ArrayReader(buf2 instanceof Uint8Array ? buf2 : buf2.subarray());
-}
-
-// node_modules/protons-runtime/dist/src/decode.js
-function decodeMessage(buf2, codec) {
-  const reader = createReader(buf2);
-  return codec.decode(reader);
-}
-
-// node_modules/multiformats/src/bases/base10.js
-var base10_exports = {};
-__export(base10_exports, {
-  base10: () => base10
-});
-var base10 = baseX2({
-  prefix: "9",
-  name: "base10",
-  alphabet: "0123456789"
-});
-
-// node_modules/multiformats/src/bases/base16.js
-var base16_exports = {};
-__export(base16_exports, {
-  base16: () => base16,
-  base16upper: () => base16upper
-});
-var base16 = rfc46482({
-  prefix: "f",
-  name: "base16",
-  alphabet: "0123456789abcdef",
-  bitsPerChar: 4
-});
-var base16upper = rfc46482({
-  prefix: "F",
-  name: "base16upper",
-  alphabet: "0123456789ABCDEF",
-  bitsPerChar: 4
-});
-
-// node_modules/multiformats/src/bases/base2.js
-var base2_exports = {};
-__export(base2_exports, {
-  base2: () => base22
-});
-var base22 = rfc46482({
-  prefix: "0",
-  name: "base2",
-  alphabet: "01",
-  bitsPerChar: 1
-});
-
-// node_modules/multiformats/src/bases/base256emoji.js
-var base256emoji_exports = {};
-__export(base256emoji_exports, {
-  base256emoji: () => base256emoji
-});
-var alphabet = Array.from("\u{1F680}\u{1FA90}\u2604\u{1F6F0}\u{1F30C}\u{1F311}\u{1F312}\u{1F313}\u{1F314}\u{1F315}\u{1F316}\u{1F317}\u{1F318}\u{1F30D}\u{1F30F}\u{1F30E}\u{1F409}\u2600\u{1F4BB}\u{1F5A5}\u{1F4BE}\u{1F4BF}\u{1F602}\u2764\u{1F60D}\u{1F923}\u{1F60A}\u{1F64F}\u{1F495}\u{1F62D}\u{1F618}\u{1F44D}\u{1F605}\u{1F44F}\u{1F601}\u{1F525}\u{1F970}\u{1F494}\u{1F496}\u{1F499}\u{1F622}\u{1F914}\u{1F606}\u{1F644}\u{1F4AA}\u{1F609}\u263A\u{1F44C}\u{1F917}\u{1F49C}\u{1F614}\u{1F60E}\u{1F607}\u{1F339}\u{1F926}\u{1F389}\u{1F49E}\u270C\u2728\u{1F937}\u{1F631}\u{1F60C}\u{1F338}\u{1F64C}\u{1F60B}\u{1F497}\u{1F49A}\u{1F60F}\u{1F49B}\u{1F642}\u{1F493}\u{1F929}\u{1F604}\u{1F600}\u{1F5A4}\u{1F603}\u{1F4AF}\u{1F648}\u{1F447}\u{1F3B6}\u{1F612}\u{1F92D}\u2763\u{1F61C}\u{1F48B}\u{1F440}\u{1F62A}\u{1F611}\u{1F4A5}\u{1F64B}\u{1F61E}\u{1F629}\u{1F621}\u{1F92A}\u{1F44A}\u{1F973}\u{1F625}\u{1F924}\u{1F449}\u{1F483}\u{1F633}\u270B\u{1F61A}\u{1F61D}\u{1F634}\u{1F31F}\u{1F62C}\u{1F643}\u{1F340}\u{1F337}\u{1F63B}\u{1F613}\u2B50\u2705\u{1F97A}\u{1F308}\u{1F608}\u{1F918}\u{1F4A6}\u2714\u{1F623}\u{1F3C3}\u{1F490}\u2639\u{1F38A}\u{1F498}\u{1F620}\u261D\u{1F615}\u{1F33A}\u{1F382}\u{1F33B}\u{1F610}\u{1F595}\u{1F49D}\u{1F64A}\u{1F639}\u{1F5E3}\u{1F4AB}\u{1F480}\u{1F451}\u{1F3B5}\u{1F91E}\u{1F61B}\u{1F534}\u{1F624}\u{1F33C}\u{1F62B}\u26BD\u{1F919}\u2615\u{1F3C6}\u{1F92B}\u{1F448}\u{1F62E}\u{1F646}\u{1F37B}\u{1F343}\u{1F436}\u{1F481}\u{1F632}\u{1F33F}\u{1F9E1}\u{1F381}\u26A1\u{1F31E}\u{1F388}\u274C\u270A\u{1F44B}\u{1F630}\u{1F928}\u{1F636}\u{1F91D}\u{1F6B6}\u{1F4B0}\u{1F353}\u{1F4A2}\u{1F91F}\u{1F641}\u{1F6A8}\u{1F4A8}\u{1F92C}\u2708\u{1F380}\u{1F37A}\u{1F913}\u{1F619}\u{1F49F}\u{1F331}\u{1F616}\u{1F476}\u{1F974}\u25B6\u27A1\u2753\u{1F48E}\u{1F4B8}\u2B07\u{1F628}\u{1F31A}\u{1F98B}\u{1F637}\u{1F57A}\u26A0\u{1F645}\u{1F61F}\u{1F635}\u{1F44E}\u{1F932}\u{1F920}\u{1F927}\u{1F4CC}\u{1F535}\u{1F485}\u{1F9D0}\u{1F43E}\u{1F352}\u{1F617}\u{1F911}\u{1F30A}\u{1F92F}\u{1F437}\u260E\u{1F4A7}\u{1F62F}\u{1F486}\u{1
F446}\u{1F3A4}\u{1F647}\u{1F351}\u2744\u{1F334}\u{1F4A3}\u{1F438}\u{1F48C}\u{1F4CD}\u{1F940}\u{1F922}\u{1F445}\u{1F4A1}\u{1F4A9}\u{1F450}\u{1F4F8}\u{1F47B}\u{1F910}\u{1F92E}\u{1F3BC}\u{1F975}\u{1F6A9}\u{1F34E}\u{1F34A}\u{1F47C}\u{1F48D}\u{1F4E3}\u{1F942}");
-var alphabetBytesToChars = (
-  /** @type {string[]} */
-  alphabet.reduce(
-    (p, c, i) => {
-      p[i] = c;
-      return p;
-    },
-    /** @type {string[]} */
-    []
-  )
-);
-var alphabetCharsToBytes = (
-  /** @type {number[]} */
-  alphabet.reduce(
-    (p, c, i) => {
-      p[
-        /** @type {number} */
-        c.codePointAt(0)
-      ] = i;
-      return p;
-    },
-    /** @type {number[]} */
-    []
-  )
-);
-function encode10(data) {
-  return data.reduce((p, c) => {
-    p += alphabetBytesToChars[c];
-    return p;
-  }, "");
-}
-function decode14(str) {
-  const byts = [];
-  for (const char of str) {
-    const byt = alphabetCharsToBytes[
-      /** @type {number} */
-      char.codePointAt(0)
-    ];
-    if (byt === void 0) {
-      throw new Error(`Non-base256emoji character: ${char}`);
-    }
-    byts.push(byt);
-  }
-  return new Uint8Array(byts);
-}
-var base256emoji = from2({
-  prefix: "\u{1F680}",
-  name: "base256emoji",
-  encode: encode10,
-  decode: decode14
-});
-
-// node_modules/multiformats/src/bases/base36.js
-var base36_exports = {};
-__export(base36_exports, {
-  base36: () => base36,
-  base36upper: () => base36upper
-});
-var base36 = baseX2({
-  prefix: "k",
-  name: "base36",
-  alphabet: "0123456789abcdefghijklmnopqrstuvwxyz"
-});
-var base36upper = baseX2({
-  prefix: "K",
-  name: "base36upper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-});
-
-// node_modules/multiformats/src/bases/base8.js
-var base8_exports = {};
-__export(base8_exports, {
-  base8: () => base8
-});
-var base8 = rfc46482({
-  prefix: "7",
-  name: "base8",
-  alphabet: "01234567",
-  bitsPerChar: 3
-});
-
-// node_modules/multiformats/src/bases/identity.js
-var identity_exports = {};
-__export(identity_exports, {
-  identity: () => identity
-});
-var identity = from2({
-  prefix: "\0",
-  name: "identity",
-  encode: (buf2) => toString2(buf2),
-  decode: (str) => fromString2(str)
-});
-
-// node_modules/multiformats/src/codecs/json.js
-var textEncoder4 = new TextEncoder();
-var textDecoder3 = new TextDecoder();
-
-// node_modules/multiformats/src/hashes/identity.js
-var identity_exports2 = {};
-__export(identity_exports2, {
-  identity: () => identity2
-});
-var code4 = 0;
-var name3 = "identity";
-var encode11 = coerce2;
-var digest = (input) => create2(code4, encode11(input));
-var identity2 = { code: code4, name: name3, encode: encode11, digest };
-
-// node_modules/multiformats/src/hashes/sha2.js
-var sha2_exports = {};
-__export(sha2_exports, {
-  sha256: () => sha256,
-  sha512: () => sha512
-});
-var import_crypto = __toESM(require("crypto"), 1);
-
-// node_modules/multiformats/src/hashes/hasher.js
-var from3 = ({ name: name4, code: code5, encode: encode12 }) => new Hasher(name4, code5, encode12);
-var Hasher = class {
-  /**
-   *
-   * @param {Name} name
-   * @param {Code} code
-   * @param {(input: Uint8Array) => Await} encode
-   */
-  constructor(name4, code5, encode12) {
-    this.name = name4;
-    this.code = code5;
-    this.encode = encode12;
-  }
-  /**
-   * @param {Uint8Array} input
-   * @returns {Await>}
-   */
-  digest(input) {
-    if (input instanceof Uint8Array) {
-      const result = this.encode(input);
-      return result instanceof Uint8Array ? create2(this.code, result) : result.then((digest2) => create2(this.code, digest2));
-    } else {
-      throw Error("Unknown type, must be binary type");
-    }
-  }
-};
-
-// node_modules/multiformats/src/hashes/sha2.js
-var sha256 = from3({
-  name: "sha2-256",
-  code: 18,
-  encode: (input) => coerce2(import_crypto.default.createHash("sha256").update(input).digest())
-});
-var sha512 = from3({
-  name: "sha2-512",
-  code: 19,
-  encode: (input) => coerce2(import_crypto.default.createHash("sha512").update(input).digest())
-});
-
-// node_modules/multiformats/src/basics.js
-var bases = { ...identity_exports, ...base2_exports, ...base8_exports, ...base10_exports, ...base16_exports, ...base32_exports, ...base36_exports, ...base58_exports, ...base64_exports, ...base256emoji_exports };
-var hashes = { ...sha2_exports, ...identity_exports2 };
-
-// node_modules/uint8arrays/dist/src/util/bases.js
-function createCodec(name4, prefix, encode12, decode15) {
-  return {
-    name: name4,
-    prefix,
-    encoder: {
-      name: name4,
-      prefix,
-      encode: encode12
-    },
-    decoder: {
-      decode: decode15
-    }
-  };
-}
-var string = createCodec("utf8", "u", (buf2) => {
-  const decoder = new TextDecoder("utf8");
-  return "u" + decoder.decode(buf2);
-}, (str) => {
-  const encoder = new TextEncoder();
-  return encoder.encode(str.substring(1));
-});
-var ascii = createCodec("ascii", "a", (buf2) => {
-  let string2 = "a";
-  for (let i = 0; i < buf2.length; i++) {
-    string2 += String.fromCharCode(buf2[i]);
-  }
-  return string2;
-}, (str) => {
-  str = str.substring(1);
-  const buf2 = allocUnsafe(str.length);
-  for (let i = 0; i < str.length; i++) {
-    buf2[i] = str.charCodeAt(i);
-  }
-  return buf2;
-});
-var BASES = {
-  utf8: string,
-  "utf-8": string,
-  hex: bases.base16,
-  latin1: ascii,
-  ascii,
-  binary: ascii,
-  ...bases
-};
-var bases_default = BASES;
-
-// node_modules/uint8arrays/dist/src/from-string.js
-function fromString3(string2, encoding = "utf8") {
-  const base3 = bases_default[encoding];
-  if (base3 == null) {
-    throw new Error(`Unsupported encoding "${encoding}"`);
-  }
-  if ((encoding === "utf8" || encoding === "utf-8") && globalThis.Buffer != null && globalThis.Buffer.from != null) {
-    return asUint8Array(globalThis.Buffer.from(string2, "utf-8"));
-  }
-  return base3.decoder.decode(`${base3.prefix}${string2}`);
-}
-
-// node_modules/protons-runtime/dist/src/utils/pool.js
-function pool(size) {
-  const SIZE = size ?? 8192;
-  const MAX = SIZE >>> 1;
-  let slab;
-  let offset = SIZE;
-  return function poolAlloc(size2) {
-    if (size2 < 1 || size2 > MAX) {
-      return allocUnsafe(size2);
-    }
-    if (offset + size2 > SIZE) {
-      slab = allocUnsafe(SIZE);
-      offset = 0;
-    }
-    const buf2 = slab.subarray(offset, offset += size2);
-    if ((offset & 7) !== 0) {
-      offset = (offset | 7) + 1;
-    }
-    return buf2;
-  };
-}
-
-// node_modules/protons-runtime/dist/src/utils/writer.js
-var Op = class {
-  /**
-   * Function to call
-   */
-  fn;
-  /**
-   * Value byte length
-   */
-  len;
-  /**
-   * Next operation
-   */
-  next;
-  /**
-   * Value to write
-   */
-  val;
-  constructor(fn, len, val) {
-    this.fn = fn;
-    this.len = len;
-    this.next = void 0;
-    this.val = val;
-  }
-};
-function noop2() {
-}
-var State = class {
-  /**
-   * Current head
-   */
-  head;
-  /**
-   * Current tail
-   */
-  tail;
-  /**
-   * Current buffer length
-   */
-  len;
-  /**
-   * Next state
-   */
-  next;
-  constructor(writer) {
-    this.head = writer.head;
-    this.tail = writer.tail;
-    this.len = writer.len;
-    this.next = writer.states;
-  }
-};
-var bufferPool = pool();
-function alloc3(size) {
-  if (globalThis.Buffer != null) {
-    return allocUnsafe(size);
-  }
-  return bufferPool(size);
-}
-var Uint8ArrayWriter = class {
-  /**
-   * Current length
-   */
-  len;
-  /**
-   * Operations head
-   */
-  head;
-  /**
-   * Operations tail
-   */
-  tail;
-  /**
-   * Linked forked states
-   */
-  states;
-  constructor() {
-    this.len = 0;
-    this.head = new Op(noop2, 0, 0);
-    this.tail = this.head;
-    this.states = null;
-  }
-  /**
-   * Pushes a new operation to the queue
-   */
-  _push(fn, len, val) {
-    this.tail = this.tail.next = new Op(fn, len, val);
-    this.len += len;
-    return this;
-  }
-  /**
-   * Writes an unsigned 32 bit value as a varint
-   */
-  uint32(value) {
-    this.len += (this.tail = this.tail.next = new VarintOp((value = value >>> 0) < 128 ? 1 : value < 16384 ? 2 : value < 2097152 ? 3 : value < 268435456 ? 4 : 5, value)).len;
-    return this;
-  }
-  /**
-   * Writes a signed 32 bit value as a varint`
-   */
-  int32(value) {
-    return value < 0 ? this._push(writeVarint64, 10, LongBits.fromNumber(value)) : this.uint32(value);
-  }
-  /**
-   * Writes a 32 bit value as a varint, zig-zag encoded
-   */
-  sint32(value) {
-    return this.uint32((value << 1 ^ value >> 31) >>> 0);
-  }
-  /**
-   * Writes an unsigned 64 bit value as a varint
-   */
-  uint64(value) {
-    const bits = LongBits.fromBigInt(value);
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes an unsigned 64 bit value as a varint
-   */
-  uint64Number(value) {
-    const bits = LongBits.fromNumber(value);
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes an unsigned 64 bit value as a varint
-   */
-  uint64String(value) {
-    return this.uint64(BigInt(value));
-  }
-  /**
-   * Writes a signed 64 bit value as a varint
-   */
-  int64(value) {
-    return this.uint64(value);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint
-   */
-  int64Number(value) {
-    return this.uint64Number(value);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint
-   */
-  int64String(value) {
-    return this.uint64String(value);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint, zig-zag encoded
-   */
-  sint64(value) {
-    const bits = LongBits.fromBigInt(value).zzEncode();
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint, zig-zag encoded
-   */
-  sint64Number(value) {
-    const bits = LongBits.fromNumber(value).zzEncode();
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint, zig-zag encoded
-   */
-  sint64String(value) {
-    return this.sint64(BigInt(value));
-  }
-  /**
-   * Writes a boolish value as a varint
-   */
-  bool(value) {
-    return this._push(writeByte, 1, value ? 1 : 0);
-  }
-  /**
-   * Writes an unsigned 32 bit value as fixed 32 bits
-   */
-  fixed32(value) {
-    return this._push(writeFixed32, 4, value >>> 0);
-  }
-  /**
-   * Writes a signed 32 bit value as fixed 32 bits
-   */
-  sfixed32(value) {
-    return this.fixed32(value);
-  }
-  /**
-   * Writes an unsigned 64 bit value as fixed 64 bits
-   */
-  fixed64(value) {
-    const bits = LongBits.fromBigInt(value);
-    return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
-  }
-  /**
-   * Writes an unsigned 64 bit value as fixed 64 bits
-   */
-  fixed64Number(value) {
-    const bits = LongBits.fromNumber(value);
-    return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
-  }
-  /**
-   * Writes an unsigned 64 bit value as fixed 64 bits
-   */
-  fixed64String(value) {
-    return this.fixed64(BigInt(value));
-  }
-  /**
-   * Writes a signed 64 bit value as fixed 64 bits
-   */
-  sfixed64(value) {
-    return this.fixed64(value);
-  }
-  /**
-   * Writes a signed 64 bit value as fixed 64 bits
-   */
-  sfixed64Number(value) {
-    return this.fixed64Number(value);
-  }
-  /**
-   * Writes a signed 64 bit value as fixed 64 bits
-   */
-  sfixed64String(value) {
-    return this.fixed64String(value);
-  }
-  /**
-   * Writes a float (32 bit)
-   */
-  float(value) {
-    return this._push(writeFloatLE, 4, value);
-  }
-  /**
-   * Writes a double (64 bit float).
-   *
-   * @function
-   * @param {number} value - Value to write
-   * @returns {Writer} `this`
-   */
-  double(value) {
-    return this._push(writeDoubleLE, 8, value);
-  }
-  /**
-   * Writes a sequence of bytes
-   */
-  bytes(value) {
-    const len = value.length >>> 0;
-    if (len === 0) {
-      return this._push(writeByte, 1, 0);
-    }
-    return this.uint32(len)._push(writeBytes, len, value);
-  }
-  /**
-   * Writes a string
-   */
-  string(value) {
-    const len = length3(value);
-    return len !== 0 ? this.uint32(len)._push(write, len, value) : this._push(writeByte, 1, 0);
-  }
-  /**
-   * Forks this writer's state by pushing it to a stack.
-   * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.
-   */
-  fork() {
-    this.states = new State(this);
-    this.head = this.tail = new Op(noop2, 0, 0);
-    this.len = 0;
-    return this;
-  }
-  /**
-   * Resets this instance to the last state
-   */
-  reset() {
-    if (this.states != null) {
-      this.head = this.states.head;
-      this.tail = this.states.tail;
-      this.len = this.states.len;
-      this.states = this.states.next;
-    } else {
-      this.head = this.tail = new Op(noop2, 0, 0);
-      this.len = 0;
-    }
-    return this;
-  }
-  /**
-   * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.
-   */
-  ldelim() {
-    const head = this.head;
-    const tail = this.tail;
-    const len = this.len;
-    this.reset().uint32(len);
-    if (len !== 0) {
-      this.tail.next = head.next;
-      this.tail = tail;
-      this.len += len;
-    }
-    return this;
-  }
-  /**
-   * Finishes the write operation
-   */
-  finish() {
-    let head = this.head.next;
-    const buf2 = alloc3(this.len);
-    let pos = 0;
-    while (head != null) {
-      head.fn(head.val, buf2, pos);
-      pos += head.len;
-      head = head.next;
-    }
-    return buf2;
-  }
-};
-function writeByte(val, buf2, pos) {
-  buf2[pos] = val & 255;
-}
-function writeVarint32(val, buf2, pos) {
-  while (val > 127) {
-    buf2[pos++] = val & 127 | 128;
-    val >>>= 7;
-  }
-  buf2[pos] = val;
-}
-var VarintOp = class extends Op {
-  next;
-  constructor(len, val) {
-    super(writeVarint32, len, val);
-    this.next = void 0;
-  }
-};
-function writeVarint64(val, buf2, pos) {
-  while (val.hi !== 0) {
-    buf2[pos++] = val.lo & 127 | 128;
-    val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;
-    val.hi >>>= 7;
-  }
-  while (val.lo > 127) {
-    buf2[pos++] = val.lo & 127 | 128;
-    val.lo = val.lo >>> 7;
-  }
-  buf2[pos++] = val.lo;
-}
-function writeFixed32(val, buf2, pos) {
-  buf2[pos] = val & 255;
-  buf2[pos + 1] = val >>> 8 & 255;
-  buf2[pos + 2] = val >>> 16 & 255;
-  buf2[pos + 3] = val >>> 24;
-}
-function writeBytes(val, buf2, pos) {
-  buf2.set(val, pos);
-}
-if (globalThis.Buffer != null) {
-  Uint8ArrayWriter.prototype.bytes = function(value) {
-    const len = value.length >>> 0;
-    this.uint32(len);
-    if (len > 0) {
-      this._push(writeBytesBuffer, len, value);
-    }
-    return this;
-  };
-  Uint8ArrayWriter.prototype.string = function(value) {
-    const len = globalThis.Buffer.byteLength(value);
-    this.uint32(len);
-    if (len > 0) {
-      this._push(writeStringBuffer, len, value);
-    }
-    return this;
-  };
-}
-function writeBytesBuffer(val, buf2, pos) {
-  buf2.set(val, pos);
-}
-function writeStringBuffer(val, buf2, pos) {
-  if (val.length < 40) {
-    write(val, buf2, pos);
-  } else if (buf2.utf8Write != null) {
-    buf2.utf8Write(val, pos);
-  } else {
-    buf2.set(fromString3(val), pos);
-  }
-}
-function createWriter() {
-  return new Uint8ArrayWriter();
-}
-
-// node_modules/protons-runtime/dist/src/encode.js
-function encodeMessage(message2, codec) {
-  const w = createWriter();
-  codec.encode(message2, w, {
-    lengthDelimited: false
-  });
-  return w.finish();
-}
-
-// node_modules/protons-runtime/dist/src/codec.js
-var CODEC_TYPES;
-(function(CODEC_TYPES2) {
-  CODEC_TYPES2[CODEC_TYPES2["VARINT"] = 0] = "VARINT";
-  CODEC_TYPES2[CODEC_TYPES2["BIT64"] = 1] = "BIT64";
-  CODEC_TYPES2[CODEC_TYPES2["LENGTH_DELIMITED"] = 2] = "LENGTH_DELIMITED";
-  CODEC_TYPES2[CODEC_TYPES2["START_GROUP"] = 3] = "START_GROUP";
-  CODEC_TYPES2[CODEC_TYPES2["END_GROUP"] = 4] = "END_GROUP";
-  CODEC_TYPES2[CODEC_TYPES2["BIT32"] = 5] = "BIT32";
-})(CODEC_TYPES || (CODEC_TYPES = {}));
-function createCodec2(name4, type, encode12, decode15) {
-  return {
-    name: name4,
-    type,
-    encode: encode12,
-    decode: decode15
-  };
-}
-
-// node_modules/protons-runtime/dist/src/codecs/enum.js
-function enumeration(v) {
-  function findValue(val) {
-    if (v[val.toString()] == null) {
-      throw new Error("Invalid enum value");
-    }
-    return v[val];
-  }
-  const encode12 = function enumEncode(val, writer) {
-    const enumValue = findValue(val);
-    writer.int32(enumValue);
-  };
-  const decode15 = function enumDecode(reader) {
-    const val = reader.int32();
-    return findValue(val);
-  };
-  return createCodec2("enum", CODEC_TYPES.VARINT, encode12, decode15);
-}
-
-// node_modules/protons-runtime/dist/src/codecs/message.js
-function message(encode12, decode15) {
-  return createCodec2("message", CODEC_TYPES.LENGTH_DELIMITED, encode12, decode15);
-}
-
-// node_modules/ipfs-unixfs/dist/src/unixfs.js
-var Data;
-(function(Data2) {
-  let DataType;
-  (function(DataType2) {
-    DataType2["Raw"] = "Raw";
-    DataType2["Directory"] = "Directory";
-    DataType2["File"] = "File";
-    DataType2["Metadata"] = "Metadata";
-    DataType2["Symlink"] = "Symlink";
-    DataType2["HAMTShard"] = "HAMTShard";
-  })(DataType = Data2.DataType || (Data2.DataType = {}));
-  let __DataTypeValues;
-  (function(__DataTypeValues2) {
-    __DataTypeValues2[__DataTypeValues2["Raw"] = 0] = "Raw";
-    __DataTypeValues2[__DataTypeValues2["Directory"] = 1] = "Directory";
-    __DataTypeValues2[__DataTypeValues2["File"] = 2] = "File";
-    __DataTypeValues2[__DataTypeValues2["Metadata"] = 3] = "Metadata";
-    __DataTypeValues2[__DataTypeValues2["Symlink"] = 4] = "Symlink";
-    __DataTypeValues2[__DataTypeValues2["HAMTShard"] = 5] = "HAMTShard";
-  })(__DataTypeValues || (__DataTypeValues = {}));
-  (function(DataType2) {
-    DataType2.codec = () => {
-      return enumeration(__DataTypeValues);
-    };
-  })(DataType = Data2.DataType || (Data2.DataType = {}));
-  let _codec;
-  Data2.codec = () => {
-    if (_codec == null) {
-      _codec = message((obj, w, opts = {}) => {
-        if (opts.lengthDelimited !== false) {
-          w.fork();
-        }
-        if (obj.Type != null) {
-          w.uint32(8);
-          Data2.DataType.codec().encode(obj.Type, w);
-        }
-        if (obj.Data != null) {
-          w.uint32(18);
-          w.bytes(obj.Data);
-        }
-        if (obj.filesize != null) {
-          w.uint32(24);
-          w.uint64(obj.filesize);
-        }
-        if (obj.blocksizes != null) {
-          for (const value of obj.blocksizes) {
-            w.uint32(32);
-            w.uint64(value);
-          }
-        }
-        if (obj.hashType != null) {
-          w.uint32(40);
-          w.uint64(obj.hashType);
-        }
-        if (obj.fanout != null) {
-          w.uint32(48);
-          w.uint64(obj.fanout);
-        }
-        if (obj.mode != null) {
-          w.uint32(56);
-          w.uint32(obj.mode);
-        }
-        if (obj.mtime != null) {
-          w.uint32(66);
-          UnixTime.codec().encode(obj.mtime, w);
-        }
-        if (opts.lengthDelimited !== false) {
-          w.ldelim();
-        }
-      }, (reader, length4) => {
-        const obj = {
-          blocksizes: []
-        };
-        const end = length4 == null ? reader.len : reader.pos + length4;
-        while (reader.pos < end) {
-          const tag = reader.uint32();
-          switch (tag >>> 3) {
-            case 1:
-              obj.Type = Data2.DataType.codec().decode(reader);
-              break;
-            case 2:
-              obj.Data = reader.bytes();
-              break;
-            case 3:
-              obj.filesize = reader.uint64();
-              break;
-            case 4:
-              obj.blocksizes.push(reader.uint64());
-              break;
-            case 5:
-              obj.hashType = reader.uint64();
-              break;
-            case 6:
-              obj.fanout = reader.uint64();
-              break;
-            case 7:
-              obj.mode = reader.uint32();
-              break;
-            case 8:
-              obj.mtime = UnixTime.codec().decode(reader, reader.uint32());
-              break;
-            default:
-              reader.skipType(tag & 7);
-              break;
-          }
-        }
-        return obj;
-      });
-    }
-    return _codec;
-  };
-  Data2.encode = (obj) => {
-    return encodeMessage(obj, Data2.codec());
-  };
-  Data2.decode = (buf2) => {
-    return decodeMessage(buf2, Data2.codec());
-  };
-})(Data || (Data = {}));
-var UnixTime;
-(function(UnixTime2) {
-  let _codec;
-  UnixTime2.codec = () => {
-    if (_codec == null) {
-      _codec = message((obj, w, opts = {}) => {
-        if (opts.lengthDelimited !== false) {
-          w.fork();
-        }
-        if (obj.Seconds != null) {
-          w.uint32(8);
-          w.int64(obj.Seconds);
-        }
-        if (obj.FractionalNanoseconds != null) {
-          w.uint32(21);
-          w.fixed32(obj.FractionalNanoseconds);
-        }
-        if (opts.lengthDelimited !== false) {
-          w.ldelim();
-        }
-      }, (reader, length4) => {
-        const obj = {};
-        const end = length4 == null ? reader.len : reader.pos + length4;
-        while (reader.pos < end) {
-          const tag = reader.uint32();
-          switch (tag >>> 3) {
-            case 1:
-              obj.Seconds = reader.int64();
-              break;
-            case 2:
-              obj.FractionalNanoseconds = reader.fixed32();
-              break;
-            default:
-              reader.skipType(tag & 7);
-              break;
-          }
-        }
-        return obj;
-      });
-    }
-    return _codec;
-  };
-  UnixTime2.encode = (obj) => {
-    return encodeMessage(obj, UnixTime2.codec());
-  };
-  UnixTime2.decode = (buf2) => {
-    return decodeMessage(buf2, UnixTime2.codec());
-  };
-})(UnixTime || (UnixTime = {}));
-var Metadata;
-(function(Metadata2) {
-  let _codec;
-  Metadata2.codec = () => {
-    if (_codec == null) {
-      _codec = message((obj, w, opts = {}) => {
-        if (opts.lengthDelimited !== false) {
-          w.fork();
-        }
-        if (obj.MimeType != null) {
-          w.uint32(10);
-          w.string(obj.MimeType);
-        }
-        if (opts.lengthDelimited !== false) {
-          w.ldelim();
-        }
-      }, (reader, length4) => {
-        const obj = {};
-        const end = length4 == null ? reader.len : reader.pos + length4;
-        while (reader.pos < end) {
-          const tag = reader.uint32();
-          switch (tag >>> 3) {
-            case 1:
-              obj.MimeType = reader.string();
-              break;
-            default:
-              reader.skipType(tag & 7);
-              break;
-          }
-        }
-        return obj;
-      });
-    }
-    return _codec;
-  };
-  Metadata2.encode = (obj) => {
-    return encodeMessage(obj, Metadata2.codec());
-  };
-  Metadata2.decode = (buf2) => {
-    return decodeMessage(buf2, Metadata2.codec());
-  };
-})(Metadata || (Metadata = {}));
-
-// node_modules/ipfs-unixfs/dist/src/index.js
-var types = {
-  Raw: "raw",
-  Directory: "directory",
-  File: "file",
-  Metadata: "metadata",
-  Symlink: "symlink",
-  HAMTShard: "hamt-sharded-directory"
-};
-var dirTypes = [
-  "directory",
-  "hamt-sharded-directory"
-];
-var DEFAULT_FILE_MODE = parseInt("0644", 8);
-var DEFAULT_DIRECTORY_MODE = parseInt("0755", 8);
-var UnixFS = class _UnixFS {
-  /**
-   * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md
-   */
-  static unmarshal(marshaled) {
-    const message2 = Data.decode(marshaled);
-    const data = new _UnixFS({
-      type: types[message2.Type != null ? message2.Type.toString() : "File"],
-      data: message2.Data,
-      blockSizes: message2.blocksizes,
-      mode: message2.mode,
-      mtime: message2.mtime != null ? {
-        secs: message2.mtime.Seconds ?? 0n,
-        nsecs: message2.mtime.FractionalNanoseconds
-      } : void 0,
-      fanout: message2.fanout
-    });
-    data._originalMode = message2.mode ?? 0;
-    return data;
-  }
-  type;
-  data;
-  blockSizes;
-  hashType;
-  fanout;
-  mtime;
-  _mode;
-  _originalMode;
-  constructor(options = {
-    type: "file"
-  }) {
-    const { type, data, blockSizes, hashType, fanout, mtime, mode } = options;
-    if (type != null && !Object.values(types).includes(type)) {
-      throw (0, import_err_code.default)(new Error("Type: " + type + " is not valid"), "ERR_INVALID_TYPE");
-    }
-    this.type = type ?? "file";
-    this.data = data;
-    this.hashType = hashType;
-    this.fanout = fanout;
-    this.blockSizes = blockSizes ?? [];
-    this._originalMode = 0;
-    this.mode = mode;
-    this.mtime = mtime;
-  }
-  set mode(mode) {
-    if (mode == null) {
-      this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE;
-    } else {
-      this._mode = mode & 4095;
-    }
-  }
-  get mode() {
-    return this._mode;
-  }
-  isDirectory() {
-    return dirTypes.includes(this.type);
-  }
-  addBlockSize(size) {
-    this.blockSizes.push(size);
-  }
-  removeBlockSize(index) {
-    this.blockSizes.splice(index, 1);
-  }
-  /**
-   * Returns `0n` for directories or `data.length + sum(blockSizes)` for everything else
-   */
-  fileSize() {
-    if (this.isDirectory()) {
-      return 0n;
-    }
-    let sum = 0n;
-    this.blockSizes.forEach((size) => {
-      sum += size;
-    });
-    if (this.data != null) {
-      sum += BigInt(this.data.length);
-    }
-    return sum;
-  }
-  /**
-   * encode to protobuf Uint8Array
-   */
-  marshal() {
-    let type;
-    switch (this.type) {
-      case "raw":
-        type = Data.DataType.Raw;
-        break;
-      case "directory":
-        type = Data.DataType.Directory;
-        break;
-      case "file":
-        type = Data.DataType.File;
-        break;
-      case "metadata":
-        type = Data.DataType.Metadata;
-        break;
-      case "symlink":
-        type = Data.DataType.Symlink;
-        break;
-      case "hamt-sharded-directory":
-        type = Data.DataType.HAMTShard;
-        break;
-      default:
-        throw (0, import_err_code.default)(new Error(`Type: ${type} is not valid`), "ERR_INVALID_TYPE");
-    }
-    let data = this.data;
-    if (this.data == null || this.data.length === 0) {
-      data = void 0;
-    }
-    let mode;
-    if (this.mode != null) {
-      mode = this._originalMode & 4294963200 | (this.mode ?? 0);
-      if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {
-        mode = void 0;
-      }
-      if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {
-        mode = void 0;
-      }
-    }
-    let mtime;
-    if (this.mtime != null) {
-      mtime = {
-        Seconds: this.mtime.secs,
-        FractionalNanoseconds: this.mtime.nsecs
-      };
-    }
-    return Data.encode({
-      Type: type,
-      Data: data,
-      filesize: this.isDirectory() ? void 0 : this.fileSize(),
-      blocksizes: this.blockSizes,
-      hashType: this.hashType,
-      fanout: this.fanout,
-      mode,
-      mtime
-    });
-  }
-};
-
-// node_modules/progress-events/dist/src/index.js
-var CustomProgressEvent = class extends Event {
-  constructor(type, detail) {
-    super(type);
-    this.detail = detail;
-  }
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/utils/persist.js
-var persist = async (buffer2, blockstore, options) => {
-  if (options.codec == null) {
-    options.codec = src_exports2;
-  }
-  const multihash = await sha256.digest(buffer2);
-  const cid = CID2.create(options.cidVersion, options.codec.code, multihash);
-  await blockstore.put(cid, buffer2, options);
-  return cid;
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/buffer-importer.js
-function defaultBufferImporter(options) {
-  return async function* bufferImporter(file, blockstore) {
-    let bytesWritten = 0n;
-    for await (let block of file.content) {
-      yield async () => {
-        var _a;
-        let unixfs2;
-        const opts = {
-          codec: src_exports2,
-          cidVersion: options.cidVersion,
-          onProgress: options.onProgress
-        };
-        if (options.rawLeaves) {
-          opts.codec = raw_exports;
-          opts.cidVersion = 1;
-        } else {
-          unixfs2 = new UnixFS({
-            type: options.leafType,
-            data: block
-          });
-          block = encode7({
-            Data: unixfs2.marshal(),
-            Links: []
-          });
-        }
-        const cid = await persist(block, blockstore, opts);
-        bytesWritten += BigInt(block.byteLength);
-        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:write", {
-          bytesWritten,
-          cid,
-          path: file.path
-        }));
-        return {
-          cid,
-          unixfs: unixfs2,
-          size: BigInt(block.length),
-          block
-        };
-      };
-    }
-  };
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js
-var import_err_code2 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/dir.js
-var dirBuilder = async (dir, blockstore, options) => {
-  const unixfs2 = new UnixFS({
-    type: "directory",
-    mtime: dir.mtime,
-    mode: dir.mode
-  });
-  const block = encode7(prepare({ Data: unixfs2.marshal() }));
-  const cid = await persist(block, blockstore, options);
-  const path6 = dir.path;
-  return {
-    cid,
-    path: path6,
-    unixfs: unixfs2,
-    size: BigInt(block.length),
-    originalPath: dir.originalPath,
-    block
-  };
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/file.js
-async function* buildFileBatch(file, blockstore, options) {
-  let count = -1;
-  let previous;
-  for await (const entry of parallelBatch(options.bufferImporter(file, blockstore), options.blockWriteConcurrency)) {
-    count++;
-    if (count === 0) {
-      previous = {
-        ...entry,
-        single: true
-      };
-      continue;
-    } else if (count === 1 && previous != null) {
-      yield {
-        ...previous,
-        block: void 0,
-        single: void 0
-      };
-      previous = void 0;
-    }
-    yield {
-      ...entry,
-      block: void 0
-    };
-  }
-  if (previous != null) {
-    yield previous;
-  }
-}
-function isSingleBlockImport(result) {
-  return result.single === true;
-}
-var reduce = (file, blockstore, options) => {
-  const reducer = async function(leaves) {
-    var _a, _b;
-    if (leaves.length === 1 && isSingleBlockImport(leaves[0]) && options.reduceSingleLeafToSelf) {
-      const leaf = leaves[0];
-      let node2 = leaf.block;
-      if (isSingleBlockImport(leaf) && (file.mtime !== void 0 || file.mode !== void 0)) {
-        leaf.unixfs = new UnixFS({
-          type: "file",
-          mtime: file.mtime,
-          mode: file.mode,
-          data: leaf.block
-        });
-        node2 = { Data: leaf.unixfs.marshal(), Links: [] };
-        leaf.block = encode7(prepare(node2));
-        leaf.cid = await persist(leaf.block, blockstore, {
-          ...options,
-          cidVersion: options.cidVersion
-        });
-        leaf.size = BigInt(leaf.block.length);
-      }
-      (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", {
-        cid: leaf.cid,
-        path: leaf.originalPath
-      }));
-      return {
-        cid: leaf.cid,
-        path: file.path,
-        unixfs: leaf.unixfs,
-        size: leaf.size,
-        originalPath: leaf.originalPath
-      };
-    }
-    const f = new UnixFS({
-      type: "file",
-      mtime: file.mtime,
-      mode: file.mode
-    });
-    const links = leaves.filter((leaf) => {
-      var _a2, _b2;
-      if (leaf.cid.code === code3 && leaf.size > 0) {
-        return true;
-      }
-      if (leaf.unixfs != null && leaf.unixfs.data == null && leaf.unixfs.fileSize() > 0n) {
-        return true;
-      }
-      return Boolean((_b2 = (_a2 = leaf.unixfs) == null ? void 0 : _a2.data) == null ? void 0 : _b2.length);
-    }).map((leaf) => {
-      var _a2;
-      if (leaf.cid.code === code3) {
-        f.addBlockSize(leaf.size);
-        return {
-          Name: "",
-          Tsize: Number(leaf.size),
-          Hash: leaf.cid
-        };
-      }
-      if (leaf.unixfs == null || leaf.unixfs.data == null) {
-        f.addBlockSize(((_a2 = leaf.unixfs) == null ? void 0 : _a2.fileSize()) ?? 0n);
-      } else {
-        f.addBlockSize(BigInt(leaf.unixfs.data.length));
-      }
-      return {
-        Name: "",
-        Tsize: Number(leaf.size),
-        Hash: leaf.cid
-      };
-    });
-    const node = {
-      Data: f.marshal(),
-      Links: links
-    };
-    const block = encode7(prepare(node));
-    const cid = await persist(block, blockstore, options);
-    (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", {
-      cid,
-      path: file.originalPath
-    }));
-    return {
-      cid,
-      path: file.path,
-      unixfs: f,
-      size: BigInt(block.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)),
-      originalPath: file.originalPath,
-      block
-    };
-  };
-  return reducer;
-};
-var fileBuilder = async (file, block, options) => {
-  return options.layout(buildFileBatch(file, block, options), reduce(file, block, options));
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js
-function isIterable(thing) {
-  return Symbol.iterator in thing;
-}
-function isAsyncIterable5(thing) {
-  return Symbol.asyncIterator in thing;
-}
-function contentAsAsyncIterable(content) {
-  try {
-    if (content instanceof Uint8Array) {
-      return async function* () {
-        yield content;
-      }();
-    } else if (isIterable(content)) {
-      return async function* () {
-        yield* content;
-      }();
-    } else if (isAsyncIterable5(content)) {
-      return content;
-    }
-  } catch {
-    throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-  }
-  throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-}
-function defaultDagBuilder(options) {
-  return async function* dagBuilder(source, blockstore) {
-    for await (const entry of source) {
-      let originalPath;
-      if (entry.path != null) {
-        originalPath = entry.path;
-        entry.path = entry.path.split("/").filter((path6) => path6 != null && path6 !== ".").join("/");
-      }
-      if (isFileCandidate(entry)) {
-        const file = {
-          path: entry.path,
-          mtime: entry.mtime,
-          mode: entry.mode,
-          content: async function* () {
-            var _a;
-            let bytesRead = 0n;
-            for await (const chunk of options.chunker(options.chunkValidator(contentAsAsyncIterable(entry.content)))) {
-              const currentChunkSize = BigInt(chunk.byteLength);
-              bytesRead += currentChunkSize;
-              (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:read", {
-                bytesRead,
-                chunkSize: currentChunkSize,
-                path: entry.path
-              }));
-              yield chunk;
-            }
-          }(),
-          originalPath
-        };
-        yield async () => fileBuilder(file, blockstore, options);
-      } else if (entry.path != null) {
-        const dir = {
-          path: entry.path,
-          mtime: entry.mtime,
-          mode: entry.mode,
-          originalPath
-        };
-        yield async () => dirBuilder(dir, blockstore, options);
-      } else {
-        throw new Error("Import candidate must have content or path or both");
-      }
-    }
-  };
-}
-function isFileCandidate(entry) {
-  return entry.content != null;
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.js
-var import_err_code3 = __toESM(require_err_code(), 1);
-var defaultChunkValidator = () => {
-  return async function* validateChunks(source) {
-    for await (const content of source) {
-      if (content.length === void 0) {
-        throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-      }
-      if (typeof content === "string" || content instanceof String) {
-        yield fromString3(content.toString());
-      } else if (Array.isArray(content)) {
-        yield Uint8Array.from(content);
-      } else if (content instanceof Uint8Array) {
-        yield content;
-      } else {
-        throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-      }
-    }
-  };
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/layout/balanced.js
-var DEFAULT_MAX_CHILDREN_PER_NODE = 174;
-function balanced(options) {
-  const maxChildrenPerNode = (options == null ? void 0 : options.maxChildrenPerNode) ?? DEFAULT_MAX_CHILDREN_PER_NODE;
-  return async function balancedLayout(source, reduce2) {
-    const roots = [];
-    for await (const chunked of src_default5(source, maxChildrenPerNode)) {
-      roots.push(await reduce2(chunked));
-    }
-    if (roots.length > 1) {
-      return balancedLayout(roots, reduce2);
-    }
-    return roots[0];
-  };
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dir.js
-var Dir = class {
-  options;
-  root;
-  dir;
-  path;
-  dirty;
-  flat;
-  parent;
-  parentKey;
-  unixfs;
-  mode;
-  mtime;
-  cid;
-  size;
-  nodeSize;
-  constructor(props, options) {
-    this.options = options ?? {};
-    this.root = props.root;
-    this.dir = props.dir;
-    this.path = props.path;
-    this.dirty = props.dirty;
-    this.flat = props.flat;
-    this.parent = props.parent;
-    this.parentKey = props.parentKey;
-    this.unixfs = props.unixfs;
-    this.mode = props.mode;
-    this.mtime = props.mtime;
-  }
-};
-var CID_V0 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn");
-var CID_V1 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi");
-
-// node_modules/ipfs-unixfs-importer/dist/src/dir-flat.js
-var DirFlat = class extends Dir {
-  _children;
-  constructor(props, options) {
-    super(props, options);
-    this._children = /* @__PURE__ */ new Map();
-  }
-  async put(name4, value) {
-    this.cid = void 0;
-    this.size = void 0;
-    this.nodeSize = void 0;
-    this._children.set(name4, value);
-  }
-  async get(name4) {
-    return Promise.resolve(this._children.get(name4));
-  }
-  childCount() {
-    return this._children.size;
-  }
-  directChildrenCount() {
-    return this.childCount();
-  }
-  onlyChild() {
-    return this._children.values().next().value;
-  }
-  async *eachChildSeries() {
-    for (const [key, child] of this._children.entries()) {
-      yield {
-        key,
-        child
-      };
-    }
-  }
-  estimateNodeSize() {
-    if (this.nodeSize !== void 0) {
-      return this.nodeSize;
-    }
-    this.nodeSize = 0;
-    for (const [name4, child] of this._children.entries()) {
-      if (child.size != null && child.cid != null) {
-        this.nodeSize += name4.length + (this.options.cidVersion === 1 ? CID_V1.bytes.byteLength : CID_V0.bytes.byteLength);
-      }
-    }
-    return this.nodeSize;
-  }
-  async *flush(block) {
-    const links = [];
-    for (const [name4, child] of this._children.entries()) {
-      let result = child;
-      if (child instanceof Dir) {
-        for await (const entry of child.flush(block)) {
-          result = entry;
-          yield entry;
-        }
-      }
-      if (result.size != null && result.cid != null) {
-        links.push({
-          Name: name4,
-          Tsize: Number(result.size),
-          Hash: result.cid
-        });
-      }
-    }
-    const unixfs2 = new UnixFS({
-      type: "directory",
-      mtime: this.mtime,
-      mode: this.mode
-    });
-    const node = { Data: unixfs2.marshal(), Links: links };
-    const buffer2 = encode7(prepare(node));
-    const cid = await persist(buffer2, block, this.options);
-    const size = buffer2.length + node.Links.reduce(
-      /**
-       * @param {number} acc
-       * @param {PBLink} curr
-       */
-      (acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize),
-      0
-    );
-    this.cid = cid;
-    this.size = size;
-    yield {
-      cid,
-      unixfs: unixfs2,
-      path: this.path,
-      size: BigInt(size)
-    };
-  }
-};
-
-// node_modules/@multiformats/murmur3/src/index.js
-var import_murmurhash3js_revisited = __toESM(require_murmurhash3js_revisited(), 1);
-function fromNumberTo32BitBuf(number) {
-  const bytes = new Array(4);
-  for (let i = 0; i < 4; i++) {
-    bytes[i] = number & 255;
-    number = number >> 8;
-  }
-  return new Uint8Array(bytes);
-}
-var murmur332 = from3({
-  name: "murmur3-32",
-  code: 35,
-  encode: (input) => fromNumberTo32BitBuf(import_murmurhash3js_revisited.default.x86.hash32(input))
-});
-var murmur3128 = from3({
-  name: "murmur3-128",
-  code: 34,
-  encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input))
-});
-var murmur364 = from3({
-  name: "murmur3-x64-64",
-  code: 34,
-  encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input)).subarray(0, 8)
-});
-
-// node_modules/hamt-sharding/dist/src/bucket.js
-var import_sparse_array = __toESM(require_sparse_array(), 1);
-var Bucket = class _Bucket {
-  constructor(options, parent, posAtParent = 0) {
-    this._options = options;
-    this._popCount = 0;
-    this._parent = parent;
-    this._posAtParent = posAtParent;
-    this._children = new import_sparse_array.default();
-    this.key = null;
-  }
-  async put(key, value) {
-    const place = await this._findNewBucketAndPos(key);
-    await place.bucket._putAt(place, key, value);
-  }
-  async get(key) {
-    const child = await this._findChild(key);
-    if (child != null) {
-      return child.value;
-    }
-  }
-  async del(key) {
-    const place = await this._findPlace(key);
-    const child = place.bucket._at(place.pos);
-    if (child != null && child.key === key) {
-      place.bucket._delAt(place.pos);
-    }
-  }
-  leafCount() {
-    const children = this._children.compactArray();
-    return children.reduce((acc, child) => {
-      if (child instanceof _Bucket) {
-        return acc + child.leafCount();
-      }
-      return acc + 1;
-    }, 0);
-  }
-  childrenCount() {
-    return this._children.length;
-  }
-  onlyChild() {
-    return this._children.get(0);
-  }
-  *eachLeafSeries() {
-    const children = this._children.compactArray();
-    for (const child of children) {
-      if (child instanceof _Bucket) {
-        yield* child.eachLeafSeries();
-      } else {
-        yield child;
-      }
-    }
-  }
-  serialize(map2, reduce2) {
-    const acc = [];
-    return reduce2(this._children.reduce((acc2, child, index) => {
-      if (child != null) {
-        if (child instanceof _Bucket) {
-          acc2.push(child.serialize(map2, reduce2));
-        } else {
-          acc2.push(map2(child, index));
-        }
-      }
-      return acc2;
-    }, acc));
-  }
-  async asyncTransform(asyncMap, asyncReduce) {
-    return await asyncTransformBucket(this, asyncMap, asyncReduce);
-  }
-  toJSON() {
-    return this.serialize(mapNode, reduceNodes);
-  }
-  prettyPrint() {
-    return JSON.stringify(this.toJSON(), null, "  ");
-  }
-  tableSize() {
-    return Math.pow(2, this._options.bits);
-  }
-  async _findChild(key) {
-    const result = await this._findPlace(key);
-    const child = result.bucket._at(result.pos);
-    if (child instanceof _Bucket) {
-      return void 0;
-    }
-    if (child != null && child.key === key) {
-      return child;
-    }
-  }
-  async _findPlace(key) {
-    const hashValue = this._options.hash(typeof key === "string" ? fromString3(key) : key);
-    const index = await hashValue.take(this._options.bits);
-    const child = this._children.get(index);
-    if (child instanceof _Bucket) {
-      return await child._findPlace(hashValue);
-    }
-    return {
-      bucket: this,
-      pos: index,
-      hash: hashValue,
-      existingChild: child
-    };
-  }
-  async _findNewBucketAndPos(key) {
-    const place = await this._findPlace(key);
-    if (place.existingChild != null && place.existingChild.key !== key) {
-      const bucket = new _Bucket(this._options, place.bucket, place.pos);
-      place.bucket._putObjectAt(place.pos, bucket);
-      const newPlace = await bucket._findPlace(place.existingChild.hash);
-      newPlace.bucket._putAt(newPlace, place.existingChild.key, place.existingChild.value);
-      return await bucket._findNewBucketAndPos(place.hash);
-    }
-    return place;
-  }
-  _putAt(place, key, value) {
-    this._putObjectAt(place.pos, {
-      key,
-      value,
-      hash: place.hash
-    });
-  }
-  _putObjectAt(pos, object) {
-    if (this._children.get(pos) == null) {
-      this._popCount++;
-    }
-    this._children.set(pos, object);
-  }
-  _delAt(pos) {
-    if (pos === -1) {
-      throw new Error("Invalid position");
-    }
-    if (this._children.get(pos) != null) {
-      this._popCount--;
-    }
-    this._children.unset(pos);
-    this._level();
-  }
-  _level() {
-    if (this._parent != null && this._popCount <= 1) {
-      if (this._popCount === 1) {
-        const onlyChild = this._children.find(exists);
-        if (onlyChild != null && !(onlyChild instanceof _Bucket)) {
-          const hash = onlyChild.hash;
-          hash.untake(this._options.bits);
-          const place = {
-            pos: this._posAtParent,
-            hash,
-            bucket: this._parent
-          };
-          this._parent._putAt(place, onlyChild.key, onlyChild.value);
-        }
-      } else {
-        this._parent._delAt(this._posAtParent);
-      }
-    }
-  }
-  _at(index) {
-    return this._children.get(index);
-  }
-};
-function exists(o) {
-  return Boolean(o);
-}
-function mapNode(node, _) {
-  return node.key;
-}
-function reduceNodes(nodes) {
-  return nodes;
-}
-async function asyncTransformBucket(bucket, asyncMap, asyncReduce) {
-  const output = [];
-  for (const child of bucket._children.compactArray()) {
-    if (child instanceof Bucket) {
-      await asyncTransformBucket(child, asyncMap, asyncReduce);
-    } else {
-      const mappedChildren = await asyncMap(child);
-      output.push({
-        bitField: bucket._children.bitField(),
-        children: mappedChildren
-      });
-    }
-  }
-  return await asyncReduce(output);
-}
-
-// node_modules/hamt-sharding/dist/src/consumable-buffer.js
-var START_MASKS = [
-  255,
-  254,
-  252,
-  248,
-  240,
-  224,
-  192,
-  128
-];
-var STOP_MASKS = [
-  1,
-  3,
-  7,
-  15,
-  31,
-  63,
-  127,
-  255
-];
-var ConsumableBuffer = class {
-  constructor(value) {
-    this._value = value;
-    this._currentBytePos = value.length - 1;
-    this._currentBitPos = 7;
-  }
-  availableBits() {
-    return this._currentBitPos + 1 + this._currentBytePos * 8;
-  }
-  totalBits() {
-    return this._value.length * 8;
-  }
-  take(bits) {
-    let pendingBits = bits;
-    let result = 0;
-    while (pendingBits > 0 && this._haveBits()) {
-      const byte = this._value[this._currentBytePos];
-      const availableBits = this._currentBitPos + 1;
-      const taking = Math.min(availableBits, pendingBits);
-      const value = byteBitsToInt(byte, availableBits - taking, taking);
-      result = (result << taking) + value;
-      pendingBits -= taking;
-      this._currentBitPos -= taking;
-      if (this._currentBitPos < 0) {
-        this._currentBitPos = 7;
-        this._currentBytePos--;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    this._currentBitPos += bits;
-    while (this._currentBitPos > 7) {
-      this._currentBitPos -= 8;
-      this._currentBytePos += 1;
-    }
-  }
-  _haveBits() {
-    return this._currentBytePos >= 0;
-  }
-};
-function byteBitsToInt(byte, start, length4) {
-  const mask = maskFor(start, length4);
-  return (byte & mask) >>> start;
-}
-function maskFor(start, length4) {
-  return START_MASKS[start] & STOP_MASKS[Math.min(length4 + start - 1, 7)];
-}
-
-// node_modules/hamt-sharding/dist/src/consumable-hash.js
-function wrapHash(hashFn2) {
-  function hashing(value) {
-    if (value instanceof InfiniteHash) {
-      return value;
-    } else {
-      return new InfiniteHash(value, hashFn2);
-    }
-  }
-  return hashing;
-}
-var InfiniteHash = class {
-  constructor(value, hashFn2) {
-    if (!(value instanceof Uint8Array)) {
-      throw new Error("can only hash Uint8Arrays");
-    }
-    this._value = value;
-    this._hashFn = hashFn2;
-    this._depth = -1;
-    this._availableBits = 0;
-    this._currentBufferIndex = 0;
-    this._buffers = [];
-  }
-  async take(bits) {
-    let pendingBits = bits;
-    while (this._availableBits < pendingBits) {
-      await this._produceMoreBits();
-    }
-    let result = 0;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const available = Math.min(hash.availableBits(), pendingBits);
-      const took = hash.take(available);
-      result = (result << available) + took;
-      pendingBits -= available;
-      this._availableBits -= available;
-      if (hash.availableBits() === 0) {
-        this._currentBufferIndex++;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    let pendingBits = bits;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits);
-      hash.untake(availableForUntake);
-      pendingBits -= availableForUntake;
-      this._availableBits += availableForUntake;
-      if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {
-        this._depth--;
-        this._currentBufferIndex--;
-      }
-    }
-  }
-  async _produceMoreBits() {
-    this._depth++;
-    const value = this._depth > 0 ? concat2([this._value, Uint8Array.from([this._depth])]) : this._value;
-    const hashValue = await this._hashFn(value);
-    const buffer2 = new ConsumableBuffer(hashValue);
-    this._buffers.push(buffer2);
-    this._availableBits += buffer2.availableBits();
-  }
-};
-
-// node_modules/hamt-sharding/dist/src/index.js
-function createHAMT(options) {
-  if (options == null || options.hashFn == null) {
-    throw new Error("please define an options.hashFn");
-  }
-  const bucketOptions = {
-    bits: options.bits ?? 8,
-    hash: wrapHash(options.hashFn)
-  };
-  return new Bucket(bucketOptions);
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dir-sharded.js
-async function hamtHashFn(buf2) {
-  return (await murmur3128.encode(buf2)).slice(0, 8).reverse();
-}
-var HAMT_HASH_CODE = BigInt(34);
-var DEFAULT_FANOUT_BITS = 8;
-var DirSharded = class extends Dir {
-  _bucket;
-  constructor(props, options) {
-    super(props, options);
-    this._bucket = createHAMT({
-      hashFn: hamtHashFn,
-      bits: options.shardFanoutBits ?? DEFAULT_FANOUT_BITS
-    });
-  }
-  async put(name4, value) {
-    this.cid = void 0;
-    this.size = void 0;
-    this.nodeSize = void 0;
-    await this._bucket.put(name4, value);
-  }
-  async get(name4) {
-    return this._bucket.get(name4);
-  }
-  childCount() {
-    return this._bucket.leafCount();
-  }
-  directChildrenCount() {
-    return this._bucket.childrenCount();
-  }
-  onlyChild() {
-    return this._bucket.onlyChild();
-  }
-  async *eachChildSeries() {
-    for await (const { key, value } of this._bucket.eachLeafSeries()) {
-      yield {
-        key,
-        child: value
-      };
-    }
-  }
-  estimateNodeSize() {
-    if (this.nodeSize !== void 0) {
-      return this.nodeSize;
-    }
-    this.nodeSize = calculateSize(this._bucket, this, this.options);
-    return this.nodeSize;
-  }
-  async *flush(blockstore) {
-    for await (const entry of flush(this._bucket, blockstore, this, this.options)) {
-      yield {
-        ...entry,
-        path: this.path
-      };
-    }
-  }
-};
-var dir_sharded_default = DirSharded;
-async function* flush(bucket, blockstore, shardRoot, options) {
-  const children = bucket._children;
-  const padLength = (bucket.tableSize() - 1).toString(16).length;
-  const links = [];
-  let childrenSize = 0n;
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0");
-    if (child instanceof Bucket) {
-      let shard;
-      for await (const subShard of flush(child, blockstore, null, options)) {
-        shard = subShard;
-      }
-      if (shard == null) {
-        throw new Error("Could not flush sharded directory, no subshard found");
-      }
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(shard.size),
-        Hash: shard.cid
-      });
-      childrenSize += shard.size;
-    } else if (isDir(child.value)) {
-      const dir2 = child.value;
-      let flushedDir;
-      for await (const entry of dir2.flush(blockstore)) {
-        flushedDir = entry;
-        yield flushedDir;
-      }
-      if (flushedDir == null) {
-        throw new Error("Did not flush dir");
-      }
-      const label = labelPrefix + child.key;
-      links.push({
-        Name: label,
-        Tsize: Number(flushedDir.size),
-        Hash: flushedDir.cid
-      });
-      childrenSize += flushedDir.size;
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size2 = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size2),
-        Hash: value.cid
-      });
-      childrenSize += BigInt(size2 ?? 0);
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: HAMT_HASH_CODE,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const node = {
-    Data: dir.marshal(),
-    Links: links
-  };
-  const buffer2 = encode7(prepare(node));
-  const cid = await persist(buffer2, blockstore, options);
-  const size = BigInt(buffer2.byteLength) + childrenSize;
-  yield {
-    cid,
-    unixfs: dir,
-    size
-  };
-}
-function isDir(obj) {
-  return typeof obj.flush === "function";
-}
-function calculateSize(bucket, shardRoot, options) {
-  const children = bucket._children;
-  const padLength = (bucket.tableSize() - 1).toString(16).length;
-  const links = [];
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0");
-    if (child instanceof Bucket) {
-      const size = calculateSize(child, null, options);
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V0 : CID_V1
-      });
-    } else if (typeof child.value.flush === "function") {
-      const dir2 = child.value;
-      const size = dir2.nodeSize();
-      links.push({
-        Name: labelPrefix + child.key,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V0 : CID_V1
-      });
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size),
-        Hash: value.cid
-      });
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: HAMT_HASH_CODE,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const buffer2 = encode7(prepare({
-    Data: dir.marshal(),
-    Links: links
-  }));
-  return buffer2.length;
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/flat-to-shard.js
-async function flatToShard(child, dir, threshold, options) {
-  let newDir = dir;
-  if (dir instanceof DirFlat && dir.estimateNodeSize() > threshold) {
-    newDir = await convertToShard(dir, options);
-  }
-  const parent = newDir.parent;
-  if (parent != null) {
-    if (newDir !== dir) {
-      if (child != null) {
-        child.parent = newDir;
-      }
-      if (newDir.parentKey == null) {
-        throw new Error("No parent key found");
-      }
-      await parent.put(newDir.parentKey, newDir);
-    }
-    return flatToShard(newDir, parent, threshold, options);
-  }
-  return newDir;
-}
-async function convertToShard(oldDir, options) {
-  const newDir = new dir_sharded_default({
-    root: oldDir.root,
-    dir: true,
-    parent: oldDir.parent,
-    parentKey: oldDir.parentKey,
-    path: oldDir.path,
-    dirty: oldDir.dirty,
-    flat: false,
-    mtime: oldDir.mtime,
-    mode: oldDir.mode
-  }, options);
-  for await (const { key, child } of oldDir.eachChildSeries()) {
-    await newDir.put(key, child);
-  }
-  return newDir;
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/utils/to-path-components.js
-var toPathComponents = (path6 = "") => {
-  return path6.split(/(? 1) {
-      yield* flushAndYield(tree, block);
-    } else {
-      for await (const unwrapped of tree.eachChildSeries()) {
-        if (unwrapped == null) {
-          continue;
-        }
-        yield* flushAndYield(unwrapped.child, block);
-      }
-    }
-  };
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/index.js
-async function* importer(source, blockstore, options = {}) {
-  let candidates;
-  if (Symbol.asyncIterator in source || Symbol.iterator in source) {
-    candidates = source;
-  } else {
-    candidates = [source];
-  }
-  const wrapWithDirectory = options.wrapWithDirectory ?? false;
-  const shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144;
-  const shardFanoutBits = options.shardFanoutBits ?? 8;
-  const cidVersion = options.cidVersion ?? 1;
-  const rawLeaves = options.rawLeaves ?? true;
-  const leafType = options.leafType ?? "file";
-  const fileImportConcurrency = options.fileImportConcurrency ?? 50;
-  const blockWriteConcurrency = options.blockWriteConcurrency ?? 10;
-  const reduceSingleLeafToSelf = options.reduceSingleLeafToSelf ?? true;
-  const chunker = options.chunker ?? fixedSize();
-  const chunkValidator = options.chunkValidator ?? defaultChunkValidator();
-  const buildDag = options.dagBuilder ?? defaultDagBuilder({
-    chunker,
-    chunkValidator,
-    wrapWithDirectory,
-    layout: options.layout ?? balanced(),
-    bufferImporter: options.bufferImporter ?? defaultBufferImporter({
-      cidVersion,
-      rawLeaves,
-      leafType,
-      onProgress: options.onProgress
-    }),
-    blockWriteConcurrency,
-    reduceSingleLeafToSelf,
-    cidVersion,
-    onProgress: options.onProgress
-  });
-  const buildTree = options.treeBuilder ?? defaultTreeBuilder({
-    wrapWithDirectory,
-    shardSplitThresholdBytes,
-    shardFanoutBits,
-    cidVersion,
-    onProgress: options.onProgress
-  });
-  for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore)) {
-    yield {
-      cid: entry.cid,
-      path: entry.path,
-      unixfs: entry.unixfs,
-      size: entry.size
-    };
-  }
-}
-async function importFile(content, blockstore, options = {}) {
-  const result = await src_default4(importer([content], blockstore, options));
-  if (result == null) {
-    throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS");
-  }
-  return result;
-}
-async function importDirectory(content, blockstore, options = {}) {
-  const result = await src_default4(importer([content], blockstore, options));
-  if (result == null) {
-    throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS");
-  }
-  return result;
-}
-async function importBytes(buf2, blockstore, options = {}) {
-  return importFile({
-    content: buf2
-  }, blockstore, options);
-}
-async function importByteStream(bufs, blockstore, options = {}) {
-  return importFile({
-    content: bufs
-  }, blockstore, options);
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/chunker/rabin.js
-var import_err_code5 = __toESM(require_err_code(), 1);
-var import_rabin_wasm = __toESM(require_src(), 1);
-
-// node_modules/@helia/unixfs/dist/src/commands/add.js
-var defaultImporterSettings = {
-  cidVersion: 1,
-  rawLeaves: true,
-  layout: balanced({
-    maxChildrenPerNode: 1024
-  }),
-  chunker: fixedSize({
-    chunkSize: 1048576
-  })
-};
-async function* addAll(source, blockstore, options = {}) {
-  yield* importer(source, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-}
-async function addBytes(bytes, blockstore, options = {}) {
-  const { cid } = await importBytes(bytes, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-async function addByteStream(bytes, blockstore, options = {}) {
-  const { cid } = await importByteStream(bytes, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-async function addFile(file, blockstore, options = {}) {
-  const { cid } = await importFile(file, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-async function addDirectory(dir, blockstore, options = {}) {
-  const { cid } = await importDirectory({
-    ...dir,
-    path: dir.path ?? "-"
-  }, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-
-// node_modules/ipfs-unixfs-exporter/dist/src/index.js
-var import_err_code15 = __toESM(require_err_code(), 1);
-
-// node_modules/it-last/dist/src/index.js
-function isAsyncIterable6(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function last(source) {
-  if (isAsyncIterable6(source)) {
-    return (async () => {
-      let res2;
-      for await (const entry of source) {
-        res2 = entry;
-      }
-      return res2;
-    })();
-  }
-  let res;
-  for (const entry of source) {
-    res = entry;
-  }
-  return res;
-}
-var src_default7 = last;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js
-var import_err_code14 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.js
-var import_err_code6 = __toESM(require_err_code(), 1);
-var resolve = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  const block = await blockstore.get(cid, options);
-  const object = decode6(block);
-  let subObject = object;
-  let subPath = path6;
-  while (toResolve.length > 0) {
-    const prop = toResolve[0];
-    if (prop in subObject) {
-      toResolve.shift();
-      subPath = `${subPath}/${prop}`;
-      const subObjectCid = CID2.asCID(subObject[prop]);
-      if (subObjectCid != null) {
-        return {
-          entry: {
-            type: "object",
-            name: name4,
-            path: path6,
-            cid,
-            node: block,
-            depth,
-            size: BigInt(block.length),
-            content: async function* () {
-              yield object;
-            }
-          },
-          next: {
-            cid: subObjectCid,
-            name: prop,
-            path: subPath,
-            toResolve
-          }
-        };
-      }
-      subObject = subObject[prop];
-    } else {
-      throw (0, import_err_code6.default)(new Error(`No property named ${prop} found in cbor node ${cid}`), "ERR_NO_PROP");
-    }
-  }
-  return {
-    entry: {
-      type: "object",
-      name: name4,
-      path: path6,
-      cid,
-      node: block,
-      depth,
-      size: BigInt(block.length),
-      content: async function* () {
-        yield object;
-      }
-    }
-  };
-};
-var dag_cbor_default = resolve;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js
-var import_err_code8 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.js
-function extractDataFromBlock(block, blockStart, requestedStart, requestedEnd) {
-  const blockLength = BigInt(block.length);
-  const blockEnd = BigInt(blockStart + blockLength);
-  if (requestedStart >= blockEnd || requestedEnd < blockStart) {
-    return new Uint8Array(0);
-  }
-  if (requestedEnd >= blockStart && requestedEnd < blockEnd) {
-    block = block.subarray(0, Number(requestedEnd - blockStart));
-  }
-  if (requestedStart >= blockStart && requestedStart < blockEnd) {
-    block = block.subarray(Number(requestedStart - blockStart));
-  }
-  return block;
-}
-var extract_data_from_block_default = extractDataFromBlock;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.js
-var import_err_code7 = __toESM(require_err_code(), 1);
-var validateOffsetAndLength = (size, offset = 0, length4 = size) => {
-  const fileSize = BigInt(size);
-  const start = BigInt(offset ?? 0);
-  let end = BigInt(length4);
-  if (end !== fileSize) {
-    end = start + end;
-  }
-  if (end > fileSize) {
-    end = fileSize;
-  }
-  if (start < 0n) {
-    throw (0, import_err_code7.default)(new Error("Offset must be greater than or equal to 0"), "ERR_INVALID_PARAMS");
-  }
-  if (start > fileSize) {
-    throw (0, import_err_code7.default)(new Error("Offset must be less than the file size"), "ERR_INVALID_PARAMS");
-  }
-  if (end < 0n) {
-    throw (0, import_err_code7.default)(new Error("Length must be greater than or equal to 0"), "ERR_INVALID_PARAMS");
-  }
-  if (end > fileSize) {
-    throw (0, import_err_code7.default)(new Error("Length must be less than the file size"), "ERR_INVALID_PARAMS");
-  }
-  return {
-    start,
-    end
-  };
-};
-var validate_offset_and_length_default = validateOffsetAndLength;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js
-var rawContent = (node) => {
-  async function* contentGenerator(options = {}) {
-    var _a;
-    const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length);
-    const buf2 = extract_data_from_block_default(node, 0n, start, end);
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:identity", {
-      bytesRead: BigInt(buf2.byteLength),
-      totalBytes: end - start,
-      fileSize: BigInt(node.byteLength)
-    }));
-    yield buf2;
-  }
-  return contentGenerator;
-};
-var resolve2 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  if (toResolve.length > 0) {
-    throw (0, import_err_code8.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND");
-  }
-  const buf2 = decode10(cid.multihash.bytes);
-  return {
-    entry: {
-      type: "identity",
-      name: name4,
-      path: path6,
-      cid,
-      content: rawContent(buf2.digest),
-      depth,
-      size: BigInt(buf2.digest.length),
-      node: buf2.digest
-    }
-  };
-};
-var identity_default = resolve2;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/raw.js
-var import_err_code9 = __toESM(require_err_code(), 1);
-var rawContent2 = (node) => {
-  async function* contentGenerator(options = {}) {
-    var _a;
-    const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length);
-    const buf2 = extract_data_from_block_default(node, 0n, start, end);
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:raw", {
-      bytesRead: BigInt(buf2.byteLength),
-      totalBytes: end - start,
-      fileSize: BigInt(node.byteLength)
-    }));
-    yield buf2;
-  }
-  return contentGenerator;
-};
-var resolve3 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  if (toResolve.length > 0) {
-    throw (0, import_err_code9.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND");
-  }
-  const block = await blockstore.get(cid, options);
-  return {
-    entry: {
-      type: "raw",
-      name: name4,
-      path: path6,
-      cid,
-      content: rawContent2(block),
-      depth,
-      size: BigInt(block.length),
-      node: block
-    }
-  };
-};
-var raw_default = resolve3;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js
-var import_err_code13 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.js
-var import_err_code10 = __toESM(require_err_code(), 1);
-var hashFn = async function(buf2) {
-  return (await murmur3128.encode(buf2)).slice(0, 8).reverse();
-};
-var addLinksToHamtBucket = async (links, bucket, rootBucket) => {
-  const padLength = (bucket.tableSize() - 1).toString(16).length;
-  await Promise.all(links.map(async (link) => {
-    if (link.Name == null) {
-      throw new Error("Unexpected Link without a Name");
-    }
-    if (link.Name.length === padLength) {
-      const pos = parseInt(link.Name, 16);
-      bucket._putObjectAt(pos, new Bucket({
-        hash: rootBucket._options.hash,
-        bits: rootBucket._options.bits
-      }, bucket, pos));
-      return;
-    }
-    await rootBucket.put(link.Name.substring(2), true);
-  }));
-};
-var toPrefix = (position, padLength) => {
-  return position.toString(16).toUpperCase().padStart(padLength, "0").substring(0, padLength);
-};
-var toBucketPath = (position) => {
-  let bucket = position.bucket;
-  const path6 = [];
-  while (bucket._parent != null) {
-    path6.push(bucket);
-    bucket = bucket._parent;
-  }
-  path6.push(bucket);
-  return path6.reverse();
-};
-var findShardCid = async (node, name4, blockstore, context, options) => {
-  if (context == null) {
-    if (node.Data == null) {
-      throw (0, import_err_code10.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-    }
-    let dir;
-    try {
-      dir = UnixFS.unmarshal(node.Data);
-    } catch (err) {
-      throw (0, import_err_code10.default)(err, "ERR_NOT_UNIXFS");
-    }
-    if (dir.type !== "hamt-sharded-directory") {
-      throw (0, import_err_code10.default)(new Error("not a HAMT"), "ERR_NOT_UNIXFS");
-    }
-    if (dir.fanout == null) {
-      throw (0, import_err_code10.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS");
-    }
-    const rootBucket = createHAMT({
-      hashFn,
-      bits: Math.log2(Number(dir.fanout))
-    });
-    context = {
-      rootBucket,
-      hamtDepth: 1,
-      lastBucket: rootBucket
-    };
-  }
-  const padLength = (context.lastBucket.tableSize() - 1).toString(16).length;
-  await addLinksToHamtBucket(node.Links, context.lastBucket, context.rootBucket);
-  const position = await context.rootBucket._findNewBucketAndPos(name4);
-  let prefix = toPrefix(position.pos, padLength);
-  const bucketPath = toBucketPath(position);
-  if (bucketPath.length > context.hamtDepth) {
-    context.lastBucket = bucketPath[context.hamtDepth];
-    prefix = toPrefix(context.lastBucket._posAtParent, padLength);
-  }
-  const link = node.Links.find((link2) => {
-    if (link2.Name == null) {
-      return false;
-    }
-    const entryPrefix = link2.Name.substring(0, padLength);
-    const entryName = link2.Name.substring(padLength);
-    if (entryPrefix !== prefix) {
-      return false;
-    }
-    if (entryName !== "" && entryName !== name4) {
-      return false;
-    }
-    return true;
-  });
-  if (link == null) {
-    return;
-  }
-  if (link.Name != null && link.Name.substring(padLength) === name4) {
-    return link.Hash;
-  }
-  context.hamtDepth++;
-  const block = await blockstore.get(link.Hash, options);
-  node = decode11(block);
-  return findShardCid(node, name4, blockstore, context, options);
-};
-var find_cid_in_shard_default = findShardCid;
-
-// node_modules/it-filter/dist/src/index.js
-function isAsyncIterable7(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function filter(source, fn) {
-  if (isAsyncIterable7(source)) {
-    return async function* () {
-      for await (const entry of source) {
-        if (await fn(entry)) {
-          yield entry;
-        }
-      }
-    }();
-  }
-  const peekable2 = src_default2(source);
-  const { value, done } = peekable2.next();
-  if (done === true) {
-    return function* () {
-    }();
-  }
-  const res = fn(value);
-  if (typeof res.then === "function") {
-    return async function* () {
-      if (await res) {
-        yield value;
-      }
-      for await (const entry of peekable2) {
-        if (await fn(entry)) {
-          yield entry;
-        }
-      }
-    }();
-  }
-  const func = fn;
-  return function* () {
-    if (res === true) {
-      yield value;
-    }
-    for (const entry of peekable2) {
-      if (func(entry)) {
-        yield entry;
-      }
-    }
-  }();
-}
-var src_default8 = filter;
-
-// node_modules/it-parallel/dist/src/index.js
-var CustomEvent = globalThis.CustomEvent ?? Event;
-async function* parallel(source, options = {}) {
-  let concurrency = options.concurrency ?? Infinity;
-  if (concurrency < 1) {
-    concurrency = Infinity;
-  }
-  const ordered = options.ordered == null ? false : options.ordered;
-  const emitter = new EventTarget();
-  const ops = [];
-  let slotAvailable = pDefer();
-  let resultAvailable = pDefer();
-  let sourceFinished = false;
-  let sourceErr;
-  let opErred = false;
-  emitter.addEventListener("task-complete", () => {
-    resultAvailable.resolve();
-  });
-  void Promise.resolve().then(async () => {
-    try {
-      for await (const task of source) {
-        if (ops.length === concurrency) {
-          slotAvailable = pDefer();
-          await slotAvailable.promise;
-        }
-        if (opErred) {
-          break;
-        }
-        const op = {
-          done: false
-        };
-        ops.push(op);
-        task().then((result) => {
-          op.done = true;
-          op.ok = true;
-          op.value = result;
-          emitter.dispatchEvent(new CustomEvent("task-complete"));
-        }, (err) => {
-          op.done = true;
-          op.err = err;
-          emitter.dispatchEvent(new CustomEvent("task-complete"));
-        });
-      }
-      sourceFinished = true;
-      emitter.dispatchEvent(new CustomEvent("task-complete"));
-    } catch (err) {
-      sourceErr = err;
-      emitter.dispatchEvent(new CustomEvent("task-complete"));
-    }
-  });
-  function valuesAvailable() {
-    var _a;
-    if (ordered) {
-      return (_a = ops[0]) == null ? void 0 : _a.done;
-    }
-    return Boolean(ops.find((op) => op.done));
-  }
-  function* yieldOrderedValues() {
-    while (ops.length > 0 && ops[0].done) {
-      const op = ops[0];
-      ops.shift();
-      if (op.ok) {
-        yield op.value;
-      } else {
-        opErred = true;
-        slotAvailable.resolve();
-        throw op.err;
-      }
-      slotAvailable.resolve();
-    }
-  }
-  function* yieldUnOrderedValues() {
-    while (valuesAvailable()) {
-      for (let i = 0; i < ops.length; i++) {
-        if (ops[i].done) {
-          const op = ops[i];
-          ops.splice(i, 1);
-          i--;
-          if (op.ok) {
-            yield op.value;
-          } else {
-            opErred = true;
-            slotAvailable.resolve();
-            throw op.err;
-          }
-          slotAvailable.resolve();
-        }
-      }
-    }
-  }
-  while (true) {
-    if (!valuesAvailable()) {
-      resultAvailable = pDefer();
-      await resultAvailable.promise;
-    }
-    if (sourceErr != null) {
-      throw sourceErr;
-    }
-    if (ordered) {
-      yield* yieldOrderedValues();
-    } else {
-      yield* yieldUnOrderedValues();
-    }
-    if (sourceFinished && ops.length === 0) {
-      break;
-    }
-  }
-}
-
-// node_modules/it-pushable/dist/src/fifo.js
-var FixedFIFO = class {
-  buffer;
-  mask;
-  top;
-  btm;
-  next;
-  constructor(hwm) {
-    if (!(hwm > 0) || (hwm - 1 & hwm) !== 0) {
-      throw new Error("Max size for a FixedFIFO should be a power of two");
-    }
-    this.buffer = new Array(hwm);
-    this.mask = hwm - 1;
-    this.top = 0;
-    this.btm = 0;
-    this.next = null;
-  }
-  push(data) {
-    if (this.buffer[this.top] !== void 0) {
-      return false;
-    }
-    this.buffer[this.top] = data;
-    this.top = this.top + 1 & this.mask;
-    return true;
-  }
-  shift() {
-    const last2 = this.buffer[this.btm];
-    if (last2 === void 0) {
-      return void 0;
-    }
-    this.buffer[this.btm] = void 0;
-    this.btm = this.btm + 1 & this.mask;
-    return last2;
-  }
-  isEmpty() {
-    return this.buffer[this.btm] === void 0;
-  }
-};
-var FIFO = class {
-  size;
-  hwm;
-  head;
-  tail;
-  constructor(options = {}) {
-    this.hwm = options.splitLimit ?? 16;
-    this.head = new FixedFIFO(this.hwm);
-    this.tail = this.head;
-    this.size = 0;
-  }
-  calculateSize(obj) {
-    if ((obj == null ? void 0 : obj.byteLength) != null) {
-      return obj.byteLength;
-    }
-    return 1;
-  }
-  push(val) {
-    if ((val == null ? void 0 : val.value) != null) {
-      this.size += this.calculateSize(val.value);
-    }
-    if (!this.head.push(val)) {
-      const prev = this.head;
-      this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length);
-      this.head.push(val);
-    }
-  }
-  shift() {
-    let val = this.tail.shift();
-    if (val === void 0 && this.tail.next != null) {
-      const next = this.tail.next;
-      this.tail.next = null;
-      this.tail = next;
-      val = this.tail.shift();
-    }
-    if ((val == null ? void 0 : val.value) != null) {
-      this.size -= this.calculateSize(val.value);
-    }
-    return val;
-  }
-  isEmpty() {
-    return this.head.isEmpty();
-  }
-};
-
-// node_modules/it-pushable/dist/src/index.js
-var AbortError3 = class extends Error {
-  type;
-  code;
-  constructor(message2, code5) {
-    super(message2 ?? "The operation was aborted");
-    this.type = "aborted";
-    this.code = code5 ?? "ABORT_ERR";
-  }
-};
-function pushable(options = {}) {
-  const getNext = (buffer2) => {
-    const next = buffer2.shift();
-    if (next == null) {
-      return { done: true };
-    }
-    if (next.error != null) {
-      throw next.error;
-    }
-    return {
-      done: next.done === true,
-      // @ts-expect-error if done is false, value will be present
-      value: next.value
-    };
-  };
-  return _pushable(getNext, options);
-}
-function _pushable(getNext, options) {
-  options = options ?? {};
-  let onEnd = options.onEnd;
-  let buffer2 = new FIFO();
-  let pushable2;
-  let onNext;
-  let ended;
-  let drain2 = pDefer();
-  const waitNext = async () => {
-    try {
-      if (!buffer2.isEmpty()) {
-        return getNext(buffer2);
-      }
-      if (ended) {
-        return { done: true };
-      }
-      return await new Promise((resolve6, reject) => {
-        onNext = (next) => {
-          onNext = null;
-          buffer2.push(next);
-          try {
-            resolve6(getNext(buffer2));
-          } catch (err) {
-            reject(err);
-          }
-          return pushable2;
-        };
-      });
-    } finally {
-      if (buffer2.isEmpty()) {
-        queueMicrotask(() => {
-          drain2.resolve();
-          drain2 = pDefer();
-        });
-      }
-    }
-  };
-  const bufferNext = (next) => {
-    if (onNext != null) {
-      return onNext(next);
-    }
-    buffer2.push(next);
-    return pushable2;
-  };
-  const bufferError = (err) => {
-    buffer2 = new FIFO();
-    if (onNext != null) {
-      return onNext({ error: err });
-    }
-    buffer2.push({ error: err });
-    return pushable2;
-  };
-  const push = (value) => {
-    if (ended) {
-      return pushable2;
-    }
-    if ((options == null ? void 0 : options.objectMode) !== true && (value == null ? void 0 : value.byteLength) == null) {
-      throw new Error("objectMode was not true but tried to push non-Uint8Array value");
-    }
-    return bufferNext({ done: false, value });
-  };
-  const end = (err) => {
-    if (ended)
-      return pushable2;
-    ended = true;
-    return err != null ? bufferError(err) : bufferNext({ done: true });
-  };
-  const _return = () => {
-    buffer2 = new FIFO();
-    end();
-    return { done: true };
-  };
-  const _throw = (err) => {
-    end(err);
-    return { done: true };
-  };
-  pushable2 = {
-    [Symbol.asyncIterator]() {
-      return this;
-    },
-    next: waitNext,
-    return: _return,
-    throw: _throw,
-    push,
-    end,
-    get readableLength() {
-      return buffer2.size;
-    },
-    onEmpty: async (options2) => {
-      const signal = options2 == null ? void 0 : options2.signal;
-      signal == null ? void 0 : signal.throwIfAborted();
-      if (buffer2.isEmpty()) {
-        return;
-      }
-      let cancel;
-      let listener;
-      if (signal != null) {
-        cancel = new Promise((resolve6, reject) => {
-          listener = () => {
-            reject(new AbortError3());
-          };
-          signal.addEventListener("abort", listener);
-        });
-      }
-      try {
-        await Promise.race([
-          drain2.promise,
-          cancel
-        ]);
-      } finally {
-        if (listener != null && signal != null) {
-          signal == null ? void 0 : signal.removeEventListener("abort", listener);
-        }
-      }
-    }
-  };
-  if (onEnd == null) {
-    return pushable2;
-  }
-  const _pushable2 = pushable2;
-  pushable2 = {
-    [Symbol.asyncIterator]() {
-      return this;
-    },
-    next() {
-      return _pushable2.next();
-    },
-    throw(err) {
-      _pushable2.throw(err);
-      if (onEnd != null) {
-        onEnd(err);
-        onEnd = void 0;
-      }
-      return { done: true };
-    },
-    return() {
-      _pushable2.return();
-      if (onEnd != null) {
-        onEnd();
-        onEnd = void 0;
-      }
-      return { done: true };
-    },
-    push,
-    end(err) {
-      _pushable2.end(err);
-      if (onEnd != null) {
-        onEnd(err);
-        onEnd = void 0;
-      }
-      return pushable2;
-    },
-    get readableLength() {
-      return _pushable2.readableLength;
-    },
-    onEmpty: (opts) => {
-      return _pushable2.onEmpty(opts);
-    }
-  };
-  return pushable2;
-}
-
-// node_modules/it-merge/dist/src/index.js
-function isAsyncIterable8(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function merge(...sources) {
-  const syncSources = [];
-  for (const source of sources) {
-    if (!isAsyncIterable8(source)) {
-      syncSources.push(source);
-    }
-  }
-  if (syncSources.length === sources.length) {
-    return function* () {
-      for (const source of syncSources) {
-        yield* source;
-      }
-    }();
-  }
-  return async function* () {
-    const output = pushable({
-      objectMode: true
-    });
-    void Promise.resolve().then(async () => {
-      try {
-        await Promise.all(sources.map(async (source) => {
-          for await (const item of source) {
-            output.push(item);
-          }
-        }));
-        output.end();
-      } catch (err) {
-        output.end(err);
-      }
-    });
-    yield* output;
-  }();
-}
-var src_default9 = merge;
-
-// node_modules/it-pipe/dist/src/index.js
-function pipe(first2, ...rest) {
-  if (first2 == null) {
-    throw new Error("Empty pipeline");
-  }
-  if (isDuplex(first2)) {
-    const duplex = first2;
-    first2 = () => duplex.source;
-  } else if (isIterable2(first2) || isAsyncIterable9(first2)) {
-    const source = first2;
-    first2 = () => source;
-  }
-  const fns = [first2, ...rest];
-  if (fns.length > 1) {
-    if (isDuplex(fns[fns.length - 1])) {
-      fns[fns.length - 1] = fns[fns.length - 1].sink;
-    }
-  }
-  if (fns.length > 2) {
-    for (let i = 1; i < fns.length - 1; i++) {
-      if (isDuplex(fns[i])) {
-        fns[i] = duplexPipelineFn(fns[i]);
-      }
-    }
-  }
-  return rawPipe(...fns);
-}
-var rawPipe = (...fns) => {
-  let res;
-  while (fns.length > 0) {
-    res = fns.shift()(res);
-  }
-  return res;
-};
-var isAsyncIterable9 = (obj) => {
-  return (obj == null ? void 0 : obj[Symbol.asyncIterator]) != null;
-};
-var isIterable2 = (obj) => {
-  return (obj == null ? void 0 : obj[Symbol.iterator]) != null;
-};
-var isDuplex = (obj) => {
-  if (obj == null) {
-    return false;
-  }
-  return obj.sink != null && obj.source != null;
-};
-var duplexPipelineFn = (duplex) => {
-  return (source) => {
-    const p = duplex.sink(source);
-    if ((p == null ? void 0 : p.then) != null) {
-      const stream = pushable({
-        objectMode: true
-      });
-      p.then(() => {
-        stream.end();
-      }, (err) => {
-        stream.end(err);
-      });
-      let sourceWrap;
-      const source2 = duplex.source;
-      if (isAsyncIterable9(source2)) {
-        sourceWrap = async function* () {
-          yield* source2;
-          stream.end();
-        };
-      } else if (isIterable2(source2)) {
-        sourceWrap = function* () {
-          yield* source2;
-          stream.end();
-        };
-      } else {
-        throw new Error("Unknown duplex source type - must be Iterable or AsyncIterable");
-      }
-      return src_default9(stream, sourceWrap());
-    }
-    return duplex.source;
-  };
-};
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.js
-var directoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-  async function* yieldDirectoryContent(options = {}) {
-    var _a;
-    const offset = options.offset ?? 0;
-    const length4 = options.length ?? node.Links.length;
-    const links = node.Links.slice(offset, length4);
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:directory", {
-      cid
-    }));
-    yield* pipe(links, (source) => src_default3(source, (link) => {
-      return async () => {
-        const linkName = link.Name ?? "";
-        const linkPath = `${path6}/${linkName}`;
-        const result = await resolve6(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options);
-        return result.entry;
-      };
-    }), (source) => parallel(source, { ordered: true }), (source) => src_default8(source, (entry) => entry != null));
-  }
-  return yieldDirectoryContent;
-};
-var directory_default = directoryContent;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.js
-var import_err_code11 = __toESM(require_err_code(), 1);
-async function walkDAG(blockstore, node, queue, streamPosition, start, end, options) {
-  if (node instanceof Uint8Array) {
-    const buf2 = extract_data_from_block_default(node, streamPosition, start, end);
-    queue.push(buf2);
-    return;
-  }
-  if (node.Data == null) {
-    throw (0, import_err_code11.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-  }
-  let file;
-  try {
-    file = UnixFS.unmarshal(node.Data);
-  } catch (err) {
-    throw (0, import_err_code11.default)(err, "ERR_NOT_UNIXFS");
-  }
-  if (file.data != null) {
-    const data = file.data;
-    const buf2 = extract_data_from_block_default(data, streamPosition, start, end);
-    queue.push(buf2);
-    streamPosition += BigInt(buf2.byteLength);
-  }
-  const childOps = [];
-  if (node.Links.length !== file.blockSizes.length) {
-    throw (0, import_err_code11.default)(new Error("Inconsistent block sizes and dag links"), "ERR_NOT_UNIXFS");
-  }
-  for (let i = 0; i < node.Links.length; i++) {
-    const childLink = node.Links[i];
-    const childStart = streamPosition;
-    const childEnd = childStart + file.blockSizes[i];
-    if (start >= childStart && start < childEnd || // child has offset byte
-    end >= childStart && end <= childEnd || // child has end byte
-    start < childStart && end > childEnd) {
-      childOps.push({
-        link: childLink,
-        blockStart: streamPosition
-      });
-    }
-    streamPosition = childEnd;
-    if (streamPosition > end) {
-      break;
-    }
-  }
-  await pipe(childOps, (source) => src_default3(source, (op) => {
-    return async () => {
-      const block = await blockstore.get(op.link.Hash, options);
-      return {
-        ...op,
-        block
-      };
-    };
-  }), (source) => parallel(source, {
-    ordered: true
-  }), async (source) => {
-    for await (const { link, block, blockStart } of source) {
-      let child;
-      switch (link.Hash.code) {
-        case code2:
-          child = decode11(block);
-          break;
-        case code3:
-          child = block;
-          break;
-        default:
-          queue.end((0, import_err_code11.default)(new Error(`Unsupported codec: ${link.Hash.code}`), "ERR_NOT_UNIXFS"));
-          return;
-      }
-      const childQueue = new dist_default({
-        concurrency: 1
-      });
-      childQueue.on("error", (error) => {
-        queue.end(error);
-      });
-      void childQueue.add(async () => {
-        var _a;
-        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", {
-          cid: link.Hash
-        }));
-        await walkDAG(blockstore, child, queue, blockStart, start, end, options);
-      });
-      await childQueue.onIdle();
-    }
-  });
-  if (streamPosition >= end) {
-    queue.end();
-  }
-}
-var fileContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-  async function* yieldFileContent(options = {}) {
-    var _a, _b;
-    const fileSize = unixfs2.fileSize();
-    if (fileSize === void 0) {
-      throw new Error("File was a directory");
-    }
-    const { start, end } = validate_offset_and_length_default(fileSize, options.offset, options.length);
-    if (end === 0n) {
-      return;
-    }
-    let read4 = 0n;
-    const wanted = end - start;
-    const queue = pushable();
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", {
-      cid
-    }));
-    void walkDAG(blockstore, node, queue, 0n, start, end, options).catch((err) => {
-      queue.end(err);
-    });
-    for await (const buf2 of queue) {
-      if (buf2 == null) {
-        continue;
-      }
-      read4 += BigInt(buf2.byteLength);
-      if (read4 > wanted) {
-        queue.end();
-        throw (0, import_err_code11.default)(new Error("Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect"), "ERR_OVER_READ");
-      }
-      if (read4 === wanted) {
-        queue.end();
-      }
-      (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:exporter:progress:unixfs:file", {
-        bytesRead: read4,
-        totalBytes: wanted,
-        fileSize
-      }));
-      yield buf2;
-    }
-    if (read4 < wanted) {
-      throw (0, import_err_code11.default)(new Error("Traversed entire DAG but did not read enough bytes"), "ERR_UNDER_READ");
-    }
-  }
-  return yieldFileContent;
-};
-var file_default = fileContent;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js
-var import_err_code12 = __toESM(require_err_code(), 1);
-var hamtShardedDirectoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-  function yieldHamtDirectoryContent(options = {}) {
-    var _a;
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", {
-      cid
-    }));
-    return listDirectory(node, path6, resolve6, depth, blockstore, options);
-  }
-  return yieldHamtDirectoryContent;
-};
-async function* listDirectory(node, path6, resolve6, depth, blockstore, options) {
-  const links = node.Links;
-  if (node.Data == null) {
-    throw (0, import_err_code12.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-  }
-  let dir;
-  try {
-    dir = UnixFS.unmarshal(node.Data);
-  } catch (err) {
-    throw (0, import_err_code12.default)(err, "ERR_NOT_UNIXFS");
-  }
-  if (dir.fanout == null) {
-    throw (0, import_err_code12.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS");
-  }
-  const padLength = (dir.fanout - 1n).toString(16).length;
-  const results = pipe(links, (source) => src_default3(source, (link) => {
-    return async () => {
-      var _a;
-      const name4 = link.Name != null ? link.Name.substring(padLength) : null;
-      if (name4 != null && name4 !== "") {
-        const result = await resolve6(link.Hash, name4, `${path6}/${name4}`, [], depth + 1, blockstore, options);
-        return { entries: result.entry == null ? [] : [result.entry] };
-      } else {
-        const block = await blockstore.get(link.Hash, options);
-        node = decode11(block);
-        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", {
-          cid: link.Hash
-        }));
-        return { entries: listDirectory(node, path6, resolve6, depth, blockstore, options) };
-      }
-    };
-  }), (source) => parallel(source, { ordered: true }));
-  for await (const { entries } of results) {
-    yield* entries;
-  }
-}
-var hamt_sharded_directory_default = hamtShardedDirectoryContent;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js
-var findLinkCid = (node, name4) => {
-  const link = node.Links.find((link2) => link2.Name === name4);
-  return link == null ? void 0 : link.Hash;
-};
-var contentExporters = {
-  raw: file_default,
-  file: file_default,
-  directory: directory_default,
-  "hamt-sharded-directory": hamt_sharded_directory_default,
-  metadata: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-    return () => [];
-  },
-  symlink: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-    return () => [];
-  }
-};
-var unixFsResolver = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  const block = await blockstore.get(cid, options);
-  const node = decode11(block);
-  let unixfs2;
-  let next;
-  if (name4 == null) {
-    name4 = cid.toString();
-  }
-  if (node.Data == null) {
-    throw (0, import_err_code13.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-  }
-  try {
-    unixfs2 = UnixFS.unmarshal(node.Data);
-  } catch (err) {
-    throw (0, import_err_code13.default)(err, "ERR_NOT_UNIXFS");
-  }
-  if (path6 == null) {
-    path6 = name4;
-  }
-  if (toResolve.length > 0) {
-    let linkCid;
-    if ((unixfs2 == null ? void 0 : unixfs2.type) === "hamt-sharded-directory") {
-      linkCid = await find_cid_in_shard_default(node, toResolve[0], blockstore);
-    } else {
-      linkCid = findLinkCid(node, toResolve[0]);
-    }
-    if (linkCid == null) {
-      throw (0, import_err_code13.default)(new Error("file does not exist"), "ERR_NOT_FOUND");
-    }
-    const nextName = toResolve.shift();
-    const nextPath = `${path6}/${nextName}`;
-    next = {
-      cid: linkCid,
-      toResolve,
-      name: nextName ?? "",
-      path: nextPath
-    };
-  }
-  const content = contentExporters[unixfs2.type](cid, node, unixfs2, path6, resolve6, depth, blockstore);
-  if (content == null) {
-    throw (0, import_err_code13.default)(new Error("could not find content exporter"), "ERR_NOT_FOUND");
-  }
-  if (unixfs2.isDirectory()) {
-    return {
-      entry: {
-        type: "directory",
-        name: name4,
-        path: path6,
-        cid,
-        content,
-        unixfs: unixfs2,
-        depth,
-        node,
-        size: unixfs2.fileSize()
-      },
-      next
-    };
-  }
-  return {
-    entry: {
-      type: "file",
-      name: name4,
-      path: path6,
-      cid,
-      content,
-      unixfs: unixfs2,
-      depth,
-      node,
-      size: unixfs2.fileSize()
-    },
-    next
-  };
-};
-var unixfs_v1_default = unixFsResolver;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js
-var resolvers = {
-  [code2]: unixfs_v1_default,
-  [code3]: raw_default,
-  [code]: dag_cbor_default,
-  [identity2.code]: identity_default
-};
-var resolve4 = async (cid, name4, path6, toResolve, depth, blockstore, options) => {
-  const resolver = resolvers[cid.code];
-  if (resolver == null) {
-    throw (0, import_err_code14.default)(new Error(`No resolver for code ${cid.code}`), "ERR_NO_RESOLVER");
-  }
-  return resolver(cid, name4, path6, toResolve, resolve4, depth, blockstore, options);
-};
-var resolvers_default = resolve4;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/index.js
-var toPathComponents2 = (path6 = "") => {
-  return (path6.trim().match(/([^\\^/]|\\\/)+/g) ?? []).filter(Boolean);
-};
-var cidAndRest = (path6) => {
-  if (path6 instanceof Uint8Array) {
-    return {
-      cid: CID2.decode(path6),
-      toResolve: []
-    };
-  }
-  const cid = CID2.asCID(path6);
-  if (cid != null) {
-    return {
-      cid,
-      toResolve: []
-    };
-  }
-  if (typeof path6 === "string") {
-    if (path6.indexOf("/ipfs/") === 0) {
-      path6 = path6.substring(6);
-    }
-    const output = toPathComponents2(path6);
-    return {
-      cid: CID2.parse(output[0]),
-      toResolve: output.slice(1)
-    };
-  }
-  throw (0, import_err_code15.default)(new Error(`Unknown path type ${path6}`), "ERR_BAD_PATH");
-};
-async function* walkPath(path6, blockstore, options = {}) {
-  let { cid, toResolve } = cidAndRest(path6);
-  let name4 = cid.toString();
-  let entryPath = name4;
-  const startingDepth = toResolve.length;
-  while (true) {
-    const result = await resolvers_default(cid, name4, entryPath, toResolve, startingDepth, blockstore, options);
-    if (result.entry == null && result.next == null) {
-      throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND");
-    }
-    if (result.entry != null) {
-      yield result.entry;
-    }
-    if (result.next == null) {
-      return;
-    }
-    toResolve = result.next.toResolve;
-    cid = result.next.cid;
-    name4 = result.next.name;
-    entryPath = result.next.path;
-  }
-}
-async function exporter(path6, blockstore, options = {}) {
-  const result = await src_default7(walkPath(path6, blockstore, options));
-  if (result == null) {
-    throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND");
-  }
-  return result;
-}
-async function* recursive(path6, blockstore, options = {}) {
-  const node = await exporter(path6, blockstore, options);
-  if (node == null) {
-    return;
-  }
-  yield node;
-  if (node.type === "directory") {
-    for await (const child of recurse(node, options)) {
-      yield child;
-    }
-  }
-  async function* recurse(node2, options2) {
-    for await (const file of node2.content(options2)) {
-      yield file;
-      if (file instanceof Uint8Array) {
-        continue;
-      }
-      if (file.type === "directory") {
-        yield* recurse(file, options2);
-      }
-    }
-  }
-}
-
-// node_modules/merge-options/index.mjs
-var import_index3 = __toESM(require_merge_options(), 1);
-var merge_options_default = import_index3.default;
-
-// node_modules/@helia/unixfs/dist/src/errors.js
-var UnixFSError = class extends Error {
-  name;
-  code;
-  constructor(message2, name4, code5) {
-    super(message2);
-    this.name = name4;
-    this.code = code5;
-  }
-};
-var NotUnixFSError = class extends UnixFSError {
-  constructor(message2 = "not a Unixfs node") {
-    super(message2, "NotUnixFSError", "ERR_NOT_UNIXFS");
-  }
-};
-var InvalidPBNodeError = class extends UnixFSError {
-  constructor(message2 = "invalid PBNode") {
-    super(message2, "InvalidPBNodeError", "ERR_INVALID_PBNODE");
-  }
-};
-var UnknownError = class extends UnixFSError {
-  constructor(message2 = "unknown error") {
-    super(message2, "InvalidPBNodeError", "ERR_UNKNOWN_ERROR");
-  }
-};
-var AlreadyExistsError = class extends UnixFSError {
-  constructor(message2 = "path already exists") {
-    super(message2, "AlreadyExistsError", "ERR_ALREADY_EXISTS");
-  }
-};
-var DoesNotExistError = class extends UnixFSError {
-  constructor(message2 = "path does not exist") {
-    super(message2, "DoesNotExistError", "ERR_DOES_NOT_EXIST");
-  }
-};
-var NoContentError = class extends UnixFSError {
-  constructor(message2 = "no content") {
-    super(message2, "NoContentError", "ERR_NO_CONTENT");
-  }
-};
-var NotAFileError = class extends UnixFSError {
-  constructor(message2 = "not a file") {
-    super(message2, "NotAFileError", "ERR_NOT_A_FILE");
-  }
-};
-var NotADirectoryError = class extends UnixFSError {
-  constructor(message2 = "not a directory") {
-    super(message2, "NotADirectoryError", "ERR_NOT_A_DIRECTORY");
-  }
-};
-var InvalidParametersError = class extends UnixFSError {
-  constructor(message2 = "invalid parameters") {
-    super(message2, "InvalidParametersError", "ERR_INVALID_PARAMETERS");
-  }
-};
-
-// node_modules/@libp2p/logger/dist/src/index.js
-var import_debug = __toESM(require_src2(), 1);
-import_debug.default.formatters.b = (v) => {
-  return v == null ? "undefined" : base58btc2.baseEncode(v);
-};
-import_debug.default.formatters.t = (v) => {
-  return v == null ? "undefined" : base322.baseEncode(v);
-};
-import_debug.default.formatters.m = (v) => {
-  return v == null ? "undefined" : base64.baseEncode(v);
-};
-import_debug.default.formatters.p = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug.default.formatters.c = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug.default.formatters.k = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug.default.formatters.a = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-function createDisabledLogger(namespace) {
-  const logger3 = () => {
-  };
-  logger3.enabled = false;
-  logger3.color = "";
-  logger3.diff = 0;
-  logger3.log = () => {
-  };
-  logger3.namespace = namespace;
-  logger3.destroy = () => true;
-  logger3.extend = () => logger3;
-  return logger3;
-}
-function logger(name4) {
-  let trace = createDisabledLogger(`${name4}:trace`);
-  if (import_debug.default.enabled(`${name4}:trace`) && import_debug.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null) {
-    trace = (0, import_debug.default)(`${name4}:trace`);
-  }
-  return Object.assign((0, import_debug.default)(name4), {
-    error: (0, import_debug.default)(`${name4}:error`),
-    trace
-  });
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js
-var import_sparse_array3 = __toESM(require_sparse_array(), 1);
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/consumable-hash.js
-function wrapHash2(hashFn2) {
-  function hashing(value) {
-    if (value instanceof InfiniteHash2) {
-      return value;
-    } else {
-      return new InfiniteHash2(value, hashFn2);
-    }
-  }
-  return hashing;
-}
-var InfiniteHash2 = class {
-  _value;
-  _hashFn;
-  _depth;
-  _availableBits;
-  _currentBufferIndex;
-  _buffers;
-  constructor(value, hashFn2) {
-    if (!(value instanceof Uint8Array)) {
-      throw new Error("can only hash Uint8Arrays");
-    }
-    this._value = value;
-    this._hashFn = hashFn2;
-    this._depth = -1;
-    this._availableBits = 0;
-    this._currentBufferIndex = 0;
-    this._buffers = [];
-  }
-  async take(bits) {
-    let pendingBits = bits;
-    while (this._availableBits < pendingBits) {
-      await this._produceMoreBits();
-    }
-    let result = 0;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const available = Math.min(hash.availableBits(), pendingBits);
-      const took = hash.take(available);
-      result = (result << available) + took;
-      pendingBits -= available;
-      this._availableBits -= available;
-      if (hash.availableBits() === 0) {
-        this._currentBufferIndex++;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    let pendingBits = bits;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits);
-      hash.untake(availableForUntake);
-      pendingBits -= availableForUntake;
-      this._availableBits += availableForUntake;
-      if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {
-        this._depth--;
-        this._currentBufferIndex--;
-      }
-    }
-  }
-  async _produceMoreBits() {
-    this._depth++;
-    const value = this._depth > 0 ? concat2([this._value, Uint8Array.from([this._depth])]) : this._value;
-    const hashValue = await this._hashFn(value);
-    const buffer2 = new ConsumableBuffer2(hashValue);
-    this._buffers.push(buffer2);
-    this._availableBits += buffer2.availableBits();
-  }
-};
-var START_MASKS2 = [
-  255,
-  254,
-  252,
-  248,
-  240,
-  224,
-  192,
-  128
-];
-var STOP_MASKS2 = [
-  1,
-  3,
-  7,
-  15,
-  31,
-  63,
-  127,
-  255
-];
-var ConsumableBuffer2 = class {
-  _value;
-  _currentBytePos;
-  _currentBitPos;
-  constructor(value) {
-    this._value = value;
-    this._currentBytePos = value.length - 1;
-    this._currentBitPos = 7;
-  }
-  availableBits() {
-    return this._currentBitPos + 1 + this._currentBytePos * 8;
-  }
-  totalBits() {
-    return this._value.length * 8;
-  }
-  take(bits) {
-    let pendingBits = bits;
-    let result = 0;
-    while (pendingBits > 0 && this._haveBits()) {
-      const byte = this._value[this._currentBytePos];
-      const availableBits = this._currentBitPos + 1;
-      const taking = Math.min(availableBits, pendingBits);
-      const value = byteBitsToInt2(byte, availableBits - taking, taking);
-      result = (result << taking) + value;
-      pendingBits -= taking;
-      this._currentBitPos -= taking;
-      if (this._currentBitPos < 0) {
-        this._currentBitPos = 7;
-        this._currentBytePos--;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    this._currentBitPos += bits;
-    while (this._currentBitPos > 7) {
-      this._currentBitPos -= 8;
-      this._currentBytePos += 1;
-    }
-  }
-  _haveBits() {
-    return this._currentBytePos >= 0;
-  }
-};
-function byteBitsToInt2(byte, start, length4) {
-  const mask = maskFor2(start, length4);
-  return (byte & mask) >>> start;
-}
-function maskFor2(start, length4) {
-  return START_MASKS2[start] & STOP_MASKS2[Math.min(length4 + start - 1, 7)];
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-constants.js
-var hamtHashCode = BigInt(murmur3128.code);
-var hamtBucketBits = 8;
-async function hamtHashFn2(buf2) {
-  return (await murmur3128.encode(buf2)).subarray(0, 8).reverse();
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js
-var import_sparse_array2 = __toESM(require_sparse_array(), 1);
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/persist.js
-var persist2 = async (buffer2, blockstore, options) => {
-  if (options.codec == null) {
-    options.codec = src_exports2;
-  }
-  const multihash = await sha256.digest(buffer2);
-  const cid = CID2.create(options.cidVersion, options.codec.code, multihash);
-  await blockstore.put(cid, buffer2, {
-    ...options,
-    signal: options.signal
-  });
-  return cid;
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/dir-sharded.js
-var Dir2 = class {
-  options;
-  root;
-  dir;
-  path;
-  dirty;
-  flat;
-  parent;
-  parentKey;
-  unixfs;
-  mode;
-  mtime;
-  cid;
-  size;
-  nodeSize;
-  constructor(props, options) {
-    this.options = options ?? {};
-    this.root = props.root;
-    this.dir = props.dir;
-    this.path = props.path;
-    this.dirty = props.dirty;
-    this.flat = props.flat;
-    this.parent = props.parent;
-    this.parentKey = props.parentKey;
-    this.unixfs = props.unixfs;
-    this.mode = props.mode;
-    this.mtime = props.mtime;
-  }
-};
-var DirSharded2 = class extends Dir2 {
-  _bucket;
-  constructor(props, options) {
-    super(props, options);
-    this._bucket = createHAMT({
-      hashFn: hamtHashFn2,
-      bits: 8
-    });
-  }
-  async put(name4, value) {
-    this.cid = void 0;
-    this.size = void 0;
-    this.nodeSize = void 0;
-    await this._bucket.put(name4, value);
-  }
-  async get(name4) {
-    return this._bucket.get(name4);
-  }
-  childCount() {
-    return this._bucket.leafCount();
-  }
-  directChildrenCount() {
-    return this._bucket.childrenCount();
-  }
-  onlyChild() {
-    return this._bucket.onlyChild();
-  }
-  async *eachChildSeries() {
-    for await (const { key, value } of this._bucket.eachLeafSeries()) {
-      yield {
-        key,
-        child: value
-      };
-    }
-  }
-  estimateNodeSize() {
-    if (this.nodeSize !== void 0) {
-      return this.nodeSize;
-    }
-    this.nodeSize = calculateSize2(this._bucket, this, this.options);
-    return this.nodeSize;
-  }
-  async *flush(blockstore) {
-    for await (const entry of flush2(this._bucket, blockstore, this, this.options)) {
-      yield {
-        ...entry,
-        path: this.path
-      };
-    }
-  }
-};
-async function* flush2(bucket, blockstore, shardRoot, options) {
-  const children = bucket._children;
-  const links = [];
-  let childrenSize = 0n;
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0");
-    if (child instanceof Bucket) {
-      let shard;
-      for await (const subShard of flush2(child, blockstore, null, options)) {
-        shard = subShard;
-      }
-      if (shard == null) {
-        throw new Error("Could not flush sharded directory, no subshard found");
-      }
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(shard.size),
-        Hash: shard.cid
-      });
-      childrenSize += shard.size;
-    } else if (isDir2(child.value)) {
-      const dir2 = child.value;
-      let flushedDir;
-      for await (const entry of dir2.flush(blockstore)) {
-        flushedDir = entry;
-        yield flushedDir;
-      }
-      if (flushedDir == null) {
-        throw new Error("Did not flush dir");
-      }
-      const label = labelPrefix + child.key;
-      links.push({
-        Name: label,
-        Tsize: Number(flushedDir.size),
-        Hash: flushedDir.cid
-      });
-      childrenSize += flushedDir.size;
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size2 = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size2),
-        Hash: value.cid
-      });
-      childrenSize += BigInt(size2 ?? 0);
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: hamtHashCode,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const node = {
-    Data: dir.marshal(),
-    Links: links
-  };
-  const buffer2 = encode7(prepare(node));
-  const cid = await persist2(buffer2, blockstore, options);
-  const size = BigInt(buffer2.byteLength) + childrenSize;
-  yield {
-    cid,
-    unixfs: dir,
-    size
-  };
-}
-function isDir2(obj) {
-  return typeof obj.flush === "function";
-}
-function calculateSize2(bucket, shardRoot, options) {
-  const children = bucket._children;
-  const links = [];
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0");
-    if (child instanceof Bucket) {
-      const size = calculateSize2(child, null, options);
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V02 : CID_V12
-      });
-    } else if (typeof child.value.flush === "function") {
-      const dir2 = child.value;
-      const size = dir2.nodeSize();
-      links.push({
-        Name: labelPrefix + child.key,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V02 : CID_V12
-      });
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size),
-        Hash: value.cid
-      });
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: hamtHashCode,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const buffer2 = encode7(prepare({
-    Data: dir.marshal(),
-    Links: links
-  }));
-  return buffer2.length;
-}
-var CID_V02 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn");
-var CID_V12 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi");
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js
-var log = logger("helia:unixfs:commands:utils:hamt-utils");
-var toPrefix2 = (position) => {
-  return position.toString(16).toUpperCase().padStart(2, "0").substring(0, 2);
-};
-var createShard = async (blockstore, contents, options) => {
-  const shard = new DirSharded2({
-    root: true,
-    dir: true,
-    parent: void 0,
-    parentKey: void 0,
-    path: "",
-    dirty: true,
-    flat: false,
-    mtime: options.mtime,
-    mode: options.mode
-  }, options);
-  for (let i = 0; i < contents.length; i++) {
-    await shard._bucket.put(contents[i].name, {
-      size: contents[i].size,
-      cid: contents[i].cid
-    });
-  }
-  const res = await src_default7(shard.flush(blockstore));
-  if (res == null) {
-    throw new Error("Flushing shard yielded no result");
-  }
-  return res;
-};
-var updateShardedDirectory = async (path6, blockstore, options) => {
-  const shardRoot = UnixFS.unmarshal(path6[0].node.Data ?? new Uint8Array(0));
-  const fanout = BigInt(Math.pow(2, hamtBucketBits));
-  path6.reverse();
-  let cid;
-  let node;
-  for (let i = 0; i < path6.length; i++) {
-    const isRoot = i === path6.length - 1;
-    const segment = path6[i];
-    const data = Uint8Array.from(segment.children.bitField().reverse());
-    const dir = new UnixFS({
-      type: "hamt-sharded-directory",
-      data,
-      fanout,
-      hashType: hamtHashCode
-    });
-    if (isRoot) {
-      dir.mtime = shardRoot.mtime;
-      dir.mode = shardRoot.mode;
-    }
-    node = {
-      Data: dir.marshal(),
-      Links: segment.node.Links
-    };
-    const block = encode7(prepare(node));
-    cid = await persist2(block, blockstore, options);
-    if (!isRoot) {
-      const nextSegment = path6[i + 1];
-      if (nextSegment == null) {
-        throw new Error("Was not operating on shard root but also had no parent?");
-      }
-      log("updating link in parent sub-shard with prefix %s", nextSegment.prefix);
-      nextSegment.node.Links = nextSegment.node.Links.filter((l) => l.Name !== nextSegment.prefix);
-      nextSegment.node.Links.push({
-        Name: nextSegment.prefix,
-        Hash: cid,
-        Tsize: segment.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), block.byteLength)
-      });
-    }
-  }
-  if (cid == null || node == null) {
-    throw new Error("Noting persisted");
-  }
-  return { cid, node };
-};
-var recreateShardedDirectory = async (cid, fileName, blockstore, options) => {
-  const wrapped = wrapHash2(hamtHashFn2);
-  const hash = wrapped(fromString3(fileName));
-  const path6 = [];
-  while (true) {
-    const block = await blockstore.get(cid, options);
-    const node = decode11(block);
-    const children = new import_sparse_array2.default();
-    const index = await hash.take(hamtBucketBits);
-    const prefix = toPrefix2(index);
-    path6.push({
-      prefix,
-      children,
-      node
-    });
-    let childLink;
-    for (const link of node.Links) {
-      const linkName2 = link.Name ?? "";
-      if (linkName2.length < 2) {
-        throw new Error("Invalid HAMT - link name was too short");
-      }
-      const position = parseInt(linkName2.substring(0, 2), 16);
-      children.set(position, true);
-      if (linkName2.startsWith(prefix)) {
-        childLink = link;
-      }
-    }
-    if (childLink == null) {
-      log("no link found with prefix %s for %s", prefix, fileName);
-      break;
-    }
-    const linkName = childLink.Name ?? "";
-    if (linkName.length < 2) {
-      throw new Error("Invalid HAMT - link name was too short");
-    }
-    if (linkName.length === 2) {
-      cid = childLink.Hash;
-      log("descend into sub-shard with prefix %s", linkName);
-      continue;
-    }
-    break;
-  }
-  return { path: path6, hash };
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/is-over-shard-threshold.js
-async function isOverShardThreshold(node, blockstore, threshold, options) {
-  if (node.Data == null) {
-    throw new Error("DagPB node had no data");
-  }
-  const unixfs2 = UnixFS.unmarshal(node.Data);
-  let size;
-  if (unixfs2.type === "directory") {
-    size = estimateNodeSize(node);
-  } else if (unixfs2.type === "hamt-sharded-directory") {
-    size = await estimateShardSize(node, 0, threshold, blockstore, options);
-  } else {
-    throw new Error("Can only estimate the size of directories or shards");
-  }
-  return size > threshold;
-}
-function estimateNodeSize(node) {
-  let size = 0;
-  for (const link of node.Links) {
-    size += (link.Name ?? "").length;
-    size += link.Hash.version === 1 ? CID_V12.bytes.byteLength : CID_V02.bytes.byteLength;
-  }
-  return size;
-}
-async function estimateShardSize(node, current, max, blockstore, options) {
-  if (current > max) {
-    return max;
-  }
-  if (node.Data == null) {
-    return current;
-  }
-  const unixfs2 = UnixFS.unmarshal(node.Data);
-  if (!unixfs2.isDirectory()) {
-    return current;
-  }
-  for (const link of node.Links) {
-    let name4 = link.Name ?? "";
-    name4 = name4.substring(2);
-    current += name4.length;
-    current += link.Hash.bytes.byteLength;
-    if (link.Hash.code === code2) {
-      const block = await blockstore.get(link.Hash, options);
-      const node2 = decode11(block);
-      current += await estimateShardSize(node2, current, max, blockstore, options);
-    }
-  }
-  return current;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js
-var log2 = logger("helia:unixfs:components:utils:add-link");
-async function addLink(parent, child, blockstore, options) {
-  if (parent.node.Data == null) {
-    throw new InvalidParametersError("Invalid parent passed to addLink");
-  }
-  const meta = UnixFS.unmarshal(parent.node.Data);
-  if (meta.type === "hamt-sharded-directory") {
-    log2("adding link to sharded directory");
-    return addToShardedDirectory(parent, child, blockstore, options);
-  }
-  log2(`adding ${child.Name} (${child.Hash}) to regular directory`);
-  const result = await addToDirectory(parent, child, blockstore, options);
-  if (await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) {
-    log2("converting directory to sharded directory");
-    const converted = await convertToShardedDirectory(result, blockstore);
-    result.cid = converted.cid;
-    result.node = decode11(await blockstore.get(converted.cid, options));
-  }
-  return result;
-}
-var convertToShardedDirectory = async (parent, blockstore) => {
-  if (parent.node.Data == null) {
-    throw new InvalidParametersError("Invalid parent passed to convertToShardedDirectory");
-  }
-  const unixfs2 = UnixFS.unmarshal(parent.node.Data);
-  const result = await createShard(blockstore, parent.node.Links.map((link) => ({
-    name: link.Name ?? "",
-    size: BigInt(link.Tsize ?? 0),
-    cid: link.Hash
-  })), {
-    mode: unixfs2.mode,
-    mtime: unixfs2.mtime,
-    cidVersion: parent.cid.version
-  });
-  log2(`converted directory to sharded directory ${result.cid}`);
-  return result;
-};
-var addToDirectory = async (parent, child, blockstore, options) => {
-  const parentLinks = parent.node.Links.filter((link) => {
-    const matches = link.Name === child.Name;
-    if (matches && !options.allowOverwriting) {
-      throw new AlreadyExistsError();
-    }
-    return !matches;
-  });
-  parentLinks.push(child);
-  if (parent.node.Data == null) {
-    throw new InvalidPBNodeError("Parent node with no data passed to addToDirectory");
-  }
-  const node = UnixFS.unmarshal(parent.node.Data);
-  let data;
-  if (node.mtime != null) {
-    const ms = Date.now();
-    const secs = Math.floor(ms / 1e3);
-    node.mtime = {
-      secs: BigInt(secs),
-      nsecs: (ms - secs * 1e3) * 1e3
-    };
-    data = node.marshal();
-  } else {
-    data = parent.node.Data;
-  }
-  parent.node = prepare({
-    Data: data,
-    Links: parentLinks
-  });
-  const buf2 = encode7(parent.node);
-  const hash = await sha256.digest(buf2);
-  const cid = CID2.create(parent.cid.version, code2, hash);
-  await blockstore.put(cid, buf2);
-  return {
-    node: parent.node,
-    cid
-  };
-};
-var addToShardedDirectory = async (parent, child, blockstore, options) => {
-  var _a;
-  const { path: path6, hash } = await recreateShardedDirectory(parent.cid, child.Name, blockstore, options);
-  const finalSegment = path6[path6.length - 1];
-  if (finalSegment == null) {
-    throw new Error("Invalid HAMT, could not generate path");
-  }
-  const prefix = finalSegment.prefix;
-  const index = parseInt(prefix, 16);
-  log2("next prefix for %s is %s", child.Name, prefix);
-  const linkName = `${prefix}${child.Name}`;
-  const existingLink = finalSegment.node.Links.find((l) => (l.Name ?? "").startsWith(prefix));
-  if (existingLink != null) {
-    log2("link %s was present in shard", linkName);
-    if (existingLink.Name === linkName) {
-      if (!options.allowOverwriting) {
-        throw new AlreadyExistsError();
-      }
-      log2("overwriting %s in subshard", child.Name);
-      finalSegment.node.Links = finalSegment.node.Links.filter((l) => l.Name !== linkName);
-      finalSegment.node.Links.push({
-        Name: linkName,
-        Hash: child.Hash,
-        Tsize: child.Tsize
-      });
-    } else if (((_a = existingLink.Name) == null ? void 0 : _a.length) === 2) {
-      throw new Error("Existing link was subshard?!");
-    } else {
-      log2("prefix %s already exists, creating new subshard", prefix);
-      const index2 = finalSegment.node.Links.findIndex((l) => {
-        var _a2;
-        return (_a2 = l.Name) == null ? void 0 : _a2.startsWith(prefix);
-      });
-      const sibling = finalSegment.node.Links.splice(index2, 1)[0];
-      const siblingName = (sibling.Name ?? "").substring(2);
-      const wrapped = wrapHash2(hamtHashFn2);
-      const siblingHash = wrapped(fromString3(siblingName));
-      for (let i = 0; i < path6.length; i++) {
-        await siblingHash.take(hamtBucketBits);
-      }
-      while (true) {
-        const siblingIndex = await siblingHash.take(hamtBucketBits);
-        const siblingPrefix = toPrefix2(siblingIndex);
-        sibling.Name = `${siblingPrefix}${siblingName}`;
-        const newIndex = await hash.take(hamtBucketBits);
-        const newPrefix = toPrefix2(newIndex);
-        if (siblingPrefix === newPrefix) {
-          const children2 = new import_sparse_array3.default();
-          children2.set(newIndex, true);
-          path6.push({
-            prefix: newPrefix,
-            children: children2,
-            node: {
-              Links: []
-            }
-          });
-          continue;
-        }
-        const children = new import_sparse_array3.default();
-        children.set(newIndex, true);
-        children.set(siblingIndex, true);
-        path6.push({
-          prefix,
-          children,
-          node: {
-            Links: [
-              sibling,
-              {
-                Name: `${newPrefix}${child.Name}`,
-                Hash: child.Hash,
-                Tsize: child.Tsize
-              }
-            ]
-          }
-        });
-        break;
-      }
-    }
-  } else {
-    log2("link %s was not present in sub-shard", linkName);
-    child.Name = linkName;
-    finalSegment.node.Links.push(child);
-    finalSegment.children.set(index, true);
-    log2("adding %s to existing sub-shard", linkName);
-  }
-  return updateShardedDirectory(path6, blockstore, options);
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-directory.js
-async function cidToDirectory(cid, blockstore, options = {}) {
-  const entry = await exporter(cid, blockstore, options);
-  if (entry.type !== "directory") {
-    throw new NotADirectoryError(`${cid.toString()} was not a UnixFS directory`);
-  }
-  return {
-    cid,
-    node: entry.node
-  };
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-pblink.js
-async function cidToPBLink(cid, name4, blockstore, options) {
-  const sourceEntry = await exporter(cid, blockstore, options);
-  if (sourceEntry.type !== "directory" && sourceEntry.type !== "file" && sourceEntry.type !== "raw") {
-    throw new NotUnixFSError(`${cid.toString()} was not a UnixFS node`);
-  }
-  return {
-    Name: name4,
-    Tsize: sourceEntry.node instanceof Uint8Array ? sourceEntry.node.byteLength : dagNodeTsize(sourceEntry.node),
-    Hash: cid
-  };
-}
-function dagNodeTsize(node) {
-  const linkSizes = node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0);
-  return encode7(node).byteLength + linkSizes;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/resolve.js
-var log3 = logger("helia:unixfs:components:utils:resolve");
-async function resolve5(cid, path6, blockstore, options) {
-  if (path6 == null || path6 === "") {
-    return { cid };
-  }
-  log3('resolve "%s" under %c', path6, cid);
-  const parts = path6.split("/").filter(Boolean);
-  const segments = [{
-    name: "",
-    cid,
-    size: 0n
-  }];
-  for (let i = 0; i < parts.length; i++) {
-    const part = parts[i];
-    const result = await exporter(cid, blockstore, options);
-    log3('resolving "%s"', part, result);
-    if (result.type === "file") {
-      if (i < parts.length - 1) {
-        throw new InvalidParametersError("Path was invalid");
-      }
-      cid = result.cid;
-    } else if (result.type === "directory") {
-      let dirCid;
-      for await (const entry of result.content()) {
-        if (entry.name === part) {
-          dirCid = entry.cid;
-          break;
-        }
-      }
-      if (dirCid == null) {
-        throw new DoesNotExistError("Could not find path in directory");
-      }
-      cid = dirCid;
-      segments.push({
-        name: part,
-        cid,
-        size: result.size
-      });
-    } else {
-      throw new InvalidParametersError("Could not resolve path");
-    }
-  }
-  log3("resolved %s to %c", path6, cid);
-  return {
-    cid,
-    path: path6,
-    segments
-  };
-}
-async function updatePathCids(cid, result, blockstore, options) {
-  if (result.segments == null || result.segments.length === 0) {
-    return cid;
-  }
-  let child = result.segments.pop();
-  if (child == null) {
-    throw new Error("Insufficient segments");
-  }
-  child.cid = cid;
-  result.segments.reverse();
-  for (const parent of result.segments) {
-    const [directory, pblink] = await Promise.all([
-      cidToDirectory(parent.cid, blockstore, options),
-      cidToPBLink(child.cid, child.name, blockstore, options)
-    ]);
-    const result2 = await addLink(directory, pblink, blockstore, {
-      ...options,
-      allowOverwriting: true,
-      cidVersion: cid.version
-    });
-    cid = result2.cid;
-    parent.cid = cid;
-    child = parent;
-  }
-  return cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/cat.js
-var mergeOptions2 = merge_options_default.bind({ ignoreUndefined: true });
-var defaultOptions = {};
-async function* cat(cid, blockstore, options = {}) {
-  const opts = mergeOptions2(defaultOptions, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, opts);
-  const result = await exporter(resolved.cid, blockstore, opts);
-  if (result.type !== "file" && result.type !== "raw") {
-    throw new NotAFileError();
-  }
-  if (result.content == null) {
-    throw new NoContentError();
-  }
-  yield* result.content(opts);
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/constants.js
-var SHARD_SPLIT_THRESHOLD_BYTES = 262144;
-
-// node_modules/@helia/unixfs/dist/src/commands/chmod.js
-var mergeOptions3 = merge_options_default.bind({ ignoreUndefined: true });
-var log4 = logger("helia:unixfs:chmod");
-var defaultOptions2 = {
-  recursive: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function chmod(cid, mode, blockstore, options = {}) {
-  const opts = mergeOptions3(defaultOptions2, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, options);
-  log4("chmod %c %d", resolved.cid, mode);
-  if (opts.recursive) {
-    const root = await pipe(
-      async function* () {
-        for await (const entry of recursive(resolved.cid, blockstore, options)) {
-          let metadata2;
-          let links2 = [];
-          if (entry.type === "raw") {
-            metadata2 = new UnixFS({ type: "file", data: entry.node });
-          } else if (entry.type === "file" || entry.type === "directory") {
-            metadata2 = entry.unixfs;
-            links2 = entry.node.Links;
-          } else {
-            throw new NotUnixFSError();
-          }
-          metadata2.mode = mode;
-          const node = {
-            Data: metadata2.marshal(),
-            Links: links2
-          };
-          yield {
-            path: entry.path,
-            content: node
-          };
-        }
-      },
-      // @ts-expect-error cannot combine progress types
-      (source) => importer(source, blockstore, {
-        ...opts,
-        dagBuilder: async function* (source2, block2) {
-          for await (const entry of source2) {
-            yield async function() {
-              const node = entry.content;
-              const buf2 = encode7(node);
-              const updatedCid2 = await persist2(buf2, block2, {
-                ...opts,
-                cidVersion: cid.version
-              });
-              if (node.Data == null) {
-                throw new InvalidPBNodeError(`${updatedCid2} had no data`);
-              }
-              const unixfs2 = UnixFS.unmarshal(node.Data);
-              return {
-                cid: updatedCid2,
-                size: BigInt(buf2.length),
-                path: entry.path,
-                unixfs: unixfs2
-              };
-            };
-          }
-        }
-      }),
-      async (nodes) => src_default7(nodes)
-    );
-    if (root == null) {
-      throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`);
-    }
-    return updatePathCids(root.cid, resolved, blockstore, opts);
-  }
-  const block = await blockstore.get(resolved.cid, options);
-  let metadata;
-  let links = [];
-  if (resolved.cid.code === code3) {
-    metadata = new UnixFS({ type: "file", data: block });
-  } else {
-    const node = decode11(block);
-    if (node.Data == null) {
-      throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`);
-    }
-    links = node.Links;
-    metadata = UnixFS.unmarshal(node.Data);
-  }
-  metadata.mode = mode;
-  const updatedBlock = encode7({
-    Data: metadata.marshal(),
-    Links: links
-  });
-  const hash = await sha256.digest(updatedBlock);
-  const updatedCid = CID2.create(resolved.cid.version, code2, hash);
-  await blockstore.put(updatedCid, updatedBlock);
-  return updatePathCids(updatedCid, resolved, blockstore, opts);
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/cp.js
-var mergeOptions4 = merge_options_default.bind({ ignoreUndefined: true });
-var log5 = logger("helia:unixfs:cp");
-var defaultOptions3 = {
-  force: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function cp(source, target, name4, blockstore, options = {}) {
-  const opts = mergeOptions4(defaultOptions3, options);
-  if (name4.includes("/")) {
-    throw new InvalidParametersError("Name must not have slashes");
-  }
-  const [directory, pblink] = await Promise.all([
-    cidToDirectory(target, blockstore, opts),
-    cidToPBLink(source, name4, blockstore, opts)
-  ]);
-  log5('Adding %c as "%s" to %c', source, name4, target);
-  const result = await addLink(directory, pblink, blockstore, {
-    allowOverwriting: opts.force,
-    cidVersion: target.version,
-    ...opts
-  });
-  return result.cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/ls.js
-var mergeOptions5 = merge_options_default.bind({ ignoreUndefined: true });
-var defaultOptions4 = {};
-async function* ls(cid, blockstore, options = {}) {
-  const opts = mergeOptions5(defaultOptions4, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, opts);
-  const result = await exporter(resolved.cid, blockstore);
-  if (result.type === "file" || result.type === "raw") {
-    yield result;
-    return;
-  }
-  if (result.content == null) {
-    throw new NoContentError();
-  }
-  if (result.type !== "directory") {
-    throw new NotADirectoryError();
-  }
-  yield* result.content({
-    offset: options.offset,
-    length: options.length
-  });
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/mkdir.js
-var mergeOptions6 = merge_options_default.bind({ ignoreUndefined: true });
-var log6 = logger("helia:unixfs:mkdir");
-var defaultOptions5 = {
-  cidVersion: 1,
-  force: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function mkdir(parentCid, dirname, blockstore, options = {}) {
-  const opts = mergeOptions6(defaultOptions5, options);
-  if (dirname.includes("/")) {
-    throw new InvalidParametersError("Path must not have slashes");
-  }
-  const entry = await exporter(parentCid, blockstore, options);
-  if (entry.type !== "directory") {
-    throw new NotADirectoryError(`${parentCid.toString()} was not a UnixFS directory`);
-  }
-  log6("creating %s", dirname);
-  const metadata = new UnixFS({
-    type: "directory",
-    mode: opts.mode,
-    mtime: opts.mtime
-  });
-  const node = {
-    Data: metadata.marshal(),
-    Links: []
-  };
-  const buf2 = encode7(node);
-  const hash = await sha256.digest(buf2);
-  const emptyDirCid = CID2.create(opts.cidVersion, code2, hash);
-  await blockstore.put(emptyDirCid, buf2);
-  const [directory, pblink] = await Promise.all([
-    cidToDirectory(parentCid, blockstore, opts),
-    cidToPBLink(emptyDirCid, dirname, blockstore, opts)
-  ]);
-  log6("adding empty dir called %s to %c", dirname, parentCid);
-  const result = await addLink(directory, pblink, blockstore, {
-    ...opts,
-    allowOverwriting: opts.force
-  });
-  return result.cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/remove-link.js
-var log7 = logger("helia:unixfs:utils:remove-link");
-async function removeLink(parent, name4, blockstore, options) {
-  if (parent.node.Data == null) {
-    throw new InvalidPBNodeError("Parent node had no data");
-  }
-  const meta = UnixFS.unmarshal(parent.node.Data);
-  if (meta.type === "hamt-sharded-directory") {
-    log7(`removing ${name4} from sharded directory`);
-    const result = await removeFromShardedDirectory(parent, name4, blockstore, options);
-    if (!await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) {
-      log7("converting shard to flat directory %c", parent.cid);
-      return convertToFlatDirectory(result, blockstore, options);
-    }
-    return result;
-  }
-  log7(`removing link ${name4} regular directory`);
-  return removeFromDirectory(parent, name4, blockstore, options);
-}
-var removeFromDirectory = async (parent, name4, blockstore, options) => {
-  parent.node.Links = parent.node.Links.filter((link) => {
-    return link.Name !== name4;
-  });
-  const parentBlock = encode7(parent.node);
-  const parentCid = await persist2(parentBlock, blockstore, {
-    ...options,
-    cidVersion: parent.cid.version
-  });
-  log7(`Updated regular directory ${parentCid}`);
-  return {
-    node: parent.node,
-    cid: parentCid
-  };
-};
-var removeFromShardedDirectory = async (parent, name4, blockstore, options) => {
-  const { path: path6 } = await recreateShardedDirectory(parent.cid, name4, blockstore, options);
-  const finalSegment = path6[path6.length - 1];
-  if (finalSegment == null) {
-    throw new Error("Invalid HAMT, could not generate path");
-  }
-  const linkName = finalSegment.node.Links.filter((l) => (l.Name ?? "").substring(2) === name4).map((l) => l.Name).pop();
-  if (linkName == null) {
-    throw new Error("File not found");
-  }
-  const prefix = linkName.substring(0, 2);
-  const index = parseInt(prefix, 16);
-  finalSegment.node.Links = finalSegment.node.Links.filter((link) => link.Name !== linkName);
-  finalSegment.children.unset(index);
-  if (finalSegment.node.Links.length === 1) {
-    while (true) {
-      if (path6.length === 1) {
-        break;
-      }
-      const segment = path6[path6.length - 1];
-      if (segment == null || segment.node.Links.length > 1) {
-        break;
-      }
-      path6.pop();
-      const nextSegment = path6[path6.length - 1];
-      if (nextSegment == null) {
-        break;
-      }
-      const link = segment.node.Links[0];
-      nextSegment.node.Links = nextSegment.node.Links.filter((l) => !(l.Name ?? "").startsWith(nextSegment.prefix));
-      nextSegment.node.Links.push({
-        Hash: link.Hash,
-        Name: `${nextSegment.prefix}${(link.Name ?? "").substring(2)}`,
-        Tsize: link.Tsize
-      });
-    }
-  }
-  return updateShardedDirectory(path6, blockstore, options);
-};
-var convertToFlatDirectory = async (parent, blockstore, options) => {
-  if (parent.node.Data == null) {
-    throw new InvalidParametersError("Invalid parent passed to convertToFlatDirectory");
-  }
-  const rootNode = {
-    Links: []
-  };
-  const dir = await exporter(parent.cid, blockstore);
-  if (dir.type !== "directory") {
-    throw new Error("Unexpected node type");
-  }
-  for await (const entry of dir.content()) {
-    let tsize = 0;
-    if (entry.node instanceof Uint8Array) {
-      tsize = entry.node.byteLength;
-    } else {
-      tsize = encode7(entry.node).length;
-    }
-    rootNode.Links.push({
-      Hash: entry.cid,
-      Name: entry.name,
-      Tsize: tsize
-    });
-  }
-  const oldUnixfs = UnixFS.unmarshal(parent.node.Data);
-  rootNode.Data = new UnixFS({ type: "directory", mode: oldUnixfs.mode, mtime: oldUnixfs.mtime }).marshal();
-  const block = encode7(prepare(rootNode));
-  const cid = await persist2(block, blockstore, {
-    codec: src_exports2,
-    cidVersion: parent.cid.version,
-    signal: options.signal
-  });
-  return {
-    cid,
-    node: rootNode
-  };
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/rm.js
-var mergeOptions7 = merge_options_default.bind({ ignoreUndefined: true });
-var log8 = logger("helia:unixfs:rm");
-var defaultOptions6 = {
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function rm(target, name4, blockstore, options = {}) {
-  const opts = mergeOptions7(defaultOptions6, options);
-  if (name4.includes("/")) {
-    throw new InvalidParametersError("Name must not have slashes");
-  }
-  const directory = await cidToDirectory(target, blockstore, opts);
-  log8("Removing %s from %c", name4, target);
-  const result = await removeLink(directory, name4, blockstore, {
-    ...opts,
-    cidVersion: target.version
-  });
-  return result.cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/stat.js
-var mergeOptions8 = merge_options_default.bind({ ignoreUndefined: true });
-var log9 = logger("helia:unixfs:stat");
-var defaultOptions7 = {};
-async function stat(cid, blockstore, options = {}) {
-  var _a;
-  const opts = mergeOptions8(defaultOptions7, options);
-  const resolved = await resolve5(cid, options.path, blockstore, opts);
-  log9("stat %c", resolved.cid);
-  const result = await exporter(resolved.cid, blockstore, opts);
-  if (result.type !== "file" && result.type !== "directory" && result.type !== "raw") {
-    throw new NotUnixFSError();
-  }
-  let fileSize = 0n;
-  let dagSize = 0n;
-  let localFileSize = 0n;
-  let localDagSize = 0n;
-  let blocks = 0;
-  let mode;
-  let mtime;
-  const type = result.type;
-  let unixfs2;
-  if (result.type === "raw") {
-    fileSize = BigInt(result.node.byteLength);
-    dagSize = BigInt(result.node.byteLength);
-    localFileSize = BigInt(result.node.byteLength);
-    localDagSize = BigInt(result.node.byteLength);
-    blocks = 1;
-  }
-  if (result.type === "directory") {
-    fileSize = 0n;
-    dagSize = BigInt(result.unixfs.marshal().byteLength);
-    localFileSize = 0n;
-    localDagSize = dagSize;
-    blocks = 1;
-    mode = result.unixfs.mode;
-    mtime = result.unixfs.mtime;
-    unixfs2 = result.unixfs;
-  }
-  if (result.type === "file") {
-    const results = await inspectDag(resolved.cid, blockstore, opts);
-    fileSize = result.unixfs.fileSize();
-    dagSize = BigInt((((_a = result.node.Data) == null ? void 0 : _a.byteLength) ?? 0) + result.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0));
-    localFileSize = BigInt(results.localFileSize);
-    localDagSize = BigInt(results.localDagSize);
-    blocks = results.blocks;
-    mode = result.unixfs.mode;
-    mtime = result.unixfs.mtime;
-    unixfs2 = result.unixfs;
-  }
-  return {
-    cid: resolved.cid,
-    mode,
-    mtime,
-    fileSize,
-    dagSize,
-    localFileSize,
-    localDagSize,
-    blocks,
-    type,
-    unixfs: unixfs2
-  };
-}
-async function inspectDag(cid, blockstore, options) {
-  const results = {
-    localFileSize: 0,
-    localDagSize: 0,
-    blocks: 0
-  };
-  if (await blockstore.has(cid, options)) {
-    const block = await blockstore.get(cid, options);
-    results.blocks++;
-    results.localDagSize += block.byteLength;
-    if (cid.code === code3) {
-      results.localFileSize += block.byteLength;
-    } else if (cid.code === code2) {
-      const pbNode = decode11(block);
-      if (pbNode.Links.length > 0) {
-        for (const link of pbNode.Links) {
-          const linkResult = await inspectDag(link.Hash, blockstore, options);
-          results.localFileSize += linkResult.localFileSize;
-          results.localDagSize += linkResult.localDagSize;
-          results.blocks += linkResult.blocks;
-        }
-      } else {
-        if (pbNode.Data == null) {
-          throw new InvalidPBNodeError(`PBNode ${cid.toString()} had no data`);
-        }
-        const unixfs2 = UnixFS.unmarshal(pbNode.Data);
-        if (unixfs2.data == null) {
-          throw new InvalidPBNodeError(`UnixFS node ${cid.toString()} had no data`);
-        }
-        results.localFileSize += unixfs2.data.byteLength ?? 0;
-      }
-    } else {
-      throw new UnknownError(`${cid.toString()} was neither DAG_PB nor RAW`);
-    }
-  }
-  return results;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/touch.js
-var mergeOptions9 = merge_options_default.bind({ ignoreUndefined: true });
-var log10 = logger("helia:unixfs:touch");
-var defaultOptions8 = {
-  recursive: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function touch(cid, blockstore, options = {}) {
-  const opts = mergeOptions9(defaultOptions8, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, opts);
-  const mtime = opts.mtime ?? {
-    secs: BigInt(Math.round(Date.now() / 1e3)),
-    nsecs: 0
-  };
-  log10("touch %c %o", resolved.cid, mtime);
-  if (opts.recursive) {
-    const root = await pipe(
-      async function* () {
-        for await (const entry of recursive(resolved.cid, blockstore)) {
-          let metadata2;
-          let links2;
-          if (entry.type === "raw") {
-            metadata2 = new UnixFS({ data: entry.node });
-            links2 = [];
-          } else if (entry.type === "file" || entry.type === "directory") {
-            metadata2 = entry.unixfs;
-            links2 = entry.node.Links;
-          } else {
-            throw new NotUnixFSError();
-          }
-          metadata2.mtime = mtime;
-          const node = {
-            Data: metadata2.marshal(),
-            Links: links2
-          };
-          yield {
-            path: entry.path,
-            content: node
-          };
-        }
-      },
-      // @ts-expect-error blockstore types are incompatible
-      (source) => importer(source, blockstore, {
-        ...opts,
-        dagBuilder: async function* (source2, block2) {
-          for await (const entry of source2) {
-            yield async function() {
-              const node = entry.content;
-              const buf2 = encode7(node);
-              const updatedCid2 = await persist2(buf2, block2, {
-                ...opts,
-                cidVersion: cid.version
-              });
-              if (node.Data == null) {
-                throw new InvalidPBNodeError(`${updatedCid2} had no data`);
-              }
-              const unixfs2 = UnixFS.unmarshal(node.Data);
-              return {
-                cid: updatedCid2,
-                size: BigInt(buf2.length),
-                path: entry.path,
-                unixfs: unixfs2
-              };
-            };
-          }
-        }
-      }),
-      async (nodes) => src_default7(nodes)
-    );
-    if (root == null) {
-      throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`);
-    }
-    return updatePathCids(root.cid, resolved, blockstore, opts);
-  }
-  const block = await blockstore.get(resolved.cid, options);
-  let metadata;
-  let links = [];
-  if (resolved.cid.code === code3) {
-    metadata = new UnixFS({ data: block });
-  } else {
-    const node = decode11(block);
-    links = node.Links;
-    if (node.Data == null) {
-      throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`);
-    }
-    metadata = UnixFS.unmarshal(node.Data);
-  }
-  metadata.mtime = mtime;
-  const updatedBlock = encode7({
-    Data: metadata.marshal(),
-    Links: links
-  });
-  const hash = await sha256.digest(updatedBlock);
-  const updatedCid = CID2.create(resolved.cid.version, code2, hash);
-  await blockstore.put(updatedCid, updatedBlock);
-  return updatePathCids(updatedCid, resolved, blockstore, opts);
-}
-
-// node_modules/it-glob/dist/src/index.js
-var import_promises = __toESM(require("fs/promises"), 1);
-var import_path = __toESM(require("path"), 1);
-
-// node_modules/minimatch/dist/mjs/index.js
-var import_brace_expansion = __toESM(require_brace_expansion(), 1);
-
-// node_modules/minimatch/dist/mjs/assert-valid-pattern.js
-var MAX_PATTERN_LENGTH = 1024 * 64;
-var assertValidPattern = (pattern) => {
-  if (typeof pattern !== "string") {
-    throw new TypeError("invalid pattern");
-  }
-  if (pattern.length > MAX_PATTERN_LENGTH) {
-    throw new TypeError("pattern is too long");
-  }
-};
-
-// node_modules/minimatch/dist/mjs/brace-expressions.js
-var posixClasses = {
-  "[:alnum:]": ["\\p{L}\\p{Nl}\\p{Nd}", true],
-  "[:alpha:]": ["\\p{L}\\p{Nl}", true],
-  "[:ascii:]": ["\\x00-\\x7f", false],
-  "[:blank:]": ["\\p{Zs}\\t", true],
-  "[:cntrl:]": ["\\p{Cc}", true],
-  "[:digit:]": ["\\p{Nd}", true],
-  "[:graph:]": ["\\p{Z}\\p{C}", true, true],
-  "[:lower:]": ["\\p{Ll}", true],
-  "[:print:]": ["\\p{C}", true],
-  "[:punct:]": ["\\p{P}", true],
-  "[:space:]": ["\\p{Z}\\t\\r\\n\\v\\f", true],
-  "[:upper:]": ["\\p{Lu}", true],
-  "[:word:]": ["\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}", true],
-  "[:xdigit:]": ["A-Fa-f0-9", false]
-};
-var braceEscape = (s) => s.replace(/[[\]\\-]/g, "\\$&");
-var regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var rangesToString = (ranges) => ranges.join("");
-var parseClass = (glob2, position) => {
-  const pos = position;
-  if (glob2.charAt(pos) !== "[") {
-    throw new Error("not in a brace expression");
-  }
-  const ranges = [];
-  const negs = [];
-  let i = pos + 1;
-  let sawStart = false;
-  let uflag = false;
-  let escaping = false;
-  let negate = false;
-  let endPos = pos;
-  let rangeStart = "";
-  WHILE:
-    while (i < glob2.length) {
-      const c = glob2.charAt(i);
-      if ((c === "!" || c === "^") && i === pos + 1) {
-        negate = true;
-        i++;
-        continue;
-      }
-      if (c === "]" && sawStart && !escaping) {
-        endPos = i + 1;
-        break;
-      }
-      sawStart = true;
-      if (c === "\\") {
-        if (!escaping) {
-          escaping = true;
-          i++;
-          continue;
-        }
-      }
-      if (c === "[" && !escaping) {
-        for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-          if (glob2.startsWith(cls, i)) {
-            if (rangeStart) {
-              return ["$.", false, glob2.length - pos, true];
-            }
-            i += cls.length;
-            if (neg)
-              negs.push(unip);
-            else
-              ranges.push(unip);
-            uflag = uflag || u;
-            continue WHILE;
-          }
-        }
-      }
-      escaping = false;
-      if (rangeStart) {
-        if (c > rangeStart) {
-          ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c));
-        } else if (c === rangeStart) {
-          ranges.push(braceEscape(c));
-        }
-        rangeStart = "";
-        i++;
-        continue;
-      }
-      if (glob2.startsWith("-]", i + 1)) {
-        ranges.push(braceEscape(c + "-"));
-        i += 2;
-        continue;
-      }
-      if (glob2.startsWith("-", i + 1)) {
-        rangeStart = c;
-        i += 2;
-        continue;
-      }
-      ranges.push(braceEscape(c));
-      i++;
-    }
-  if (endPos < i) {
-    return ["", false, 0, false];
-  }
-  if (!ranges.length && !negs.length) {
-    return ["$.", false, glob2.length - pos, true];
-  }
-  if (negs.length === 0 && ranges.length === 1 && /^\\?.$/.test(ranges[0]) && !negate) {
-    const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-    return [regexpEscape(r), false, endPos - pos, false];
-  }
-  const sranges = "[" + (negate ? "^" : "") + rangesToString(ranges) + "]";
-  const snegs = "[" + (negate ? "" : "^") + rangesToString(negs) + "]";
-  const comb = ranges.length && negs.length ? "(" + sranges + "|" + snegs + ")" : ranges.length ? sranges : snegs;
-  return [comb, uflag, endPos - pos, true];
-};
-
-// node_modules/minimatch/dist/mjs/unescape.js
-var unescape = (s, { windowsPathsNoEscape = false } = {}) => {
-  return windowsPathsNoEscape ? s.replace(/\[([^\/\\])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, "$1$2").replace(/\\([^\/])/g, "$1");
-};
-
-// node_modules/minimatch/dist/mjs/ast.js
-var types2 = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]);
-var isExtglobType = (c) => types2.has(c);
-var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))";
-var startNoDot = "(?!\\.)";
-var addPatternStart = /* @__PURE__ */ new Set(["[", "."]);
-var justDots = /* @__PURE__ */ new Set(["..", "."]);
-var reSpecials = new Set("().*{}+?[]^$\\!");
-var regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var qmark = "[^/]";
-var star = qmark + "*?";
-var starNoEmpty = qmark + "+?";
-var AST = class _AST {
-  type;
-  #root;
-  #hasMagic;
-  #uflag = false;
-  #parts = [];
-  #parent;
-  #parentIndex;
-  #negs;
-  #filledNegs = false;
-  #options;
-  #toString;
-  // set to true if it's an extglob with no children
-  // (which really means one child of '')
-  #emptyExt = false;
-  constructor(type, parent, options = {}) {
-    this.type = type;
-    if (type)
-      this.#hasMagic = true;
-    this.#parent = parent;
-    this.#root = this.#parent ? this.#parent.#root : this;
-    this.#options = this.#root === this ? options : this.#root.#options;
-    this.#negs = this.#root === this ? [] : this.#root.#negs;
-    if (type === "!" && !this.#root.#filledNegs)
-      this.#negs.push(this);
-    this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-  }
-  get hasMagic() {
-    if (this.#hasMagic !== void 0)
-      return this.#hasMagic;
-    for (const p of this.#parts) {
-      if (typeof p === "string")
-        continue;
-      if (p.type || p.hasMagic)
-        return this.#hasMagic = true;
-    }
-    return this.#hasMagic;
-  }
-  // reconstructs the pattern
-  toString() {
-    if (this.#toString !== void 0)
-      return this.#toString;
-    if (!this.type) {
-      return this.#toString = this.#parts.map((p) => String(p)).join("");
-    } else {
-      return this.#toString = this.type + "(" + this.#parts.map((p) => String(p)).join("|") + ")";
-    }
-  }
-  #fillNegs() {
-    if (this !== this.#root)
-      throw new Error("should only call on root");
-    if (this.#filledNegs)
-      return this;
-    this.toString();
-    this.#filledNegs = true;
-    let n;
-    while (n = this.#negs.pop()) {
-      if (n.type !== "!")
-        continue;
-      let p = n;
-      let pp = p.#parent;
-      while (pp) {
-        for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-          for (const part of n.#parts) {
-            if (typeof part === "string") {
-              throw new Error("string part in extglob AST??");
-            }
-            part.copyIn(pp.#parts[i]);
-          }
-        }
-        p = pp;
-        pp = p.#parent;
-      }
-    }
-    return this;
-  }
-  push(...parts) {
-    for (const p of parts) {
-      if (p === "")
-        continue;
-      if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) {
-        throw new Error("invalid part: " + p);
-      }
-      this.#parts.push(p);
-    }
-  }
-  toJSON() {
-    var _a;
-    const ret = this.type === null ? this.#parts.slice().map((p) => typeof p === "string" ? p : p.toJSON()) : [this.type, ...this.#parts.map((p) => p.toJSON())];
-    if (this.isStart() && !this.type)
-      ret.unshift([]);
-    if (this.isEnd() && (this === this.#root || this.#root.#filledNegs && ((_a = this.#parent) == null ? void 0 : _a.type) === "!")) {
-      ret.push({});
-    }
-    return ret;
-  }
-  isStart() {
-    var _a;
-    if (this.#root === this)
-      return true;
-    if (!((_a = this.#parent) == null ? void 0 : _a.isStart()))
-      return false;
-    if (this.#parentIndex === 0)
-      return true;
-    const p = this.#parent;
-    for (let i = 0; i < this.#parentIndex; i++) {
-      const pp = p.#parts[i];
-      if (!(pp instanceof _AST && pp.type === "!")) {
-        return false;
-      }
-    }
-    return true;
-  }
-  isEnd() {
-    var _a, _b, _c;
-    if (this.#root === this)
-      return true;
-    if (((_a = this.#parent) == null ? void 0 : _a.type) === "!")
-      return true;
-    if (!((_b = this.#parent) == null ? void 0 : _b.isEnd()))
-      return false;
-    if (!this.type)
-      return (_c = this.#parent) == null ? void 0 : _c.isEnd();
-    const pl = this.#parent ? this.#parent.#parts.length : 0;
-    return this.#parentIndex === pl - 1;
-  }
-  copyIn(part) {
-    if (typeof part === "string")
-      this.push(part);
-    else
-      this.push(part.clone(this));
-  }
-  clone(parent) {
-    const c = new _AST(this.type, parent);
-    for (const p of this.#parts) {
-      c.copyIn(p);
-    }
-    return c;
-  }
-  static #parseAST(str, ast, pos, opt) {
-    let escaping = false;
-    let inBrace = false;
-    let braceStart = -1;
-    let braceNeg = false;
-    if (ast.type === null) {
-      let i2 = pos;
-      let acc2 = "";
-      while (i2 < str.length) {
-        const c = str.charAt(i2++);
-        if (escaping || c === "\\") {
-          escaping = !escaping;
-          acc2 += c;
-          continue;
-        }
-        if (inBrace) {
-          if (i2 === braceStart + 1) {
-            if (c === "^" || c === "!") {
-              braceNeg = true;
-            }
-          } else if (c === "]" && !(i2 === braceStart + 2 && braceNeg)) {
-            inBrace = false;
-          }
-          acc2 += c;
-          continue;
-        } else if (c === "[") {
-          inBrace = true;
-          braceStart = i2;
-          braceNeg = false;
-          acc2 += c;
-          continue;
-        }
-        if (!opt.noext && isExtglobType(c) && str.charAt(i2) === "(") {
-          ast.push(acc2);
-          acc2 = "";
-          const ext2 = new _AST(c, ast);
-          i2 = _AST.#parseAST(str, ext2, i2, opt);
-          ast.push(ext2);
-          continue;
-        }
-        acc2 += c;
-      }
-      ast.push(acc2);
-      return i2;
-    }
-    let i = pos + 1;
-    let part = new _AST(null, ast);
-    const parts = [];
-    let acc = "";
-    while (i < str.length) {
-      const c = str.charAt(i++);
-      if (escaping || c === "\\") {
-        escaping = !escaping;
-        acc += c;
-        continue;
-      }
-      if (inBrace) {
-        if (i === braceStart + 1) {
-          if (c === "^" || c === "!") {
-            braceNeg = true;
-          }
-        } else if (c === "]" && !(i === braceStart + 2 && braceNeg)) {
-          inBrace = false;
-        }
-        acc += c;
-        continue;
-      } else if (c === "[") {
-        inBrace = true;
-        braceStart = i;
-        braceNeg = false;
-        acc += c;
-        continue;
-      }
-      if (isExtglobType(c) && str.charAt(i) === "(") {
-        part.push(acc);
-        acc = "";
-        const ext2 = new _AST(c, part);
-        part.push(ext2);
-        i = _AST.#parseAST(str, ext2, i, opt);
-        continue;
-      }
-      if (c === "|") {
-        part.push(acc);
-        acc = "";
-        parts.push(part);
-        part = new _AST(null, ast);
-        continue;
-      }
-      if (c === ")") {
-        if (acc === "" && ast.#parts.length === 0) {
-          ast.#emptyExt = true;
-        }
-        part.push(acc);
-        acc = "";
-        ast.push(...parts, part);
-        return i;
-      }
-      acc += c;
-    }
-    ast.type = null;
-    ast.#hasMagic = void 0;
-    ast.#parts = [str.substring(pos - 1)];
-    return i;
-  }
-  static fromGlob(pattern, options = {}) {
-    const ast = new _AST(null, void 0, options);
-    _AST.#parseAST(pattern, ast, 0, options);
-    return ast;
-  }
-  // returns the regular expression if there's magic, or the unescaped
-  // string if not.
-  toMMPattern() {
-    if (this !== this.#root)
-      return this.#root.toMMPattern();
-    const glob2 = this.toString();
-    const [re, body, hasMagic, uflag] = this.toRegExpSource();
-    const anyMagic = hasMagic || this.#hasMagic || this.#options.nocase && !this.#options.nocaseMagicOnly && glob2.toUpperCase() !== glob2.toLowerCase();
-    if (!anyMagic) {
-      return body;
-    }
-    const flags = (this.#options.nocase ? "i" : "") + (uflag ? "u" : "");
-    return Object.assign(new RegExp(`^${re}$`, flags), {
-      _src: re,
-      _glob: glob2
-    });
-  }
-  // returns the string match, the regexp source, whether there's magic
-  // in the regexp (so a regular expression is required) and whether or
-  // not the uflag is needed for the regular expression (for posix classes)
-  // TODO: instead of injecting the start/end at this point, just return
-  // the BODY of the regexp, along with the start/end portions suitable
-  // for binding the start/end in either a joined full-path makeRe context
-  // (where we bind to (^|/), or a standalone matchPart context (where
-  // we bind to ^, and not /).  Otherwise slashes get duped!
-  //
-  // In part-matching mode, the start is:
-  // - if not isStart: nothing
-  // - if traversal possible, but not allowed: ^(?!\.\.?$)
-  // - if dots allowed or not possible: ^
-  // - if dots possible and not allowed: ^(?!\.)
-  // end is:
-  // - if not isEnd(): nothing
-  // - else: $
-  //
-  // In full-path matching mode, we put the slash at the START of the
-  // pattern, so start is:
-  // - if first pattern: same as part-matching mode
-  // - if not isStart(): nothing
-  // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-  // - if dots allowed or not possible: /
-  // - if dots possible and not allowed: /(?!\.)
-  // end is:
-  // - if last pattern, same as part-matching mode
-  // - else nothing
-  //
-  // Always put the (?:$|/) on negated tails, though, because that has to be
-  // there to bind the end of the negated pattern portion, and it's easier to
-  // just stick it in now rather than try to inject it later in the middle of
-  // the pattern.
-  //
-  // We can just always return the same end, and leave it up to the caller
-  // to know whether it's going to be used joined or in parts.
-  // And, if the start is adjusted slightly, can do the same there:
-  // - if not isStart: nothing
-  // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-  // - if dots allowed or not possible: (?:/|^)
-  // - if dots possible and not allowed: (?:/|^)(?!\.)
-  //
-  // But it's better to have a simpler binding without a conditional, for
-  // performance, so probably better to return both start options.
-  //
-  // Then the caller just ignores the end if it's not the first pattern,
-  // and the start always gets applied.
-  //
-  // But that's always going to be $ if it's the ending pattern, or nothing,
-  // so the caller can just attach $ at the end of the pattern when building.
-  //
-  // So the todo is:
-  // - better detect what kind of start is needed
-  // - return both flavors of starting pattern
-  // - attach $ at the end of the pattern when creating the actual RegExp
-  //
-  // Ah, but wait, no, that all only applies to the root when the first pattern
-  // is not an extglob. If the first pattern IS an extglob, then we need all
-  // that dot prevention biz to live in the extglob portions, because eg
-  // +(*|.x*) can match .xy but not .yx.
-  //
-  // So, return the two flavors if it's #root and the first child is not an
-  // AST, otherwise leave it to the child AST to handle it, and there,
-  // use the (?:^|/) style of start binding.
-  //
-  // Even simplified further:
-  // - Since the start for a join is eg /(?!\.) and the start for a part
-  // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-  // or start or whatever) and prepend ^ or / at the Regexp construction.
-  toRegExpSource(allowDot) {
-    var _a;
-    const dot = allowDot ?? !!this.#options.dot;
-    if (this.#root === this)
-      this.#fillNegs();
-    if (!this.type) {
-      const noEmpty = this.isStart() && this.isEnd();
-      const src3 = this.#parts.map((p) => {
-        const [re, _, hasMagic, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot);
-        this.#hasMagic = this.#hasMagic || hasMagic;
-        this.#uflag = this.#uflag || uflag;
-        return re;
-      }).join("");
-      let start2 = "";
-      if (this.isStart()) {
-        if (typeof this.#parts[0] === "string") {
-          const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-          if (!dotTravAllowed) {
-            const aps = addPatternStart;
-            const needNoTrav = (
-              // dots are allowed, and the pattern starts with [ or .
-              dot && aps.has(src3.charAt(0)) || // the pattern starts with \., and then [ or .
-              src3.startsWith("\\.") && aps.has(src3.charAt(2)) || // the pattern starts with \.\., and then [ or .
-              src3.startsWith("\\.\\.") && aps.has(src3.charAt(4))
-            );
-            const needNoDot = !dot && !allowDot && aps.has(src3.charAt(0));
-            start2 = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : "";
-          }
-        }
-      }
-      let end = "";
-      if (this.isEnd() && this.#root.#filledNegs && ((_a = this.#parent) == null ? void 0 : _a.type) === "!") {
-        end = "(?:$|\\/)";
-      }
-      const final2 = start2 + src3 + end;
-      return [
-        final2,
-        unescape(src3),
-        this.#hasMagic = !!this.#hasMagic,
-        this.#uflag
-      ];
-    }
-    const repeated = this.type === "*" || this.type === "+";
-    const start = this.type === "!" ? "(?:(?!(?:" : "(?:";
-    let body = this.#partsToRegExp(dot);
-    if (this.isStart() && this.isEnd() && !body && this.type !== "!") {
-      const s = this.toString();
-      this.#parts = [s];
-      this.type = null;
-      this.#hasMagic = void 0;
-      return [s, unescape(this.toString()), false, false];
-    }
-    let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? "" : this.#partsToRegExp(true);
-    if (bodyDotAllowed === body) {
-      bodyDotAllowed = "";
-    }
-    if (bodyDotAllowed) {
-      body = `(?:${body})(?:${bodyDotAllowed})*?`;
-    }
-    let final = "";
-    if (this.type === "!" && this.#emptyExt) {
-      final = (this.isStart() && !dot ? startNoDot : "") + starNoEmpty;
-    } else {
-      const close = this.type === "!" ? (
-        // !() must match something,but !(x) can match ''
-        "))" + (this.isStart() && !dot && !allowDot ? startNoDot : "") + star + ")"
-      ) : this.type === "@" ? ")" : this.type === "?" ? ")?" : this.type === "+" && bodyDotAllowed ? ")" : this.type === "*" && bodyDotAllowed ? `)?` : `)${this.type}`;
-      final = start + body + close;
-    }
-    return [
-      final,
-      unescape(body),
-      this.#hasMagic = !!this.#hasMagic,
-      this.#uflag
-    ];
-  }
-  #partsToRegExp(dot) {
-    return this.#parts.map((p) => {
-      if (typeof p === "string") {
-        throw new Error("string type in extglob ast??");
-      }
-      const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-      this.#uflag = this.#uflag || uflag;
-      return re;
-    }).filter((p) => !(this.isStart() && this.isEnd()) || !!p).join("|");
-  }
-  static #parseGlob(glob2, hasMagic, noEmpty = false) {
-    let escaping = false;
-    let re = "";
-    let uflag = false;
-    for (let i = 0; i < glob2.length; i++) {
-      const c = glob2.charAt(i);
-      if (escaping) {
-        escaping = false;
-        re += (reSpecials.has(c) ? "\\" : "") + c;
-        continue;
-      }
-      if (c === "\\") {
-        if (i === glob2.length - 1) {
-          re += "\\\\";
-        } else {
-          escaping = true;
-        }
-        continue;
-      }
-      if (c === "[") {
-        const [src3, needUflag, consumed, magic] = parseClass(glob2, i);
-        if (consumed) {
-          re += src3;
-          uflag = uflag || needUflag;
-          i += consumed - 1;
-          hasMagic = hasMagic || magic;
-          continue;
-        }
-      }
-      if (c === "*") {
-        if (noEmpty && glob2 === "*")
-          re += starNoEmpty;
-        else
-          re += star;
-        hasMagic = true;
-        continue;
-      }
-      if (c === "?") {
-        re += qmark;
-        hasMagic = true;
-        continue;
-      }
-      re += regExpEscape(c);
-    }
-    return [re, unescape(glob2), !!hasMagic, uflag];
-  }
-};
-
-// node_modules/minimatch/dist/mjs/escape.js
-var escape = (s, { windowsPathsNoEscape = false } = {}) => {
-  return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, "[$&]") : s.replace(/[?*()[\]\\]/g, "\\$&");
-};
-
-// node_modules/minimatch/dist/mjs/index.js
-var minimatch = (p, pattern, options = {}) => {
-  assertValidPattern(pattern);
-  if (!options.nocomment && pattern.charAt(0) === "#") {
-    return false;
-  }
-  return new Minimatch(pattern, options).match(p);
-};
-var starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-var starDotExtTest = (ext2) => (f) => !f.startsWith(".") && f.endsWith(ext2);
-var starDotExtTestDot = (ext2) => (f) => f.endsWith(ext2);
-var starDotExtTestNocase = (ext2) => {
-  ext2 = ext2.toLowerCase();
-  return (f) => !f.startsWith(".") && f.toLowerCase().endsWith(ext2);
-};
-var starDotExtTestNocaseDot = (ext2) => {
-  ext2 = ext2.toLowerCase();
-  return (f) => f.toLowerCase().endsWith(ext2);
-};
-var starDotStarRE = /^\*+\.\*+$/;
-var starDotStarTest = (f) => !f.startsWith(".") && f.includes(".");
-var starDotStarTestDot = (f) => f !== "." && f !== ".." && f.includes(".");
-var dotStarRE = /^\.\*+$/;
-var dotStarTest = (f) => f !== "." && f !== ".." && f.startsWith(".");
-var starRE = /^\*+$/;
-var starTest = (f) => f.length !== 0 && !f.startsWith(".");
-var starTestDot = (f) => f.length !== 0 && f !== "." && f !== "..";
-var qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-var qmarksTestNocase = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExt([$0]);
-  if (!ext2)
-    return noext;
-  ext2 = ext2.toLowerCase();
-  return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
-};
-var qmarksTestNocaseDot = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExtDot([$0]);
-  if (!ext2)
-    return noext;
-  ext2 = ext2.toLowerCase();
-  return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
-};
-var qmarksTestDot = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExtDot([$0]);
-  return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
-};
-var qmarksTest = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExt([$0]);
-  return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
-};
-var qmarksTestNoExt = ([$0]) => {
-  const len = $0.length;
-  return (f) => f.length === len && !f.startsWith(".");
-};
-var qmarksTestNoExtDot = ([$0]) => {
-  const len = $0.length;
-  return (f) => f.length === len && f !== "." && f !== "..";
-};
-var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix";
-var path = {
-  win32: { sep: "\\" },
-  posix: { sep: "/" }
-};
-var sep = defaultPlatform === "win32" ? path.win32.sep : path.posix.sep;
-minimatch.sep = sep;
-var GLOBSTAR = Symbol("globstar **");
-minimatch.GLOBSTAR = GLOBSTAR;
-var qmark2 = "[^/]";
-var star2 = qmark2 + "*?";
-var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?";
-var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?";
-var filter2 = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
-minimatch.filter = filter2;
-var ext = (a, b = {}) => Object.assign({}, a, b);
-var defaults = (def) => {
-  if (!def || typeof def !== "object" || !Object.keys(def).length) {
-    return minimatch;
-  }
-  const orig = minimatch;
-  const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-  return Object.assign(m, {
-    Minimatch: class Minimatch extends orig.Minimatch {
-      constructor(pattern, options = {}) {
-        super(pattern, ext(def, options));
-      }
-      static defaults(options) {
-        return orig.defaults(ext(def, options)).Minimatch;
-      }
-    },
-    AST: class AST extends orig.AST {
-      /* c8 ignore start */
-      constructor(type, parent, options = {}) {
-        super(type, parent, ext(def, options));
-      }
-      /* c8 ignore stop */
-      static fromGlob(pattern, options = {}) {
-        return orig.AST.fromGlob(pattern, ext(def, options));
-      }
-    },
-    unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-    escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-    filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-    defaults: (options) => orig.defaults(ext(def, options)),
-    makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-    braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-    match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-    sep: orig.sep,
-    GLOBSTAR
-  });
-};
-minimatch.defaults = defaults;
-var braceExpand = (pattern, options = {}) => {
-  assertValidPattern(pattern);
-  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-    return [pattern];
-  }
-  return (0, import_brace_expansion.default)(pattern);
-};
-minimatch.braceExpand = braceExpand;
-var makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-minimatch.makeRe = makeRe;
-var match = (list, pattern, options = {}) => {
-  const mm = new Minimatch(pattern, options);
-  list = list.filter((f) => mm.match(f));
-  if (mm.options.nonull && !list.length) {
-    list.push(pattern);
-  }
-  return list;
-};
-minimatch.match = match;
-var globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-var regExpEscape2 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var Minimatch = class {
-  options;
-  set;
-  pattern;
-  windowsPathsNoEscape;
-  nonegate;
-  negate;
-  comment;
-  empty;
-  preserveMultipleSlashes;
-  partial;
-  globSet;
-  globParts;
-  nocase;
-  isWindows;
-  platform;
-  windowsNoMagicRoot;
-  regexp;
-  constructor(pattern, options = {}) {
-    assertValidPattern(pattern);
-    options = options || {};
-    this.options = options;
-    this.pattern = pattern;
-    this.platform = options.platform || defaultPlatform;
-    this.isWindows = this.platform === "win32";
-    this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-    if (this.windowsPathsNoEscape) {
-      this.pattern = this.pattern.replace(/\\/g, "/");
-    }
-    this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-    this.regexp = null;
-    this.negate = false;
-    this.nonegate = !!options.nonegate;
-    this.comment = false;
-    this.empty = false;
-    this.partial = !!options.partial;
-    this.nocase = !!this.options.nocase;
-    this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? options.windowsNoMagicRoot : !!(this.isWindows && this.nocase);
-    this.globSet = [];
-    this.globParts = [];
-    this.set = [];
-    this.make();
-  }
-  hasMagic() {
-    if (this.options.magicalBraces && this.set.length > 1) {
-      return true;
-    }
-    for (const pattern of this.set) {
-      for (const part of pattern) {
-        if (typeof part !== "string")
-          return true;
-      }
-    }
-    return false;
-  }
-  debug(..._) {
-  }
-  make() {
-    const pattern = this.pattern;
-    const options = this.options;
-    if (!options.nocomment && pattern.charAt(0) === "#") {
-      this.comment = true;
-      return;
-    }
-    if (!pattern) {
-      this.empty = true;
-      return;
-    }
-    this.parseNegate();
-    this.globSet = [...new Set(this.braceExpand())];
-    if (options.debug) {
-      this.debug = (...args) => console.error(...args);
-    }
-    this.debug(this.pattern, this.globSet);
-    const rawGlobParts = this.globSet.map((s) => this.slashSplit(s));
-    this.globParts = this.preprocess(rawGlobParts);
-    this.debug(this.pattern, this.globParts);
-    let set = this.globParts.map((s, _, __) => {
-      if (this.isWindows && this.windowsNoMagicRoot) {
-        const isUNC = s[0] === "" && s[1] === "" && (s[2] === "?" || !globMagic.test(s[2])) && !globMagic.test(s[3]);
-        const isDrive = /^[a-z]:/i.test(s[0]);
-        if (isUNC) {
-          return [...s.slice(0, 4), ...s.slice(4).map((ss) => this.parse(ss))];
-        } else if (isDrive) {
-          return [s[0], ...s.slice(1).map((ss) => this.parse(ss))];
-        }
-      }
-      return s.map((ss) => this.parse(ss));
-    });
-    this.debug(this.pattern, set);
-    this.set = set.filter((s) => s.indexOf(false) === -1);
-    if (this.isWindows) {
-      for (let i = 0; i < this.set.length; i++) {
-        const p = this.set[i];
-        if (p[0] === "" && p[1] === "" && this.globParts[i][2] === "?" && typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) {
-          p[2] = "?";
-        }
-      }
-    }
-    this.debug(this.pattern, this.set);
-  }
-  // various transforms to equivalent pattern sets that are
-  // faster to process in a filesystem walk.  The goal is to
-  // eliminate what we can, and push all ** patterns as far
-  // to the right as possible, even if it increases the number
-  // of patterns that we have to process.
-  preprocess(globParts) {
-    if (this.options.noglobstar) {
-      for (let i = 0; i < globParts.length; i++) {
-        for (let j = 0; j < globParts[i].length; j++) {
-          if (globParts[i][j] === "**") {
-            globParts[i][j] = "*";
-          }
-        }
-      }
-    }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      globParts = this.firstPhasePreProcess(globParts);
-      globParts = this.secondPhasePreProcess(globParts);
-    } else if (optimizationLevel >= 1) {
-      globParts = this.levelOneOptimize(globParts);
-    } else {
-      globParts = this.adjascentGlobstarOptimize(globParts);
-    }
-    return globParts;
-  }
-  // just get rid of adjascent ** portions
-  adjascentGlobstarOptimize(globParts) {
-    return globParts.map((parts) => {
-      let gs = -1;
-      while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
-        let i = gs;
-        while (parts[i + 1] === "**") {
-          i++;
-        }
-        if (i !== gs) {
-          parts.splice(gs, i - gs);
-        }
-      }
-      return parts;
-    });
-  }
-  // get rid of adjascent ** and resolve .. portions
-  levelOneOptimize(globParts) {
-    return globParts.map((parts) => {
-      parts = parts.reduce((set, part) => {
-        const prev = set[set.length - 1];
-        if (part === "**" && prev === "**") {
-          return set;
-        }
-        if (part === "..") {
-          if (prev && prev !== ".." && prev !== "." && prev !== "**") {
-            set.pop();
-            return set;
-          }
-        }
-        set.push(part);
-        return set;
-      }, []);
-      return parts.length === 0 ? [""] : parts;
-    });
-  }
-  levelTwoFileOptimize(parts) {
-    if (!Array.isArray(parts)) {
-      parts = this.slashSplit(parts);
-    }
-    let didSomething = false;
-    do {
-      didSomething = false;
-      if (!this.preserveMultipleSlashes) {
-        for (let i = 1; i < parts.length - 1; i++) {
-          const p = parts[i];
-          if (i === 1 && p === "" && parts[0] === "")
-            continue;
-          if (p === "." || p === "") {
-            didSomething = true;
-            parts.splice(i, 1);
-            i--;
-          }
-        }
-        if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-          didSomething = true;
-          parts.pop();
-        }
-      }
-      let dd = 0;
-      while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-        const p = parts[dd - 1];
-        if (p && p !== "." && p !== ".." && p !== "**") {
-          didSomething = true;
-          parts.splice(dd - 1, 2);
-          dd -= 2;
-        }
-      }
-    } while (didSomething);
-    return parts.length === 0 ? [""] : parts;
-  }
-  // First phase: single-pattern processing
-  // 
 is 1 or more portions
-  //  is 1 or more portions
-  // 

is any portion other than ., .., '', or ** - // is . or '' - // - // **/.. is *brutal* for filesystem walking performance, because - // it effectively resets the recursive walk each time it occurs, - // and ** cannot be reduced out by a .. pattern part like a regexp - // or most strings (other than .., ., and '') can be. - // - //

/**/../

/

/ -> {

/../

/

/,

/**/

/

/} - //

// -> 
/
-  // 
/

/../ ->

/
-  // **/**/ -> **/
-  //
-  // **/*/ -> */**/ <== not valid because ** doesn't follow
-  // this WOULD be allowed if ** did follow symlinks, or * didn't
-  firstPhasePreProcess(globParts) {
-    let didSomething = false;
-    do {
-      didSomething = false;
-      for (let parts of globParts) {
-        let gs = -1;
-        while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
-          let gss = gs;
-          while (parts[gss + 1] === "**") {
-            gss++;
-          }
-          if (gss > gs) {
-            parts.splice(gs + 1, gss - gs);
-          }
-          let next = parts[gs + 1];
-          const p = parts[gs + 2];
-          const p2 = parts[gs + 3];
-          if (next !== "..")
-            continue;
-          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
-            continue;
-          }
-          didSomething = true;
-          parts.splice(gs, 1);
-          const other = parts.slice(0);
-          other[gs] = "**";
-          globParts.push(other);
-          gs--;
-        }
-        if (!this.preserveMultipleSlashes) {
-          for (let i = 1; i < parts.length - 1; i++) {
-            const p = parts[i];
-            if (i === 1 && p === "" && parts[0] === "")
-              continue;
-            if (p === "." || p === "") {
-              didSomething = true;
-              parts.splice(i, 1);
-              i--;
-            }
-          }
-          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-            didSomething = true;
-            parts.pop();
-          }
-        }
-        let dd = 0;
-        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-          const p = parts[dd - 1];
-          if (p && p !== "." && p !== ".." && p !== "**") {
-            didSomething = true;
-            const needDot = dd === 1 && parts[dd + 1] === "**";
-            const splin = needDot ? ["."] : [];
-            parts.splice(dd - 1, 2, ...splin);
-            if (parts.length === 0)
-              parts.push("");
-            dd -= 2;
-          }
-        }
-      }
-    } while (didSomething);
-    return globParts;
-  }
-  // second phase: multi-pattern dedupes
-  // {
/*/,
/

/} ->

/*/
-  // {
/,
/} -> 
/
-  // {
/**/,
/} -> 
/**/
-  //
-  // {
/**/,
/**/

/} ->

/**/
-  // ^-- not valid because ** doens't follow symlinks
-  secondPhasePreProcess(globParts) {
-    for (let i = 0; i < globParts.length - 1; i++) {
-      for (let j = i + 1; j < globParts.length; j++) {
-        const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-        if (!matched)
-          continue;
-        globParts[i] = matched;
-        globParts[j] = [];
-      }
-    }
-    return globParts.filter((gs) => gs.length);
-  }
-  partsMatch(a, b, emptyGSMatch = false) {
-    let ai = 0;
-    let bi = 0;
-    let result = [];
-    let which = "";
-    while (ai < a.length && bi < b.length) {
-      if (a[ai] === b[bi]) {
-        result.push(which === "b" ? b[bi] : a[ai]);
-        ai++;
-        bi++;
-      } else if (emptyGSMatch && a[ai] === "**" && b[bi] === a[ai + 1]) {
-        result.push(a[ai]);
-        ai++;
-      } else if (emptyGSMatch && b[bi] === "**" && a[ai] === b[bi + 1]) {
-        result.push(b[bi]);
-        bi++;
-      } else if (a[ai] === "*" && b[bi] && (this.options.dot || !b[bi].startsWith(".")) && b[bi] !== "**") {
-        if (which === "b")
-          return false;
-        which = "a";
-        result.push(a[ai]);
-        ai++;
-        bi++;
-      } else if (b[bi] === "*" && a[ai] && (this.options.dot || !a[ai].startsWith(".")) && a[ai] !== "**") {
-        if (which === "a")
-          return false;
-        which = "b";
-        result.push(b[bi]);
-        ai++;
-        bi++;
-      } else {
-        return false;
-      }
-    }
-    return a.length === b.length && result;
-  }
-  parseNegate() {
-    if (this.nonegate)
-      return;
-    const pattern = this.pattern;
-    let negate = false;
-    let negateOffset = 0;
-    for (let i = 0; i < pattern.length && pattern.charAt(i) === "!"; i++) {
-      negate = !negate;
-      negateOffset++;
-    }
-    if (negateOffset)
-      this.pattern = pattern.slice(negateOffset);
-    this.negate = negate;
-  }
-  // set partial to true to test if, for example,
-  // "/a/b" matches the start of "/*/b/*/d"
-  // Partial means, if you run out of file before you run
-  // out of pattern, then that's fine, as long as all
-  // the parts match.
-  matchOne(file, pattern, partial = false) {
-    const options = this.options;
-    if (this.isWindows) {
-      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
-      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
-      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
-      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
-      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
-      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
-      if (typeof fdi === "number" && typeof pdi === "number") {
-        const [fd, pd] = [file[fdi], pattern[pdi]];
-        if (fd.toLowerCase() === pd.toLowerCase()) {
-          pattern[pdi] = fd;
-          if (pdi > fdi) {
-            pattern = pattern.slice(pdi);
-          } else if (fdi > pdi) {
-            file = file.slice(fdi);
-          }
-        }
-      }
-    }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      file = this.levelTwoFileOptimize(file);
-    }
-    this.debug("matchOne", this, { file, pattern });
-    this.debug("matchOne", file.length, pattern.length);
-    for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-      this.debug("matchOne loop");
-      var p = pattern[pi];
-      var f = file[fi];
-      this.debug(pattern, p, f);
-      if (p === false) {
-        return false;
-      }
-      if (p === GLOBSTAR) {
-        this.debug("GLOBSTAR", [pattern, p, f]);
-        var fr = fi;
-        var pr = pi + 1;
-        if (pr === pl) {
-          this.debug("** at the end");
-          for (; fi < fl; fi++) {
-            if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".")
-              return false;
-          }
-          return true;
-        }
-        while (fr < fl) {
-          var swallowee = file[fr];
-          this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
-          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-            this.debug("globstar found match!", fr, fl, swallowee);
-            return true;
-          } else {
-            if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
-              this.debug("dot detected!", file, fr, pattern, pr);
-              break;
-            }
-            this.debug("globstar swallow a segment, and continue");
-            fr++;
-          }
-        }
-        if (partial) {
-          this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
-          if (fr === fl) {
-            return true;
-          }
-        }
-        return false;
-      }
-      let hit;
-      if (typeof p === "string") {
-        hit = f === p;
-        this.debug("string match", p, f, hit);
-      } else {
-        hit = p.test(f);
-        this.debug("pattern match", p, f, hit);
-      }
-      if (!hit)
-        return false;
-    }
-    if (fi === fl && pi === pl) {
-      return true;
-    } else if (fi === fl) {
-      return partial;
-    } else if (pi === pl) {
-      return fi === fl - 1 && file[fi] === "";
-    } else {
-      throw new Error("wtf?");
-    }
-  }
-  braceExpand() {
-    return braceExpand(this.pattern, this.options);
-  }
-  parse(pattern) {
-    assertValidPattern(pattern);
-    const options = this.options;
-    if (pattern === "**")
-      return GLOBSTAR;
-    if (pattern === "")
-      return "";
-    let m;
-    let fastTest = null;
-    if (m = pattern.match(starRE)) {
-      fastTest = options.dot ? starTestDot : starTest;
-    } else if (m = pattern.match(starDotExtRE)) {
-      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m[1]);
-    } else if (m = pattern.match(qmarksRE)) {
-      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m);
-    } else if (m = pattern.match(starDotStarRE)) {
-      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-    } else if (m = pattern.match(dotStarRE)) {
-      fastTest = dotStarTest;
-    }
-    const re = AST.fromGlob(pattern, this.options).toMMPattern();
-    return fastTest ? Object.assign(re, { test: fastTest }) : re;
-  }
-  makeRe() {
-    if (this.regexp || this.regexp === false)
-      return this.regexp;
-    const set = this.set;
-    if (!set.length) {
-      this.regexp = false;
-      return this.regexp;
-    }
-    const options = this.options;
-    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
-    const flags = new Set(options.nocase ? ["i"] : []);
-    let re = set.map((pattern) => {
-      const pp = pattern.map((p) => {
-        if (p instanceof RegExp) {
-          for (const f of p.flags.split(""))
-            flags.add(f);
-        }
-        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
-      });
-      pp.forEach((p, i) => {
-        const next = pp[i + 1];
-        const prev = pp[i - 1];
-        if (p !== GLOBSTAR || prev === GLOBSTAR) {
-          return;
-        }
-        if (prev === void 0) {
-          if (next !== void 0 && next !== GLOBSTAR) {
-            pp[i + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
-          } else {
-            pp[i] = twoStar;
-          }
-        } else if (next === void 0) {
-          pp[i - 1] = prev + "(?:\\/|" + twoStar + ")?";
-        } else if (next !== GLOBSTAR) {
-          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
-          pp[i + 1] = GLOBSTAR;
-        }
-      });
-      return pp.filter((p) => p !== GLOBSTAR).join("/");
-    }).join("|");
-    const [open, close] = set.length > 1 ? ["(?:", ")"] : ["", ""];
-    re = "^" + open + re + close + "$";
-    if (this.negate)
-      re = "^(?!" + re + ").+$";
-    try {
-      this.regexp = new RegExp(re, [...flags].join(""));
-    } catch (ex) {
-      this.regexp = false;
-    }
-    return this.regexp;
-  }
-  slashSplit(p) {
-    if (this.preserveMultipleSlashes) {
-      return p.split("/");
-    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-      return ["", ...p.split(/\/+/)];
-    } else {
-      return p.split(/\/+/);
-    }
-  }
-  match(f, partial = this.partial) {
-    this.debug("match", f, this.pattern);
-    if (this.comment) {
-      return false;
-    }
-    if (this.empty) {
-      return f === "";
-    }
-    if (f === "/" && partial) {
-      return true;
-    }
-    const options = this.options;
-    if (this.isWindows) {
-      f = f.split("\\").join("/");
-    }
-    const ff = this.slashSplit(f);
-    this.debug(this.pattern, "split", ff);
-    const set = this.set;
-    this.debug(this.pattern, "set", set);
-    let filename = ff[ff.length - 1];
-    if (!filename) {
-      for (let i = ff.length - 2; !filename && i >= 0; i--) {
-        filename = ff[i];
-      }
-    }
-    for (let i = 0; i < set.length; i++) {
-      const pattern = set[i];
-      let file = ff;
-      if (options.matchBase && pattern.length === 1) {
-        file = [filename];
-      }
-      const hit = this.matchOne(file, pattern, partial);
-      if (hit) {
-        if (options.flipNegate) {
-          return true;
-        }
-        return !this.negate;
-      }
-    }
-    if (options.flipNegate) {
-      return false;
-    }
-    return this.negate;
-  }
-  static defaults(def) {
-    return minimatch.defaults(def).Minimatch;
-  }
-};
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape;
-
-// node_modules/it-glob/dist/src/index.js
-async function* glob(dir, pattern, options = {}) {
-  const absoluteDir = import_path.default.resolve(dir);
-  const relativeDir = import_path.default.relative(options.cwd ?? process.cwd(), dir);
-  const stats = await import_promises.default.stat(absoluteDir);
-  if (stats.isDirectory()) {
-    for await (const entry of _glob(absoluteDir, "", pattern, options)) {
-      yield entry;
-    }
-    return;
-  }
-  if (minimatch(relativeDir, pattern, options)) {
-    yield options.absolute === true ? absoluteDir : relativeDir;
-  }
-}
-async function* _glob(base3, dir, pattern, options) {
-  for await (const entry of await import_promises.default.opendir(import_path.default.join(base3, dir))) {
-    const relativeEntryPath = import_path.default.join(dir, entry.name);
-    const absoluteEntryPath = import_path.default.join(base3, dir, entry.name);
-    let match2 = minimatch(relativeEntryPath, pattern, options);
-    const isDirectory = entry.isDirectory();
-    if (isDirectory && options.nodir === true) {
-      match2 = false;
-    }
-    if (match2) {
-      yield options.absolute === true ? absoluteEntryPath : relativeEntryPath;
-    }
-    if (isDirectory) {
-      yield* _glob(base3, relativeEntryPath, pattern, options);
-    }
-  }
-}
-
-// node_modules/@helia/unixfs/dist/src/index.js
-var DefaultUnixFS = class {
-  components;
-  constructor(components) {
-    this.components = components;
-  }
-  async *addAll(source, options = {}) {
-    yield* addAll(source, this.components.blockstore, options);
-  }
-  async addBytes(bytes, options = {}) {
-    return addBytes(bytes, this.components.blockstore, options);
-  }
-  async addByteStream(bytes, options = {}) {
-    return addByteStream(bytes, this.components.blockstore, options);
-  }
-  async addFile(file, options = {}) {
-    return addFile(file, this.components.blockstore, options);
-  }
-  async addDirectory(dir = {}, options = {}) {
-    return addDirectory(dir, this.components.blockstore, options);
-  }
-  async *cat(cid, options = {}) {
-    yield* cat(cid, this.components.blockstore, options);
-  }
-  async chmod(cid, mode, options = {}) {
-    return chmod(cid, mode, this.components.blockstore, options);
-  }
-  async cp(source, target, name4, options = {}) {
-    return cp(source, target, name4, this.components.blockstore, options);
-  }
-  async *ls(cid, options = {}) {
-    yield* ls(cid, this.components.blockstore, options);
-  }
-  async mkdir(cid, dirname, options = {}) {
-    return mkdir(cid, dirname, this.components.blockstore, options);
-  }
-  async rm(cid, path6, options = {}) {
-    return rm(cid, path6, this.components.blockstore, options);
-  }
-  async stat(cid, options = {}) {
-    return stat(cid, this.components.blockstore, options);
-  }
-  async touch(cid, options = {}) {
-    return touch(cid, this.components.blockstore, options);
-  }
-};
-function unixfs(helia) {
-  return new DefaultUnixFS(helia);
-}
-
-// node_modules/blockstore-fs/dist/src/index.js
-var import_promises2 = __toESM(require("fs/promises"), 1);
-var import_node_path2 = __toESM(require("path"), 1);
-var import_node_util = require("util");
-
-// node_modules/blockstore-core/dist/src/errors.js
-var errors_exports = {};
-__export(errors_exports, {
-  abortedError: () => abortedError,
-  closeFailedError: () => closeFailedError,
-  deleteFailedError: () => deleteFailedError,
-  getFailedError: () => getFailedError,
-  hasFailedError: () => hasFailedError,
-  notFoundError: () => notFoundError,
-  openFailedError: () => openFailedError,
-  putFailedError: () => putFailedError
-});
-var import_err_code16 = __toESM(require_err_code(), 1);
-function openFailedError(err) {
-  err = err ?? new Error("Open failed");
-  return (0, import_err_code16.default)(err, "ERR_OPEN_FAILED");
-}
-function closeFailedError(err) {
-  err = err ?? new Error("Close failed");
-  return (0, import_err_code16.default)(err, "ERR_CLOSE_FAILED");
-}
-function putFailedError(err) {
-  err = err ?? new Error("Put failed");
-  return (0, import_err_code16.default)(err, "ERR_PUT_FAILED");
-}
-function getFailedError(err) {
-  err = err ?? new Error("Get failed");
-  return (0, import_err_code16.default)(err, "ERR_GET_FAILED");
-}
-function deleteFailedError(err) {
-  err = err ?? new Error("Delete failed");
-  return (0, import_err_code16.default)(err, "ERR_DELETE_FAILED");
-}
-function hasFailedError(err) {
-  err = err ?? new Error("Has failed");
-  return (0, import_err_code16.default)(err, "ERR_HAS_FAILED");
-}
-function notFoundError(err) {
-  err = err ?? new Error("Not Found");
-  return (0, import_err_code16.default)(err, "ERR_NOT_FOUND");
-}
-function abortedError(err) {
-  err = err ?? new Error("Aborted");
-  return (0, import_err_code16.default)(err, "ERR_ABORTED");
-}
-
-// node_modules/blockstore-core/node_modules/@libp2p/logger/dist/src/index.js
-var import_debug2 = __toESM(require_src2(), 1);
-import_debug2.default.formatters.b = (v) => {
-  return v == null ? "undefined" : base58btc2.baseEncode(v);
-};
-import_debug2.default.formatters.t = (v) => {
-  return v == null ? "undefined" : base322.baseEncode(v);
-};
-import_debug2.default.formatters.m = (v) => {
-  return v == null ? "undefined" : base64.baseEncode(v);
-};
-import_debug2.default.formatters.p = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug2.default.formatters.c = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug2.default.formatters.k = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug2.default.formatters.a = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-function createDisabledLogger2(namespace) {
-  const logger3 = () => {
-  };
-  logger3.enabled = false;
-  logger3.color = "";
-  logger3.diff = 0;
-  logger3.log = () => {
-  };
-  logger3.namespace = namespace;
-  logger3.destroy = () => true;
-  logger3.extend = () => logger3;
-  return logger3;
-}
-function logger2(name4) {
-  let trace = createDisabledLogger2(`${name4}:trace`);
-  if (import_debug2.default.enabled(`${name4}:trace`) && import_debug2.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null) {
-    trace = (0, import_debug2.default)(`${name4}:trace`);
-  }
-  return Object.assign((0, import_debug2.default)(name4), {
-    error: (0, import_debug2.default)(`${name4}:error`),
-    trace
-  });
-}
-
-// node_modules/blockstore-core/dist/src/tiered.js
-var log11 = logger2("blockstore:core:tiered");
-
-// node_modules/blockstore-core/dist/src/index.js
-var Errors = {
-  ...errors_exports
-};
-
-// node_modules/blockstore-fs/dist/src/index.js
-var import_fast_write_atomic = __toESM(require_fast_write_atomic(), 1);
-
-// node_modules/blockstore-fs/dist/src/sharding.js
-var import_node_path = __toESM(require("path"), 1);
-var NextToLast = class {
-  extension;
-  prefixLength;
-  base;
-  constructor(init = {}) {
-    this.extension = init.extension ?? ".data";
-    this.prefixLength = init.prefixLength ?? 2;
-    this.base = init.base ?? base32upper2;
-  }
-  encode(cid) {
-    const str = this.base.encoder.encode(cid.multihash.bytes);
-    const prefix = str.substring(str.length - this.prefixLength);
-    return {
-      dir: prefix,
-      file: `${str}${this.extension}`
-    };
-  }
-  decode(str) {
-    let fileName = import_node_path.default.basename(str);
-    if (fileName.endsWith(this.extension)) {
-      fileName = fileName.substring(0, fileName.length - this.extension.length);
-    }
-    return CID2.decode(this.base.decoder.decode(fileName));
-  }
-};
-
-// node_modules/blockstore-fs/dist/src/index.js
-var writeAtomic = (0, import_node_util.promisify)(import_fast_write_atomic.default);
-async function writeFile(file, contents) {
-  try {
-    await writeAtomic(file, contents);
-  } catch (err) {
-    if (err.code === "EPERM" && err.syscall === "rename") {
-      await import_promises2.default.access(file, import_promises2.default.constants.F_OK | import_promises2.default.constants.W_OK);
-      return;
-    }
-    throw err;
-  }
-}
-var FsBlockstore = class {
-  path;
-  createIfMissing;
-  errorIfExists;
-  putManyConcurrency;
-  getManyConcurrency;
-  deleteManyConcurrency;
-  shardingStrategy;
-  constructor(location, init = {}) {
-    this.path = import_node_path2.default.resolve(location);
-    this.createIfMissing = init.createIfMissing ?? true;
-    this.errorIfExists = init.errorIfExists ?? false;
-    this.deleteManyConcurrency = init.deleteManyConcurrency ?? 50;
-    this.getManyConcurrency = init.getManyConcurrency ?? 50;
-    this.putManyConcurrency = init.putManyConcurrency ?? 50;
-    this.shardingStrategy = init.shardingStrategy ?? new NextToLast();
-  }
-  async open() {
-    try {
-      await import_promises2.default.access(this.path, import_promises2.default.constants.F_OK | import_promises2.default.constants.W_OK);
-      if (this.errorIfExists) {
-        throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} already exists`));
-      }
-    } catch (err) {
-      if (err.code === "ENOENT") {
-        if (this.createIfMissing) {
-          await import_promises2.default.mkdir(this.path, { recursive: true });
-          return;
-        } else {
-          throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} does not exist`));
-        }
-      }
-      throw err;
-    }
-  }
-  async close() {
-    await Promise.resolve();
-  }
-  async put(key, val) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      if (dir != null && dir !== "") {
-        await import_promises2.default.mkdir(import_node_path2.default.join(this.path, dir), {
-          recursive: true
-        });
-      }
-      await writeFile(import_node_path2.default.join(this.path, dir, file), val);
-      return key;
-    } catch (err) {
-      throw Errors.putFailedError(err);
-    }
-  }
-  async *putMany(source) {
-    yield* parallelBatch(src_default3(source, ({ cid, block }) => {
-      return async () => {
-        await this.put(cid, block);
-        return cid;
-      };
-    }), this.putManyConcurrency);
-  }
-  async get(key) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      return await import_promises2.default.readFile(import_node_path2.default.join(this.path, dir, file));
-    } catch (err) {
-      throw Errors.notFoundError(err);
-    }
-  }
-  async *getMany(source) {
-    yield* parallelBatch(src_default3(source, (key) => {
-      return async () => {
-        return {
-          cid: key,
-          block: await this.get(key)
-        };
-      };
-    }), this.getManyConcurrency);
-  }
-  async delete(key) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      await import_promises2.default.unlink(import_node_path2.default.join(this.path, dir, file));
-    } catch (err) {
-      if (err.code === "ENOENT") {
-        return;
-      }
-      throw Errors.deleteFailedError(err);
-    }
-  }
-  async *deleteMany(source) {
-    yield* parallelBatch(src_default3(source, (key) => {
-      return async () => {
-        await this.delete(key);
-        return key;
-      };
-    }), this.deleteManyConcurrency);
-  }
-  /**
-   * Check for the existence of the given key
-   */
-  async has(key) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      await import_promises2.default.access(import_node_path2.default.join(this.path, dir, file));
-    } catch (err) {
-      return false;
-    }
-    return true;
-  }
-  async *getAll() {
-    const pattern = `**/*${this.shardingStrategy.extension}`.split(import_node_path2.default.sep).join("/");
-    const files = glob(this.path, pattern, {
-      absolute: true
-    });
-    for await (const file of files) {
-      try {
-        const buf2 = await import_promises2.default.readFile(file);
-        const pair = {
-          cid: this.shardingStrategy.decode(file),
-          block: buf2
-        };
-        yield pair;
-      } catch (err) {
-        if (err.code !== "ENOENT") {
-          throw err;
-        }
-      }
-    }
-  }
-};
-
-// src/objectManager.js
-var import_node_fs = require("fs");
-var import_promises3 = require("fs/promises");
-var import_node_os = __toESM(require("os"));
-var import_node_path3 = __toESM(require("path"));
-var import_node_stream = require("stream");
-var import_uuid = require("uuid");
-var ObjectManager = class {
-  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
-  #DEFAULT_REGION = "us-east-1";
-  #DEFAULT_MAX_CONCURRENT_UPLOADS = 4;
-  #client;
-  #credentials;
-  #defaultBucket;
-  #gatewayConfiguration;
-  #maxConcurrentUploads;
-  /**
-   * @typedef {Object} objectManagerOptions Optional settings for the constructor.
-   * @property {string} [bucket] Default bucket to use.
-   * @property {objectDownloadOptions} [gateway] Default gateway to use.
-   * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads.
-   */
-  /**
-   * @typedef {Object} objectDownloadOptions Optional settings for downloading objects
-   * @property {string} endpoint Default gateway to use.
-   * @property {string} [token] Token for the default gateway.
-   * @property {number} [timeout=60000] Timeout for the default gateway
-   */
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @param {objectManagerOptions} options - Optional settings for the constructor.
-   * @tutorial quickstart-object
-   * @example
-   * import { ObjectManager } from "@filebase/sdk";
-   * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
-   *   bucket: "my-default-bucket",
-   *   maxConcurrentUploads: 4,
-   *   gateway: {
-   *     endpoint: "https://my-default-gateway.mydomain.com
-   *     token: SUPER_SECRET_GATEWAY_TOKEN
-   *   }
-   * });
-   */
-  constructor(clientKey, clientSecret, options) {
-    var _a, _b, _c;
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = {
-      credentials: {
-        accessKeyId: clientKey,
-        secretAccessKey: clientSecret
-      },
-      endpoint: clientEndpoint,
-      region: this.#DEFAULT_REGION,
-      forcePathStyle: true
-    };
-    this.#defaultBucket = options == null ? void 0 : options.bucket;
-    this.#maxConcurrentUploads = (options == null ? void 0 : options.maxConcurrentUploads) || this.#DEFAULT_MAX_CONCURRENT_UPLOADS;
-    this.#credentials = {
-      key: clientKey,
-      secret: clientSecret
-    };
-    this.#client = new import_client_s32.S3Client(clientConfiguration);
-    this.#gatewayConfiguration = {
-      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
-      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
-      timeout: (_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout
-    };
-  }
-  /**
-   * @typedef {Object} objectOptions
-   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
-   */
-  /**
-   * @typedef {Object} objectHeadResult
-   * @property {string} cid The CID of the uploaded object
-   * @property {function} download Convenience function to download the object via S3 or the selected gateway
-   * @property {array} [entries] If a directory then returns an array of the containing objects
-   * @property {string} entries.cid The CID of the uploaded object
-   * @property {string} entries.path The path of the object
-   */
-  /**
-   * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload.
-   * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file
-   * and an optional entries object when uploading a CAR file.
-   *
-   * @summary Uploads a file or a CAR file to the specified bucket.
-   * @param {string} key - The key or path of the file in the bucket.
-   * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded.
-   *    If an array of files is provided, each file should have a 'path' property specifying the path of the file
-   *    and a 'content' property specifying the content of the file.  The SDK will then construct a CAR file locally
-   *    and use that as the content of the object to be uploaded.
-   * @param {Object} [metadata] Optional metadata for pin object
-   * @param {objectOptions} [options] - The options for uploading the object.
-   * @returns {Promise}
-   * @example
-   * // Upload Object
-   * await objectManager.upload("my-object", Buffer.from("Hello World!"));
-   * // Upload Object with Metadata
-   * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), {
-   *   "application": "my-filebase-app"
-   * });
-   * // Upload Directory
-   * await objectManager.upload("my-first-directory", [
-   *  {
-   *   path: "/testObjects/1.txt",
-   *   content: Buffer.from("upload test object", "utf-8"),
-   *  },
-   *  {
-   *   path: "/testObjects/deep/1.txt",
-   *   content: Buffer.from("upload deep test object", "utf-8"),
-   *  },
-   *  {
-   *   path: "/topLevel.txt",
-   *   content: Buffer.from("upload top level test object", "utf-8"),
-   *  },
-   * ]);
-   */
-  async upload(key, source, metadata, options) {
-    const uploadUUID = (0, import_uuid.v4)();
-    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket, uploadOptions = {
-      client: this.#client,
-      params: {
-        Bucket: bucket,
-        Key: key,
-        Body: source,
-        Metadata: metadata || {}
-      },
-      queueSize: this.#maxConcurrentUploads,
-      partSize: 26843546
-      //25.6Mb || 250Gb Max File Size
-    };
-    let parsedEntries = {};
-    if (Array.isArray(source)) {
-      uploadOptions.params.Metadata = {
-        ...uploadOptions.params.Metadata,
-        import: "car"
-      };
-      let temporaryCarFilePath, temporaryBlockstoreDir;
-      try {
-        temporaryBlockstoreDir = import_node_path3.default.resolve(
-          import_node_os.default.tmpdir(),
-          "filebase-sdk",
-          "uploads",
-          uploadUUID
-        );
-        temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
-        await (0, import_promises3.mkdir)(temporaryBlockstoreDir, { recursive: true });
-        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir);
-        const heliaFs = unixfs({
-          blockstore: temporaryBlockstore
-        });
-        for (let sourceEntry of source) {
-          sourceEntry.path = sourceEntry.path[0] === "/" ? `/${uploadUUID}${sourceEntry.path}` : `/${uploadUUID}/${sourceEntry.path}`;
-        }
-        for await (const entry of heliaFs.addAll(source)) {
-          parsedEntries[entry.path] = entry;
-        }
-        const rootEntry = parsedEntries[uploadUUID];
-        const carExporter = car({ blockstore: temporaryBlockstore }), { writer, out } = CarWriter2.create([rootEntry.cid]);
-        const output = (0, import_node_fs.createWriteStream)(temporaryCarFilePath);
-        import_node_stream.Readable.from(out).pipe(output);
-        await carExporter.export(rootEntry.cid, writer);
-        uploadOptions.params.Body = (0, import_node_fs.createReadStream)(temporaryCarFilePath);
-        const parallelUploads3 = new import_lib_storage.Upload(uploadOptions);
-        await parallelUploads3.done();
-        await temporaryBlockstore.close();
-      } finally {
-        if (typeof temporaryBlockstoreDir !== "undefined") {
-          await (0, import_promises3.rm)(temporaryBlockstoreDir, { recursive: true, force: true });
-        }
-      }
-    } else {
-      const parallelUploads3 = new import_lib_storage.Upload(uploadOptions);
-      await parallelUploads3.done();
-    }
-    const command = new import_client_s32.HeadObjectCommand({
-      Bucket: bucket,
-      Key: key,
-      Body: source
-    }), headResult = await this.#client.send(command), responseCid = headResult.Metadata.cid;
-    if (Object.keys(parsedEntries).length === 0) {
-      return {
-        cid: responseCid,
-        download: () => {
-          return this.#routeDownload(responseCid, key, options);
-        }
-      };
-    }
-    return {
-      cid: responseCid,
-      download: () => {
-        return this.#routeDownload(responseCid, key, options);
-      },
-      entries: parsedEntries
-    };
-  }
-  async #routeDownload(cid, key, options) {
-    return typeof this.#gatewayConfiguration.endpoint !== "undefined" ? downloadFromGateway(cid, this.#gatewayConfiguration) : this.download(key, options);
-  }
-  /**
-   * @summary Gets an objects info and metadata using the S3 API.
-   * @param {string} key - The key of the object to be inspected.
-   * @param {objectOptions} [options] - The options for inspecting the object.
-   * @returns {Promise}
-   */
-  async get(key, options) {
-    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket;
-    try {
-      const command = new import_client_s32.HeadObjectCommand({
-        Bucket: bucket,
-        Key: key
-      }), response = await this.#client.send(command);
-      response.download = () => {
-        return this.#routeDownload(response.Metadata.cid, key, options);
-      };
-      return response;
-    } catch (err) {
-      if (err.name === "NotFound") {
-        return false;
-      }
-      throw err;
-    }
-  }
-  /**
-   * @summary Downloads an object from the specified bucket using the provided key.
-   * @param {string} key - The key of the object to be downloaded.
-   * @param {objectOptions} [options] - The options for downloading the object..
-   * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream.
-   * @example
-   * // Download object with name of `download-object-example`
-   * await objectManager.download(`download-object-example`);
-   */
-  async download(key, options) {
-    if (typeof this.#gatewayConfiguration.endpoint === "string") {
-      const objectToFetch = await this.get(key, options);
-      return objectToFetch.download();
-    } else {
-      const command = new import_client_s32.GetObjectCommand({
-        Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
-        Key: key
-      }), response = await this.#client.send(command);
-      return response.Body;
-    }
-  }
-  /**
-   * @typedef {Object} listObjectsResult
-   * @property {boolean} IsTruncated Indicates if more results exist on the server
-   * @property {string} NextContinuationToken ContinuationToken used to paginate list requests
-   * @property {Array} Contents List of Keys stored in the S3 Bucket
-   * @property {string} Contents.Key Key of the Object
-   * @property {string} Contents.LastModified Date Last Modified of the Object
-   * @property {string} Contents.CID CID of the Object
-   * @property {string} Contents.ETag ETag of the Object
-   * @property {number} Contents.Size Size in Bytes of the Object
-   * @property {string} Contents.StorageClass Class of Storage of the Object
-   * @property {function} Contents.download Convenience function to download the item using the S3 gateway
-   */
-  /**
-   * @typedef {Object} listObjectOptions
-   * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used.
-   * @property {string} [ContinuationToken=null] Continues listing from this objects name.
-   * @property {string} [Delimiter=null] Character used to group keys
-   * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000.
-   */
-  /**
-   * Retrieves a list of objects from a specified bucket.
-   *
-   * @param {listObjectOptions} options - The options for listing objects.
-   * @returns {Promise} - A promise that resolves to an array of objects.
-   * @example
-   * // List objects in bucket with a limit of 1000
-   * await objectManager.list({
-   *   MaxKeys: 1000
-   * });
-   */
-  async list(options = {
-    Bucket: this.#defaultBucket,
-    ContinuationToken: null,
-    Delimiter: null,
-    MaxKeys: 1e3
-  }) {
-    if ((options == null ? void 0 : options.MaxKeys) && options.MaxKeys > 1e5) {
-      throw new Error(`MaxKeys Maximum value is 100000`);
-    }
-    const bucket = (options == null ? void 0 : options.Bucket) || this.#defaultBucket, limit = (options == null ? void 0 : options.MaxKeys) || 1e3, commandOptions = {
-      Bucket: bucket,
-      MaxKeys: limit
-    }, command = new import_client_s32.ListObjectsV2Command({
-      ...options,
-      ...commandOptions
-    });
-    const { Contents, IsTruncated, NextContinuationToken } = await this.#client.send(command);
-    return { Contents, IsTruncated, NextContinuationToken };
-  }
-  /**
-   * @summary Deletes an object from the specified bucket using the provided key.
-   * @param {string} key - The key of the object to be deleted.
-   * @param {objectOptions} [options] - The options for deleting the file.
-   * @returns {Promise} - A Promise that resolves with the result of the delete operation.
-   * @example
-   * // Delete object with name of `delete-object-example`
-   * await objectManager.delete(`delete-object-example`);
-   */
-  async delete(key, options) {
-    const command = new import_client_s32.DeleteObjectCommand({
-      Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
-      Key: key
-    });
-    await this.#client.send(command);
-    return true;
-  }
-  /**
-   * @typedef {Object} copyObjectOptions
-   * @property {string} [sourceBucket] The source bucket from where the object is to be copied.
-   * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey.
-   */
-  /**
-   * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey.
-   *
-   * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket.
-   * @param {string} sourceKey - The key of the object to be copied from the sourceBucket.
-   * @param {string} destinationBucket - The bucket where the object will be copied to.
-   * @param {copyObjectOptions} [options] - Additional options for the copy operation.
-   *
-   * @returns {Promise} - A Promise that resolves with the result of the copy operation.
-   * @example
-   * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
-   * // TIP: Set bucket on constructor and it will be used as the default source for copying objects.
-   * await objectManager.copy(`copy-object-test`, `copy-object-dest`, {
-   *   sourceBucket: `copy-object-src`
-   * });
-   */
-  async copy(sourceKey, destinationBucket, options = {
-    sourceBucket: this.#defaultBucket,
-    destinationKey: void 0
-  }) {
-    const copySource = `${(options == null ? void 0 : options.sourceBucket) || this.#defaultBucket}/${sourceKey}`, command = new import_client_s32.CopyObjectCommand({
-      CopySource: copySource,
-      Bucket: destinationBucket,
-      Key: (options == null ? void 0 : options.destinationKey) || sourceKey
-    });
-    await this.#client.send(command);
-    return true;
-  }
-};
-var objectManager_default = ObjectManager;
-
-// src/pinManager.js
-var import_axios4 = __toESM(require("axios"));
-var PinManager = class {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 6e4;
-  #client;
-  #credentials;
-  #gatewayConfiguration;
-  #defaultBucket;
-  /**
-   * @typedef {Object} pinManagerOptions Optional settings for the constructor.
-   * @property {string} [bucket] Default bucket to use.
-   * @property {pinDownloadOptions} [gateway] Default gateway to use.
-   */
-  /**
-   * @typedef {Object} pinDownloadOptions Optional settings for downloading pins
-   * @property {string} endpoint Default gateway to use.
-   * @property {string} [token] Token for the default gateway.
-   * @property {number} [timeout=60000] Timeout for the default gateway
-   */
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @param {pinManagerOptions} [options] - Optional settings for the constructor.
-   * @tutorial quickstart-pin
-   * @example
-   * import { PinManager } from "@filebase/sdk";
-   * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
-   *   bucket: "my-default-bucket",
-   *   gateway: {
-   *     endpoint: "https://my-default-gateway.mydomain.com
-   *     token: SUPER_SECRET_GATEWAY_TOKEN
-   *   }
-   * });
-   */
-  constructor(clientKey, clientSecret, options) {
-    var _a, _b, _c;
-    this.#defaultBucket = options == null ? void 0 : options.bucket;
-    const PSAClientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`;
-    this.#credentials = {
-      key: clientKey,
-      secret: clientSecret
-    };
-    this.#client = import_axios4.default.create({
-      baseURL,
-      timeout: this.#DEFAULT_TIMEOUT
-    });
-    this.#gatewayConfiguration = {
-      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
-      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
-      timeout: ((_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout) || this.#DEFAULT_TIMEOUT
-    };
-  }
-  /**
-   * @typedef {Object} pinStatus
-   * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal
-   * @property {string} status Status a pin object can have at a pinning service. ("queued","pinning","pinned","failed")
-   * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination
-   * @property {Object} pin Pin object
-   * @property {string} pin.cid Content Identifier (CID) pinned recursively
-   * @property {string} pin.name Name for pinned data; can be used for lookups later
-   * @property {Array} pin.origins Optional list of multiaddrs known to provide the data
-   * @property {Object} pin.meta Optional metadata for pin object
-   * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data
-   * @property {object} [info] Optional info for PinStatus response
-   * @property {function} download Convenience function to download pin
-   */
-  /**
-   * @typedef {Object} pinOptions
-   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
-   */
-  /**
-   * @typedef {Object} listPinOptions
-   * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts
-   * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match)
-   * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies
-   * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only)
-   * @property {string} [before] Return results created (queued) before provided timestamp
-   * @property {string} [after] Return results created (queued) after provided timestamp
-   * @property {number} [limit] Max records to return
-   * @property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport
-   */
-  /**
-   * @typedef {Object} listPinResults
-   * @property {number} count Total number of pin objects that exist for passed query filters
-   * @property {Array} Array of PinStatus results
-   */
-  /**
-   * @summary List the pins in a given bucket
-   * @param {listPinOptions} [listOptions]
-   * @param {pinOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // List pins in bucket with a limit of 1000
-   * await pinManager.list({
-   *   limit: 1000
-   * });
-   */
-  async list(listOptions, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponse = await this.#client.request({
-        method: "GET",
-        params: listOptions,
-        headers: { Authorization: `Bearer ${encodedToken}` }
-      });
-      for (let pinStatus of getResponse.data.results) {
-        pinStatus.download = () => {
-          return this.download(pinStatus.pin.cid);
-        };
-      }
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Create a pin in the selected bucket
-   * @param {string} key Key or path of the file in the bucket
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {Object} [metadata] Optional metadata for pin object
-   * @param {pinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Create Pin with Metadata
-   * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "application": "my-custom-app-on-filebase"
-   * });
-   */
-  async create(key, cid, metadata, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatus = await this.#client.request({
-        method: "POST",
-        data: {
-          cid,
-          name: key,
-          meta: metadata
-        },
-        headers: { Authorization: `Bearer ${encodedToken}` }
-      });
-      pinStatus.data.download = () => {
-        return this.download(pinStatus.data.pin.cid);
-      };
-      return pinStatus.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @typedef {Object} replacePinOptions
-   * @augments pinOptions
-   * @property {Object} [metadata] Optional metadata to set on pin during replacement
-   * @property {string} [name] Optional name for pin to set during replacement
-   */
-  /**
-   * @summary Replace a pinned object in the selected bucket
-   * @param {string} requestid Unique ID for the pinned object
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {replacePinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Replace Pin with Metadata
-   * await pinManager.create("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "revision": Date.now()
-   * }
-   */
-  async replace(requestid, cid, options) {
-    try {
-      let replaceData = {
-        cid,
-        meta: (options == null ? void 0 : options.metadata) || {}
-      };
-      if (options == null ? void 0 : options.name) {
-        replaceData.name = options.name;
-      }
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatusResult = await this.#client.request({
-        method: "POST",
-        url: `/${requestid}`,
-        data: replaceData,
-        validateStatus: (status) => {
-          return status === 200;
-        },
-        headers: { Authorization: `Bearer ${encodedToken}` }
-      });
-      const pinStatus = pinStatusResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Download a pin from the selected IPFS gateway
-   * @param {string} cid
-   * @param {pinDownloadOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // Download Pin by CID
-   * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF");
-   */
-  async download(cid, options) {
-    const downloadOptions = Object.assign(this.#gatewayConfiguration, options);
-    return downloadFromGateway(cid, downloadOptions);
-  }
-  /**
-   * @summary Get details about a pinned object
-   * @param {string} requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for getting the pin
-   * @returns {Promise}
-   * @example
-   * // Get Pin Info by RequestId
-   * await pinManager.get("qr4231214");
-   */
-  async get(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponseResult = await this.#client.request({
-        method: "GET",
-        url: `/${requestid}`,
-        headers: { Authorization: `Bearer ${encodedToken}` },
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        }
-      });
-      if (getResponseResult.status === 404) {
-        return false;
-      }
-      const pinStatus = getResponseResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Delete a pinned object from the selected bucket
-   * @param requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for deleting the pin
-   * @returns {Promise}
-   * @example
-   * // Delete Pin by RequestId
-   * await pinManager.delete("qr4231213");
-   */
-  async delete(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket);
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${requestid}`,
-        headers: { Authorization: `Bearer ${encodedToken}` },
-        validateStatus: (status) => {
-          return status === 202;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  #getEncodedToken(bucket) {
-    bucket = bucket || this.#defaultBucket;
-    return Buffer.from(
-      `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`
-    ).toString("base64");
-  }
-};
-var pinManager_default = PinManager;
-// Annotate the CommonJS export names for ESM import in node:
-0 && (module.exports = {
-  BucketManager,
-  GatewayManager,
-  NameManager,
-  ObjectManager,
-  PinManager
-});
diff --git a/dist/index.mjs b/dist/index.mjs
deleted file mode 100644
index b5682db..0000000
--- a/dist/index.mjs
+++ /dev/null
@@ -1,18045 +0,0 @@
-var __create = Object.create;
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __getProtoOf = Object.getPrototypeOf;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
-  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
-}) : x)(function(x) {
-  if (typeof require !== "undefined")
-    return require.apply(this, arguments);
-  throw Error('Dynamic require of "' + x + '" is not supported');
-});
-var __commonJS = (cb, mod) => function __require2() {
-  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
-};
-var __export = (target, all) => {
-  for (var name4 in all)
-    __defProp(target, name4, { get: all[name4], enumerable: true });
-};
-var __copyProps = (to, from4, except, desc) => {
-  if (from4 && typeof from4 === "object" || typeof from4 === "function") {
-    for (let key of __getOwnPropNames(from4))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from4[key], enumerable: !(desc = __getOwnPropDesc(from4, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
-  // If the importer is in node compatibility mode or this is not an ESM
-  // file that has been converted to a CommonJS file using a Babel-
-  // compatible transform (i.e. "__esModule" has not been set), then set
-  // "default" to the CommonJS "module.exports" for node compatibility.
-  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
-  mod
-));
-
-// node_modules/varint/encode.js
-var require_encode = __commonJS({
-  "node_modules/varint/encode.js"(exports, module) {
-    module.exports = encode12;
-    var MSB3 = 128;
-    var REST3 = 127;
-    var MSBALL3 = ~REST3;
-    var INT3 = Math.pow(2, 31);
-    function encode12(num, out, offset) {
-      if (Number.MAX_SAFE_INTEGER && num > Number.MAX_SAFE_INTEGER) {
-        encode12.bytes = 0;
-        throw new RangeError("Could not encode varint");
-      }
-      out = out || [];
-      offset = offset || 0;
-      var oldOffset = offset;
-      while (num >= INT3) {
-        out[offset++] = num & 255 | MSB3;
-        num /= 128;
-      }
-      while (num & MSBALL3) {
-        out[offset++] = num & 255 | MSB3;
-        num >>>= 7;
-      }
-      out[offset] = num | 0;
-      encode12.bytes = offset - oldOffset + 1;
-      return out;
-    }
-  }
-});
-
-// node_modules/varint/decode.js
-var require_decode = __commonJS({
-  "node_modules/varint/decode.js"(exports, module) {
-    module.exports = read4;
-    var MSB3 = 128;
-    var REST3 = 127;
-    function read4(buf2, offset) {
-      var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
-      do {
-        if (counter >= l || shift > 49) {
-          read4.bytes = 0;
-          throw new RangeError("Could not decode varint");
-        }
-        b = buf2[counter++];
-        res += shift < 28 ? (b & REST3) << shift : (b & REST3) * Math.pow(2, shift);
-        shift += 7;
-      } while (b >= MSB3);
-      read4.bytes = counter - offset;
-      return res;
-    }
-  }
-});
-
-// node_modules/varint/length.js
-var require_length = __commonJS({
-  "node_modules/varint/length.js"(exports, module) {
-    var N13 = Math.pow(2, 7);
-    var N23 = Math.pow(2, 14);
-    var N33 = Math.pow(2, 21);
-    var N43 = Math.pow(2, 28);
-    var N53 = Math.pow(2, 35);
-    var N63 = Math.pow(2, 42);
-    var N73 = Math.pow(2, 49);
-    var N83 = Math.pow(2, 56);
-    var N93 = Math.pow(2, 63);
-    module.exports = function(value) {
-      return value < N13 ? 1 : value < N23 ? 2 : value < N33 ? 3 : value < N43 ? 4 : value < N53 ? 5 : value < N63 ? 6 : value < N73 ? 7 : value < N83 ? 8 : value < N93 ? 9 : 10;
-    };
-  }
-});
-
-// node_modules/varint/index.js
-var require_varint = __commonJS({
-  "node_modules/varint/index.js"(exports, module) {
-    module.exports = {
-      encode: require_encode(),
-      decode: require_decode(),
-      encodingLength: require_length()
-    };
-  }
-});
-
-// node_modules/eventemitter3/index.js
-var require_eventemitter3 = __commonJS({
-  "node_modules/eventemitter3/index.js"(exports, module) {
-    "use strict";
-    var has = Object.prototype.hasOwnProperty;
-    var prefix = "~";
-    function Events() {
-    }
-    if (Object.create) {
-      Events.prototype = /* @__PURE__ */ Object.create(null);
-      if (!new Events().__proto__)
-        prefix = false;
-    }
-    function EE(fn, context, once) {
-      this.fn = fn;
-      this.context = context;
-      this.once = once || false;
-    }
-    function addListener(emitter, event, fn, context, once) {
-      if (typeof fn !== "function") {
-        throw new TypeError("The listener must be a function");
-      }
-      var listener = new EE(fn, context || emitter, once), evt = prefix ? prefix + event : event;
-      if (!emitter._events[evt])
-        emitter._events[evt] = listener, emitter._eventsCount++;
-      else if (!emitter._events[evt].fn)
-        emitter._events[evt].push(listener);
-      else
-        emitter._events[evt] = [emitter._events[evt], listener];
-      return emitter;
-    }
-    function clearEvent(emitter, evt) {
-      if (--emitter._eventsCount === 0)
-        emitter._events = new Events();
-      else
-        delete emitter._events[evt];
-    }
-    function EventEmitter2() {
-      this._events = new Events();
-      this._eventsCount = 0;
-    }
-    EventEmitter2.prototype.eventNames = function eventNames() {
-      var names = [], events, name4;
-      if (this._eventsCount === 0)
-        return names;
-      for (name4 in events = this._events) {
-        if (has.call(events, name4))
-          names.push(prefix ? name4.slice(1) : name4);
-      }
-      if (Object.getOwnPropertySymbols) {
-        return names.concat(Object.getOwnPropertySymbols(events));
-      }
-      return names;
-    };
-    EventEmitter2.prototype.listeners = function listeners(event) {
-      var evt = prefix ? prefix + event : event, handlers = this._events[evt];
-      if (!handlers)
-        return [];
-      if (handlers.fn)
-        return [handlers.fn];
-      for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
-        ee[i] = handlers[i].fn;
-      }
-      return ee;
-    };
-    EventEmitter2.prototype.listenerCount = function listenerCount(event) {
-      var evt = prefix ? prefix + event : event, listeners = this._events[evt];
-      if (!listeners)
-        return 0;
-      if (listeners.fn)
-        return 1;
-      return listeners.length;
-    };
-    EventEmitter2.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
-      var evt = prefix ? prefix + event : event;
-      if (!this._events[evt])
-        return false;
-      var listeners = this._events[evt], len = arguments.length, args, i;
-      if (listeners.fn) {
-        if (listeners.once)
-          this.removeListener(event, listeners.fn, void 0, true);
-        switch (len) {
-          case 1:
-            return listeners.fn.call(listeners.context), true;
-          case 2:
-            return listeners.fn.call(listeners.context, a1), true;
-          case 3:
-            return listeners.fn.call(listeners.context, a1, a2), true;
-          case 4:
-            return listeners.fn.call(listeners.context, a1, a2, a3), true;
-          case 5:
-            return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
-          case 6:
-            return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
-        }
-        for (i = 1, args = new Array(len - 1); i < len; i++) {
-          args[i - 1] = arguments[i];
-        }
-        listeners.fn.apply(listeners.context, args);
-      } else {
-        var length4 = listeners.length, j;
-        for (i = 0; i < length4; i++) {
-          if (listeners[i].once)
-            this.removeListener(event, listeners[i].fn, void 0, true);
-          switch (len) {
-            case 1:
-              listeners[i].fn.call(listeners[i].context);
-              break;
-            case 2:
-              listeners[i].fn.call(listeners[i].context, a1);
-              break;
-            case 3:
-              listeners[i].fn.call(listeners[i].context, a1, a2);
-              break;
-            case 4:
-              listeners[i].fn.call(listeners[i].context, a1, a2, a3);
-              break;
-            default:
-              if (!args)
-                for (j = 1, args = new Array(len - 1); j < len; j++) {
-                  args[j - 1] = arguments[j];
-                }
-              listeners[i].fn.apply(listeners[i].context, args);
-          }
-        }
-      }
-      return true;
-    };
-    EventEmitter2.prototype.on = function on(event, fn, context) {
-      return addListener(this, event, fn, context, false);
-    };
-    EventEmitter2.prototype.once = function once(event, fn, context) {
-      return addListener(this, event, fn, context, true);
-    };
-    EventEmitter2.prototype.removeListener = function removeListener(event, fn, context, once) {
-      var evt = prefix ? prefix + event : event;
-      if (!this._events[evt])
-        return this;
-      if (!fn) {
-        clearEvent(this, evt);
-        return this;
-      }
-      var listeners = this._events[evt];
-      if (listeners.fn) {
-        if (listeners.fn === fn && (!once || listeners.once) && (!context || listeners.context === context)) {
-          clearEvent(this, evt);
-        }
-      } else {
-        for (var i = 0, events = [], length4 = listeners.length; i < length4; i++) {
-          if (listeners[i].fn !== fn || once && !listeners[i].once || context && listeners[i].context !== context) {
-            events.push(listeners[i]);
-          }
-        }
-        if (events.length)
-          this._events[evt] = events.length === 1 ? events[0] : events;
-        else
-          clearEvent(this, evt);
-      }
-      return this;
-    };
-    EventEmitter2.prototype.removeAllListeners = function removeAllListeners(event) {
-      var evt;
-      if (event) {
-        evt = prefix ? prefix + event : event;
-        if (this._events[evt])
-          clearEvent(this, evt);
-      } else {
-        this._events = new Events();
-        this._eventsCount = 0;
-      }
-      return this;
-    };
-    EventEmitter2.prototype.off = EventEmitter2.prototype.removeListener;
-    EventEmitter2.prototype.addListener = EventEmitter2.prototype.on;
-    EventEmitter2.prefixed = prefix;
-    EventEmitter2.EventEmitter = EventEmitter2;
-    if ("undefined" !== typeof module) {
-      module.exports = EventEmitter2;
-    }
-  }
-});
-
-// node_modules/err-code/index.js
-var require_err_code = __commonJS({
-  "node_modules/err-code/index.js"(exports, module) {
-    "use strict";
-    function assign(obj, props) {
-      for (const key in props) {
-        Object.defineProperty(obj, key, {
-          value: props[key],
-          enumerable: true,
-          configurable: true
-        });
-      }
-      return obj;
-    }
-    function createError(err, code5, props) {
-      if (!err || typeof err === "string") {
-        throw new TypeError("Please pass an Error to err-code");
-      }
-      if (!props) {
-        props = {};
-      }
-      if (typeof code5 === "object") {
-        props = code5;
-        code5 = "";
-      }
-      if (code5) {
-        props.code = code5;
-      }
-      try {
-        return assign(err, props);
-      } catch (_) {
-        props.message = err.message;
-        props.stack = err.stack;
-        const ErrClass = function() {
-        };
-        ErrClass.prototype = Object.create(Object.getPrototypeOf(err));
-        const output = assign(new ErrClass(), props);
-        return output;
-      }
-    }
-    module.exports = createError;
-  }
-});
-
-// node_modules/murmurhash3js-revisited/lib/murmurHash3js.js
-var require_murmurHash3js = __commonJS({
-  "node_modules/murmurhash3js-revisited/lib/murmurHash3js.js"(exports, module) {
-    (function(root, undefined2) {
-      "use strict";
-      var library = {
-        "version": "3.0.0",
-        "x86": {},
-        "x64": {},
-        "inputValidation": true
-      };
-      function _validBytes(bytes) {
-        if (!Array.isArray(bytes) && !ArrayBuffer.isView(bytes)) {
-          return false;
-        }
-        for (var i = 0; i < bytes.length; i++) {
-          if (!Number.isInteger(bytes[i]) || bytes[i] < 0 || bytes[i] > 255) {
-            return false;
-          }
-        }
-        return true;
-      }
-      function _x86Multiply(m, n) {
-        return (m & 65535) * n + (((m >>> 16) * n & 65535) << 16);
-      }
-      function _x86Rotl(m, n) {
-        return m << n | m >>> 32 - n;
-      }
-      function _x86Fmix(h) {
-        h ^= h >>> 16;
-        h = _x86Multiply(h, 2246822507);
-        h ^= h >>> 13;
-        h = _x86Multiply(h, 3266489909);
-        h ^= h >>> 16;
-        return h;
-      }
-      function _x64Add(m, n) {
-        m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535];
-        n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535];
-        var o = [0, 0, 0, 0];
-        o[3] += m[3] + n[3];
-        o[2] += o[3] >>> 16;
-        o[3] &= 65535;
-        o[2] += m[2] + n[2];
-        o[1] += o[2] >>> 16;
-        o[2] &= 65535;
-        o[1] += m[1] + n[1];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[0] += m[0] + n[0];
-        o[0] &= 65535;
-        return [o[0] << 16 | o[1], o[2] << 16 | o[3]];
-      }
-      function _x64Multiply(m, n) {
-        m = [m[0] >>> 16, m[0] & 65535, m[1] >>> 16, m[1] & 65535];
-        n = [n[0] >>> 16, n[0] & 65535, n[1] >>> 16, n[1] & 65535];
-        var o = [0, 0, 0, 0];
-        o[3] += m[3] * n[3];
-        o[2] += o[3] >>> 16;
-        o[3] &= 65535;
-        o[2] += m[2] * n[3];
-        o[1] += o[2] >>> 16;
-        o[2] &= 65535;
-        o[2] += m[3] * n[2];
-        o[1] += o[2] >>> 16;
-        o[2] &= 65535;
-        o[1] += m[1] * n[3];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[1] += m[2] * n[2];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[1] += m[3] * n[1];
-        o[0] += o[1] >>> 16;
-        o[1] &= 65535;
-        o[0] += m[0] * n[3] + m[1] * n[2] + m[2] * n[1] + m[3] * n[0];
-        o[0] &= 65535;
-        return [o[0] << 16 | o[1], o[2] << 16 | o[3]];
-      }
-      function _x64Rotl(m, n) {
-        n %= 64;
-        if (n === 32) {
-          return [m[1], m[0]];
-        } else if (n < 32) {
-          return [m[0] << n | m[1] >>> 32 - n, m[1] << n | m[0] >>> 32 - n];
-        } else {
-          n -= 32;
-          return [m[1] << n | m[0] >>> 32 - n, m[0] << n | m[1] >>> 32 - n];
-        }
-      }
-      function _x64LeftShift(m, n) {
-        n %= 64;
-        if (n === 0) {
-          return m;
-        } else if (n < 32) {
-          return [m[0] << n | m[1] >>> 32 - n, m[1] << n];
-        } else {
-          return [m[1] << n - 32, 0];
-        }
-      }
-      function _x64Xor(m, n) {
-        return [m[0] ^ n[0], m[1] ^ n[1]];
-      }
-      function _x64Fmix(h) {
-        h = _x64Xor(h, [0, h[0] >>> 1]);
-        h = _x64Multiply(h, [4283543511, 3981806797]);
-        h = _x64Xor(h, [0, h[0] >>> 1]);
-        h = _x64Multiply(h, [3301882366, 444984403]);
-        h = _x64Xor(h, [0, h[0] >>> 1]);
-        return h;
-      }
-      library.x86.hash32 = function(bytes, seed) {
-        if (library.inputValidation && !_validBytes(bytes)) {
-          return undefined2;
-        }
-        seed = seed || 0;
-        var remainder = bytes.length % 4;
-        var blocks = bytes.length - remainder;
-        var h1 = seed;
-        var k1 = 0;
-        var c1 = 3432918353;
-        var c2 = 461845907;
-        for (var i = 0; i < blocks; i = i + 4) {
-          k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24;
-          k1 = _x86Multiply(k1, c1);
-          k1 = _x86Rotl(k1, 15);
-          k1 = _x86Multiply(k1, c2);
-          h1 ^= k1;
-          h1 = _x86Rotl(h1, 13);
-          h1 = _x86Multiply(h1, 5) + 3864292196;
-        }
-        k1 = 0;
-        switch (remainder) {
-          case 3:
-            k1 ^= bytes[i + 2] << 16;
-          case 2:
-            k1 ^= bytes[i + 1] << 8;
-          case 1:
-            k1 ^= bytes[i];
-            k1 = _x86Multiply(k1, c1);
-            k1 = _x86Rotl(k1, 15);
-            k1 = _x86Multiply(k1, c2);
-            h1 ^= k1;
-        }
-        h1 ^= bytes.length;
-        h1 = _x86Fmix(h1);
-        return h1 >>> 0;
-      };
-      library.x86.hash128 = function(bytes, seed) {
-        if (library.inputValidation && !_validBytes(bytes)) {
-          return undefined2;
-        }
-        seed = seed || 0;
-        var remainder = bytes.length % 16;
-        var blocks = bytes.length - remainder;
-        var h1 = seed;
-        var h2 = seed;
-        var h3 = seed;
-        var h4 = seed;
-        var k1 = 0;
-        var k2 = 0;
-        var k3 = 0;
-        var k4 = 0;
-        var c1 = 597399067;
-        var c2 = 2869860233;
-        var c3 = 951274213;
-        var c4 = 2716044179;
-        for (var i = 0; i < blocks; i = i + 16) {
-          k1 = bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24;
-          k2 = bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24;
-          k3 = bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24;
-          k4 = bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24;
-          k1 = _x86Multiply(k1, c1);
-          k1 = _x86Rotl(k1, 15);
-          k1 = _x86Multiply(k1, c2);
-          h1 ^= k1;
-          h1 = _x86Rotl(h1, 19);
-          h1 += h2;
-          h1 = _x86Multiply(h1, 5) + 1444728091;
-          k2 = _x86Multiply(k2, c2);
-          k2 = _x86Rotl(k2, 16);
-          k2 = _x86Multiply(k2, c3);
-          h2 ^= k2;
-          h2 = _x86Rotl(h2, 17);
-          h2 += h3;
-          h2 = _x86Multiply(h2, 5) + 197830471;
-          k3 = _x86Multiply(k3, c3);
-          k3 = _x86Rotl(k3, 17);
-          k3 = _x86Multiply(k3, c4);
-          h3 ^= k3;
-          h3 = _x86Rotl(h3, 15);
-          h3 += h4;
-          h3 = _x86Multiply(h3, 5) + 2530024501;
-          k4 = _x86Multiply(k4, c4);
-          k4 = _x86Rotl(k4, 18);
-          k4 = _x86Multiply(k4, c1);
-          h4 ^= k4;
-          h4 = _x86Rotl(h4, 13);
-          h4 += h1;
-          h4 = _x86Multiply(h4, 5) + 850148119;
-        }
-        k1 = 0;
-        k2 = 0;
-        k3 = 0;
-        k4 = 0;
-        switch (remainder) {
-          case 15:
-            k4 ^= bytes[i + 14] << 16;
-          case 14:
-            k4 ^= bytes[i + 13] << 8;
-          case 13:
-            k4 ^= bytes[i + 12];
-            k4 = _x86Multiply(k4, c4);
-            k4 = _x86Rotl(k4, 18);
-            k4 = _x86Multiply(k4, c1);
-            h4 ^= k4;
-          case 12:
-            k3 ^= bytes[i + 11] << 24;
-          case 11:
-            k3 ^= bytes[i + 10] << 16;
-          case 10:
-            k3 ^= bytes[i + 9] << 8;
-          case 9:
-            k3 ^= bytes[i + 8];
-            k3 = _x86Multiply(k3, c3);
-            k3 = _x86Rotl(k3, 17);
-            k3 = _x86Multiply(k3, c4);
-            h3 ^= k3;
-          case 8:
-            k2 ^= bytes[i + 7] << 24;
-          case 7:
-            k2 ^= bytes[i + 6] << 16;
-          case 6:
-            k2 ^= bytes[i + 5] << 8;
-          case 5:
-            k2 ^= bytes[i + 4];
-            k2 = _x86Multiply(k2, c2);
-            k2 = _x86Rotl(k2, 16);
-            k2 = _x86Multiply(k2, c3);
-            h2 ^= k2;
-          case 4:
-            k1 ^= bytes[i + 3] << 24;
-          case 3:
-            k1 ^= bytes[i + 2] << 16;
-          case 2:
-            k1 ^= bytes[i + 1] << 8;
-          case 1:
-            k1 ^= bytes[i];
-            k1 = _x86Multiply(k1, c1);
-            k1 = _x86Rotl(k1, 15);
-            k1 = _x86Multiply(k1, c2);
-            h1 ^= k1;
-        }
-        h1 ^= bytes.length;
-        h2 ^= bytes.length;
-        h3 ^= bytes.length;
-        h4 ^= bytes.length;
-        h1 += h2;
-        h1 += h3;
-        h1 += h4;
-        h2 += h1;
-        h3 += h1;
-        h4 += h1;
-        h1 = _x86Fmix(h1);
-        h2 = _x86Fmix(h2);
-        h3 = _x86Fmix(h3);
-        h4 = _x86Fmix(h4);
-        h1 += h2;
-        h1 += h3;
-        h1 += h4;
-        h2 += h1;
-        h3 += h1;
-        h4 += h1;
-        return ("00000000" + (h1 >>> 0).toString(16)).slice(-8) + ("00000000" + (h2 >>> 0).toString(16)).slice(-8) + ("00000000" + (h3 >>> 0).toString(16)).slice(-8) + ("00000000" + (h4 >>> 0).toString(16)).slice(-8);
-      };
-      library.x64.hash128 = function(bytes, seed) {
-        if (library.inputValidation && !_validBytes(bytes)) {
-          return undefined2;
-        }
-        seed = seed || 0;
-        var remainder = bytes.length % 16;
-        var blocks = bytes.length - remainder;
-        var h1 = [0, seed];
-        var h2 = [0, seed];
-        var k1 = [0, 0];
-        var k2 = [0, 0];
-        var c1 = [2277735313, 289559509];
-        var c2 = [1291169091, 658871167];
-        for (var i = 0; i < blocks; i = i + 16) {
-          k1 = [bytes[i + 4] | bytes[i + 5] << 8 | bytes[i + 6] << 16 | bytes[i + 7] << 24, bytes[i] | bytes[i + 1] << 8 | bytes[i + 2] << 16 | bytes[i + 3] << 24];
-          k2 = [bytes[i + 12] | bytes[i + 13] << 8 | bytes[i + 14] << 16 | bytes[i + 15] << 24, bytes[i + 8] | bytes[i + 9] << 8 | bytes[i + 10] << 16 | bytes[i + 11] << 24];
-          k1 = _x64Multiply(k1, c1);
-          k1 = _x64Rotl(k1, 31);
-          k1 = _x64Multiply(k1, c2);
-          h1 = _x64Xor(h1, k1);
-          h1 = _x64Rotl(h1, 27);
-          h1 = _x64Add(h1, h2);
-          h1 = _x64Add(_x64Multiply(h1, [0, 5]), [0, 1390208809]);
-          k2 = _x64Multiply(k2, c2);
-          k2 = _x64Rotl(k2, 33);
-          k2 = _x64Multiply(k2, c1);
-          h2 = _x64Xor(h2, k2);
-          h2 = _x64Rotl(h2, 31);
-          h2 = _x64Add(h2, h1);
-          h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 944331445]);
-        }
-        k1 = [0, 0];
-        k2 = [0, 0];
-        switch (remainder) {
-          case 15:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 14]], 48));
-          case 14:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 13]], 40));
-          case 13:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 12]], 32));
-          case 12:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 11]], 24));
-          case 11:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 10]], 16));
-          case 10:
-            k2 = _x64Xor(k2, _x64LeftShift([0, bytes[i + 9]], 8));
-          case 9:
-            k2 = _x64Xor(k2, [0, bytes[i + 8]]);
-            k2 = _x64Multiply(k2, c2);
-            k2 = _x64Rotl(k2, 33);
-            k2 = _x64Multiply(k2, c1);
-            h2 = _x64Xor(h2, k2);
-          case 8:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 7]], 56));
-          case 7:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 6]], 48));
-          case 6:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 5]], 40));
-          case 5:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 4]], 32));
-          case 4:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 3]], 24));
-          case 3:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 2]], 16));
-          case 2:
-            k1 = _x64Xor(k1, _x64LeftShift([0, bytes[i + 1]], 8));
-          case 1:
-            k1 = _x64Xor(k1, [0, bytes[i]]);
-            k1 = _x64Multiply(k1, c1);
-            k1 = _x64Rotl(k1, 31);
-            k1 = _x64Multiply(k1, c2);
-            h1 = _x64Xor(h1, k1);
-        }
-        h1 = _x64Xor(h1, [0, bytes.length]);
-        h2 = _x64Xor(h2, [0, bytes.length]);
-        h1 = _x64Add(h1, h2);
-        h2 = _x64Add(h2, h1);
-        h1 = _x64Fmix(h1);
-        h2 = _x64Fmix(h2);
-        h1 = _x64Add(h1, h2);
-        h2 = _x64Add(h2, h1);
-        return ("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8);
-      };
-      if (typeof exports !== "undefined") {
-        if (typeof module !== "undefined" && module.exports) {
-          exports = module.exports = library;
-        }
-        exports.murmurHash3 = library;
-      } else if (typeof define === "function" && define.amd) {
-        define([], function() {
-          return library;
-        });
-      } else {
-        library._murmurHash3 = root.murmurHash3;
-        library.noConflict = function() {
-          root.murmurHash3 = library._murmurHash3;
-          library._murmurHash3 = undefined2;
-          library.noConflict = undefined2;
-          return library;
-        };
-        root.murmurHash3 = library;
-      }
-    })(exports);
-  }
-});
-
-// node_modules/murmurhash3js-revisited/index.js
-var require_murmurhash3js_revisited = __commonJS({
-  "node_modules/murmurhash3js-revisited/index.js"(exports, module) {
-    module.exports = require_murmurHash3js();
-  }
-});
-
-// node_modules/sparse-array/index.js
-var require_sparse_array = __commonJS({
-  "node_modules/sparse-array/index.js"(exports, module) {
-    "use strict";
-    var BITS_PER_BYTE = 7;
-    module.exports = class SparseArray {
-      constructor() {
-        this._bitArrays = [];
-        this._data = [];
-        this._length = 0;
-        this._changedLength = false;
-        this._changedData = false;
-      }
-      set(index, value) {
-        let pos = this._internalPositionFor(index, false);
-        if (value === void 0) {
-          if (pos !== -1) {
-            this._unsetInternalPos(pos);
-            this._unsetBit(index);
-            this._changedLength = true;
-            this._changedData = true;
-          }
-        } else {
-          let needsSort = false;
-          if (pos === -1) {
-            pos = this._data.length;
-            this._setBit(index);
-            this._changedData = true;
-          } else {
-            needsSort = true;
-          }
-          this._setInternalPos(pos, index, value, needsSort);
-          this._changedLength = true;
-        }
-      }
-      unset(index) {
-        this.set(index, void 0);
-      }
-      get(index) {
-        this._sortData();
-        const pos = this._internalPositionFor(index, true);
-        if (pos === -1) {
-          return void 0;
-        }
-        return this._data[pos][1];
-      }
-      push(value) {
-        this.set(this.length, value);
-        return this.length;
-      }
-      get length() {
-        this._sortData();
-        if (this._changedLength) {
-          const last2 = this._data[this._data.length - 1];
-          this._length = last2 ? last2[0] + 1 : 0;
-          this._changedLength = false;
-        }
-        return this._length;
-      }
-      forEach(iterator) {
-        let i = 0;
-        while (i < this.length) {
-          iterator(this.get(i), i, this);
-          i++;
-        }
-      }
-      map(iterator) {
-        let i = 0;
-        let mapped = new Array(this.length);
-        while (i < this.length) {
-          mapped[i] = iterator(this.get(i), i, this);
-          i++;
-        }
-        return mapped;
-      }
-      reduce(reducer, initialValue) {
-        let i = 0;
-        let acc = initialValue;
-        while (i < this.length) {
-          const value = this.get(i);
-          acc = reducer(acc, value, i);
-          i++;
-        }
-        return acc;
-      }
-      find(finder) {
-        let i = 0, found, last2;
-        while (i < this.length && !found) {
-          last2 = this.get(i);
-          found = finder(last2);
-          i++;
-        }
-        return found ? last2 : void 0;
-      }
-      _internalPositionFor(index, noCreate) {
-        const bytePos = this._bytePosFor(index, noCreate);
-        if (bytePos >= this._bitArrays.length) {
-          return -1;
-        }
-        const byte = this._bitArrays[bytePos];
-        const bitPos = index - bytePos * BITS_PER_BYTE;
-        const exists2 = (byte & 1 << bitPos) > 0;
-        if (!exists2) {
-          return -1;
-        }
-        const previousPopCount = this._bitArrays.slice(0, bytePos).reduce(popCountReduce, 0);
-        const mask = ~(4294967295 << bitPos + 1);
-        const bytePopCount = popCount(byte & mask);
-        const arrayPos = previousPopCount + bytePopCount - 1;
-        return arrayPos;
-      }
-      _bytePosFor(index, noCreate) {
-        const bytePos = Math.floor(index / BITS_PER_BYTE);
-        const targetLength = bytePos + 1;
-        while (!noCreate && this._bitArrays.length < targetLength) {
-          this._bitArrays.push(0);
-        }
-        return bytePos;
-      }
-      _setBit(index) {
-        const bytePos = this._bytePosFor(index, false);
-        this._bitArrays[bytePos] |= 1 << index - bytePos * BITS_PER_BYTE;
-      }
-      _unsetBit(index) {
-        const bytePos = this._bytePosFor(index, false);
-        this._bitArrays[bytePos] &= ~(1 << index - bytePos * BITS_PER_BYTE);
-      }
-      _setInternalPos(pos, index, value, needsSort) {
-        const data = this._data;
-        const elem = [index, value];
-        if (needsSort) {
-          this._sortData();
-          data[pos] = elem;
-        } else {
-          if (data.length) {
-            if (data[data.length - 1][0] >= index) {
-              data.push(elem);
-            } else if (data[0][0] <= index) {
-              data.unshift(elem);
-            } else {
-              const randomIndex = Math.round(data.length / 2);
-              this._data = data.slice(0, randomIndex).concat(elem).concat(data.slice(randomIndex));
-            }
-          } else {
-            this._data.push(elem);
-          }
-          this._changedData = true;
-          this._changedLength = true;
-        }
-      }
-      _unsetInternalPos(pos) {
-        this._data.splice(pos, 1);
-      }
-      _sortData() {
-        if (this._changedData) {
-          this._data.sort(sortInternal);
-        }
-        this._changedData = false;
-      }
-      bitField() {
-        const bytes = [];
-        let pendingBitsForResultingByte = 8;
-        let pendingBitsForNewByte = 0;
-        let resultingByte = 0;
-        let newByte;
-        const pending = this._bitArrays.slice();
-        while (pending.length || pendingBitsForNewByte) {
-          if (pendingBitsForNewByte === 0) {
-            newByte = pending.shift();
-            pendingBitsForNewByte = 7;
-          }
-          const usingBits = Math.min(pendingBitsForNewByte, pendingBitsForResultingByte);
-          const mask = ~(255 << usingBits);
-          const masked = newByte & mask;
-          resultingByte |= masked << 8 - pendingBitsForResultingByte;
-          newByte = newByte >>> usingBits;
-          pendingBitsForNewByte -= usingBits;
-          pendingBitsForResultingByte -= usingBits;
-          if (!pendingBitsForResultingByte || !pendingBitsForNewByte && !pending.length) {
-            bytes.push(resultingByte);
-            resultingByte = 0;
-            pendingBitsForResultingByte = 8;
-          }
-        }
-        for (var i = bytes.length - 1; i > 0; i--) {
-          const value = bytes[i];
-          if (value === 0) {
-            bytes.pop();
-          } else {
-            break;
-          }
-        }
-        return bytes;
-      }
-      compactArray() {
-        this._sortData();
-        return this._data.map(valueOnly);
-      }
-    };
-    function popCountReduce(count, byte) {
-      return count + popCount(byte);
-    }
-    function popCount(_v) {
-      let v = _v;
-      v = v - (v >> 1 & 1431655765);
-      v = (v & 858993459) + (v >> 2 & 858993459);
-      return (v + (v >> 4) & 252645135) * 16843009 >> 24;
-    }
-    function sortInternal(a, b) {
-      return a[0] - b[0];
-    }
-    function valueOnly(elem) {
-      return elem[1];
-    }
-  }
-});
-
-// node_modules/rabin-wasm/src/rabin.js
-var require_rabin = __commonJS({
-  "node_modules/rabin-wasm/src/rabin.js"(exports, module) {
-    var Rabin = class {
-      /**
-       * Creates an instance of Rabin.
-       * @param { import("./../dist/rabin-wasm") } asModule
-       * @param {number} [bits=12]
-       * @param {number} [min=8 * 1024]
-       * @param {number} [max=32 * 1024]
-       * @param {number} polynomial
-       * @memberof Rabin
-       */
-      constructor(asModule, bits = 12, min = 8 * 1024, max = 32 * 1024, windowSize = 64, polynomial) {
-        this.bits = bits;
-        this.min = min;
-        this.max = max;
-        this.asModule = asModule;
-        this.rabin = new asModule.Rabin(bits, min, max, windowSize, polynomial);
-        this.polynomial = polynomial;
-      }
-      /**
-       * Fingerprints the buffer
-       *
-       * @param {Uint8Array} buf
-       * @returns {Array}
-       * @memberof Rabin
-       */
-      fingerprint(buf2) {
-        const {
-          __retain,
-          __release,
-          __allocArray,
-          __getInt32Array,
-          Int32Array_ID,
-          Uint8Array_ID
-        } = this.asModule;
-        const lengths = new Int32Array(Math.ceil(buf2.length / this.min));
-        const lengthsPtr = __retain(__allocArray(Int32Array_ID, lengths));
-        const pointer = __retain(__allocArray(Uint8Array_ID, buf2));
-        const out = this.rabin.fingerprint(pointer, lengthsPtr);
-        const processed = __getInt32Array(out);
-        __release(pointer);
-        __release(lengthsPtr);
-        const end = processed.indexOf(0);
-        return end >= 0 ? processed.subarray(0, end) : processed;
-      }
-    };
-    module.exports = Rabin;
-  }
-});
-
-// node_modules/@assemblyscript/loader/index.js
-var require_loader = __commonJS({
-  "node_modules/@assemblyscript/loader/index.js"(exports) {
-    "use strict";
-    var ID_OFFSET = -8;
-    var SIZE_OFFSET = -4;
-    var ARRAYBUFFER_ID = 0;
-    var STRING_ID = 1;
-    var ARRAYBUFFERVIEW = 1 << 0;
-    var ARRAY = 1 << 1;
-    var SET = 1 << 2;
-    var MAP = 1 << 3;
-    var VAL_ALIGN_OFFSET = 5;
-    var VAL_ALIGN = 1 << VAL_ALIGN_OFFSET;
-    var VAL_SIGNED = 1 << 10;
-    var VAL_FLOAT = 1 << 11;
-    var VAL_NULLABLE = 1 << 12;
-    var VAL_MANAGED = 1 << 13;
-    var KEY_ALIGN_OFFSET = 14;
-    var KEY_ALIGN = 1 << KEY_ALIGN_OFFSET;
-    var KEY_SIGNED = 1 << 19;
-    var KEY_FLOAT = 1 << 20;
-    var KEY_NULLABLE = 1 << 21;
-    var KEY_MANAGED = 1 << 22;
-    var ARRAYBUFFERVIEW_BUFFER_OFFSET = 0;
-    var ARRAYBUFFERVIEW_DATASTART_OFFSET = 4;
-    var ARRAYBUFFERVIEW_DATALENGTH_OFFSET = 8;
-    var ARRAYBUFFERVIEW_SIZE = 12;
-    var ARRAY_LENGTH_OFFSET = 12;
-    var ARRAY_SIZE = 16;
-    var BIGINT = typeof BigUint64Array !== "undefined";
-    var THIS = Symbol();
-    var CHUNKSIZE = 1024;
-    function getStringImpl(buffer2, ptr) {
-      const U32 = new Uint32Array(buffer2);
-      const U16 = new Uint16Array(buffer2);
-      var length4 = U32[ptr + SIZE_OFFSET >>> 2] >>> 1;
-      var offset = ptr >>> 1;
-      if (length4 <= CHUNKSIZE)
-        return String.fromCharCode.apply(String, U16.subarray(offset, offset + length4));
-      const parts = [];
-      do {
-        const last2 = U16[offset + CHUNKSIZE - 1];
-        const size = last2 >= 55296 && last2 < 56320 ? CHUNKSIZE - 1 : CHUNKSIZE;
-        parts.push(String.fromCharCode.apply(String, U16.subarray(offset, offset += size)));
-        length4 -= size;
-      } while (length4 > CHUNKSIZE);
-      return parts.join("") + String.fromCharCode.apply(String, U16.subarray(offset, offset + length4));
-    }
-    function preInstantiate(imports) {
-      const baseModule = {};
-      function getString(memory, ptr) {
-        if (!memory)
-          return "";
-        return getStringImpl(memory.buffer, ptr);
-      }
-      const env = imports.env = imports.env || {};
-      env.abort = env.abort || function abort(mesg, file, line, colm) {
-        const memory = baseModule.memory || env.memory;
-        throw Error("abort: " + getString(memory, mesg) + " at " + getString(memory, file) + ":" + line + ":" + colm);
-      };
-      env.trace = env.trace || function trace(mesg, n) {
-        const memory = baseModule.memory || env.memory;
-        console.log("trace: " + getString(memory, mesg) + (n ? " " : "") + Array.prototype.slice.call(arguments, 2, 2 + n).join(", "));
-      };
-      imports.Math = imports.Math || Math;
-      imports.Date = imports.Date || Date;
-      return baseModule;
-    }
-    function postInstantiate(baseModule, instance) {
-      const rawExports = instance.exports;
-      const memory = rawExports.memory;
-      const table = rawExports.table;
-      const alloc4 = rawExports["__alloc"];
-      const retain = rawExports["__retain"];
-      const rttiBase = rawExports["__rtti_base"] || ~0;
-      function getInfo(id) {
-        const U32 = new Uint32Array(memory.buffer);
-        const count = U32[rttiBase >>> 2];
-        if ((id >>>= 0) >= count)
-          throw Error("invalid id: " + id);
-        return U32[(rttiBase + 4 >>> 2) + id * 2];
-      }
-      function getBase(id) {
-        const U32 = new Uint32Array(memory.buffer);
-        const count = U32[rttiBase >>> 2];
-        if ((id >>>= 0) >= count)
-          throw Error("invalid id: " + id);
-        return U32[(rttiBase + 4 >>> 2) + id * 2 + 1];
-      }
-      function getValueAlign(info) {
-        return 31 - Math.clz32(info >>> VAL_ALIGN_OFFSET & 31);
-      }
-      function getKeyAlign(info) {
-        return 31 - Math.clz32(info >>> KEY_ALIGN_OFFSET & 31);
-      }
-      function __allocString(str) {
-        const length4 = str.length;
-        const ptr = alloc4(length4 << 1, STRING_ID);
-        const U16 = new Uint16Array(memory.buffer);
-        for (var i = 0, p = ptr >>> 1; i < length4; ++i)
-          U16[p + i] = str.charCodeAt(i);
-        return ptr;
-      }
-      baseModule.__allocString = __allocString;
-      function __getString(ptr) {
-        const buffer2 = memory.buffer;
-        const id = new Uint32Array(buffer2)[ptr + ID_OFFSET >>> 2];
-        if (id !== STRING_ID)
-          throw Error("not a string: " + ptr);
-        return getStringImpl(buffer2, ptr);
-      }
-      baseModule.__getString = __getString;
-      function getView(alignLog2, signed, float) {
-        const buffer2 = memory.buffer;
-        if (float) {
-          switch (alignLog2) {
-            case 2:
-              return new Float32Array(buffer2);
-            case 3:
-              return new Float64Array(buffer2);
-          }
-        } else {
-          switch (alignLog2) {
-            case 0:
-              return new (signed ? Int8Array : Uint8Array)(buffer2);
-            case 1:
-              return new (signed ? Int16Array : Uint16Array)(buffer2);
-            case 2:
-              return new (signed ? Int32Array : Uint32Array)(buffer2);
-            case 3:
-              return new (signed ? BigInt64Array : BigUint64Array)(buffer2);
-          }
-        }
-        throw Error("unsupported align: " + alignLog2);
-      }
-      function __allocArray(id, values) {
-        const info = getInfo(id);
-        if (!(info & (ARRAYBUFFERVIEW | ARRAY)))
-          throw Error("not an array: " + id + " @ " + info);
-        const align = getValueAlign(info);
-        const length4 = values.length;
-        const buf2 = alloc4(length4 << align, ARRAYBUFFER_ID);
-        const arr = alloc4(info & ARRAY ? ARRAY_SIZE : ARRAYBUFFERVIEW_SIZE, id);
-        const U32 = new Uint32Array(memory.buffer);
-        U32[arr + ARRAYBUFFERVIEW_BUFFER_OFFSET >>> 2] = retain(buf2);
-        U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2] = buf2;
-        U32[arr + ARRAYBUFFERVIEW_DATALENGTH_OFFSET >>> 2] = length4 << align;
-        if (info & ARRAY)
-          U32[arr + ARRAY_LENGTH_OFFSET >>> 2] = length4;
-        const view = getView(align, info & VAL_SIGNED, info & VAL_FLOAT);
-        if (info & VAL_MANAGED) {
-          for (let i = 0; i < length4; ++i)
-            view[(buf2 >>> align) + i] = retain(values[i]);
-        } else {
-          view.set(values, buf2 >>> align);
-        }
-        return arr;
-      }
-      baseModule.__allocArray = __allocArray;
-      function __getArrayView(arr) {
-        const U32 = new Uint32Array(memory.buffer);
-        const id = U32[arr + ID_OFFSET >>> 2];
-        const info = getInfo(id);
-        if (!(info & ARRAYBUFFERVIEW))
-          throw Error("not an array: " + id);
-        const align = getValueAlign(info);
-        var buf2 = U32[arr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];
-        const length4 = info & ARRAY ? U32[arr + ARRAY_LENGTH_OFFSET >>> 2] : U32[buf2 + SIZE_OFFSET >>> 2] >>> align;
-        return getView(align, info & VAL_SIGNED, info & VAL_FLOAT).subarray(buf2 >>>= align, buf2 + length4);
-      }
-      baseModule.__getArrayView = __getArrayView;
-      function __getArray(arr) {
-        const input = __getArrayView(arr);
-        const len = input.length;
-        const out = new Array(len);
-        for (let i = 0; i < len; i++)
-          out[i] = input[i];
-        return out;
-      }
-      baseModule.__getArray = __getArray;
-      function __getArrayBuffer(ptr) {
-        const buffer2 = memory.buffer;
-        const length4 = new Uint32Array(buffer2)[ptr + SIZE_OFFSET >>> 2];
-        return buffer2.slice(ptr, ptr + length4);
-      }
-      baseModule.__getArrayBuffer = __getArrayBuffer;
-      function getTypedArray(Type2, alignLog2, ptr) {
-        return new Type2(getTypedArrayView(Type2, alignLog2, ptr));
-      }
-      function getTypedArrayView(Type2, alignLog2, ptr) {
-        const buffer2 = memory.buffer;
-        const U32 = new Uint32Array(buffer2);
-        const bufPtr = U32[ptr + ARRAYBUFFERVIEW_DATASTART_OFFSET >>> 2];
-        return new Type2(buffer2, bufPtr, U32[bufPtr + SIZE_OFFSET >>> 2] >>> alignLog2);
-      }
-      baseModule.__getInt8Array = getTypedArray.bind(null, Int8Array, 0);
-      baseModule.__getInt8ArrayView = getTypedArrayView.bind(null, Int8Array, 0);
-      baseModule.__getUint8Array = getTypedArray.bind(null, Uint8Array, 0);
-      baseModule.__getUint8ArrayView = getTypedArrayView.bind(null, Uint8Array, 0);
-      baseModule.__getUint8ClampedArray = getTypedArray.bind(null, Uint8ClampedArray, 0);
-      baseModule.__getUint8ClampedArrayView = getTypedArrayView.bind(null, Uint8ClampedArray, 0);
-      baseModule.__getInt16Array = getTypedArray.bind(null, Int16Array, 1);
-      baseModule.__getInt16ArrayView = getTypedArrayView.bind(null, Int16Array, 1);
-      baseModule.__getUint16Array = getTypedArray.bind(null, Uint16Array, 1);
-      baseModule.__getUint16ArrayView = getTypedArrayView.bind(null, Uint16Array, 1);
-      baseModule.__getInt32Array = getTypedArray.bind(null, Int32Array, 2);
-      baseModule.__getInt32ArrayView = getTypedArrayView.bind(null, Int32Array, 2);
-      baseModule.__getUint32Array = getTypedArray.bind(null, Uint32Array, 2);
-      baseModule.__getUint32ArrayView = getTypedArrayView.bind(null, Uint32Array, 2);
-      if (BIGINT) {
-        baseModule.__getInt64Array = getTypedArray.bind(null, BigInt64Array, 3);
-        baseModule.__getInt64ArrayView = getTypedArrayView.bind(null, BigInt64Array, 3);
-        baseModule.__getUint64Array = getTypedArray.bind(null, BigUint64Array, 3);
-        baseModule.__getUint64ArrayView = getTypedArrayView.bind(null, BigUint64Array, 3);
-      }
-      baseModule.__getFloat32Array = getTypedArray.bind(null, Float32Array, 2);
-      baseModule.__getFloat32ArrayView = getTypedArrayView.bind(null, Float32Array, 2);
-      baseModule.__getFloat64Array = getTypedArray.bind(null, Float64Array, 3);
-      baseModule.__getFloat64ArrayView = getTypedArrayView.bind(null, Float64Array, 3);
-      function __instanceof(ptr, baseId) {
-        const U32 = new Uint32Array(memory.buffer);
-        var id = U32[ptr + ID_OFFSET >>> 2];
-        if (id <= U32[rttiBase >>> 2]) {
-          do
-            if (id == baseId)
-              return true;
-          while (id = getBase(id));
-        }
-        return false;
-      }
-      baseModule.__instanceof = __instanceof;
-      baseModule.memory = baseModule.memory || memory;
-      baseModule.table = baseModule.table || table;
-      return demangle(rawExports, baseModule);
-    }
-    function isResponse(o) {
-      return typeof Response !== "undefined" && o instanceof Response;
-    }
-    async function instantiate(source, imports) {
-      if (isResponse(source = await source))
-        return instantiateStreaming(source, imports);
-      return postInstantiate(
-        preInstantiate(imports || (imports = {})),
-        await WebAssembly.instantiate(
-          source instanceof WebAssembly.Module ? source : await WebAssembly.compile(source),
-          imports
-        )
-      );
-    }
-    exports.instantiate = instantiate;
-    function instantiateSync(source, imports) {
-      return postInstantiate(
-        preInstantiate(imports || (imports = {})),
-        new WebAssembly.Instance(
-          source instanceof WebAssembly.Module ? source : new WebAssembly.Module(source),
-          imports
-        )
-      );
-    }
-    exports.instantiateSync = instantiateSync;
-    async function instantiateStreaming(source, imports) {
-      if (!WebAssembly.instantiateStreaming) {
-        return instantiate(
-          isResponse(source = await source) ? source.arrayBuffer() : source,
-          imports
-        );
-      }
-      return postInstantiate(
-        preInstantiate(imports || (imports = {})),
-        (await WebAssembly.instantiateStreaming(source, imports)).instance
-      );
-    }
-    exports.instantiateStreaming = instantiateStreaming;
-    function demangle(exports2, baseModule) {
-      var module2 = baseModule ? Object.create(baseModule) : {};
-      var setArgumentsLength = exports2["__argumentsLength"] ? function(length4) {
-        exports2["__argumentsLength"].value = length4;
-      } : exports2["__setArgumentsLength"] || exports2["__setargc"] || function() {
-      };
-      for (let internalName in exports2) {
-        if (!Object.prototype.hasOwnProperty.call(exports2, internalName))
-          continue;
-        const elem = exports2[internalName];
-        let parts = internalName.split(".");
-        let curr = module2;
-        while (parts.length > 1) {
-          let part = parts.shift();
-          if (!Object.prototype.hasOwnProperty.call(curr, part))
-            curr[part] = {};
-          curr = curr[part];
-        }
-        let name4 = parts[0];
-        let hash = name4.indexOf("#");
-        if (hash >= 0) {
-          let className = name4.substring(0, hash);
-          let classElem = curr[className];
-          if (typeof classElem === "undefined" || !classElem.prototype) {
-            let ctor = function(...args) {
-              return ctor.wrap(ctor.prototype.constructor(0, ...args));
-            };
-            ctor.prototype = {
-              valueOf: function valueOf() {
-                return this[THIS];
-              }
-            };
-            ctor.wrap = function(thisValue) {
-              return Object.create(ctor.prototype, { [THIS]: { value: thisValue, writable: false } });
-            };
-            if (classElem)
-              Object.getOwnPropertyNames(classElem).forEach(
-                (name5) => Object.defineProperty(ctor, name5, Object.getOwnPropertyDescriptor(classElem, name5))
-              );
-            curr[className] = ctor;
-          }
-          name4 = name4.substring(hash + 1);
-          curr = curr[className].prototype;
-          if (/^(get|set):/.test(name4)) {
-            if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) {
-              let getter = exports2[internalName.replace("set:", "get:")];
-              let setter = exports2[internalName.replace("get:", "set:")];
-              Object.defineProperty(curr, name4, {
-                get: function() {
-                  return getter(this[THIS]);
-                },
-                set: function(value) {
-                  setter(this[THIS], value);
-                },
-                enumerable: true
-              });
-            }
-          } else {
-            if (name4 === "constructor") {
-              (curr[name4] = (...args) => {
-                setArgumentsLength(args.length);
-                return elem(...args);
-              }).original = elem;
-            } else {
-              (curr[name4] = function(...args) {
-                setArgumentsLength(args.length);
-                return elem(this[THIS], ...args);
-              }).original = elem;
-            }
-          }
-        } else {
-          if (/^(get|set):/.test(name4)) {
-            if (!Object.prototype.hasOwnProperty.call(curr, name4 = name4.substring(4))) {
-              Object.defineProperty(curr, name4, {
-                get: exports2[internalName.replace("set:", "get:")],
-                set: exports2[internalName.replace("get:", "set:")],
-                enumerable: true
-              });
-            }
-          } else if (typeof elem === "function" && elem !== setArgumentsLength) {
-            (curr[name4] = (...args) => {
-              setArgumentsLength(args.length);
-              return elem(...args);
-            }).original = elem;
-          } else {
-            curr[name4] = elem;
-          }
-        }
-      }
-      return module2;
-    }
-    exports.demangle = demangle;
-  }
-});
-
-// node_modules/rabin-wasm/dist/rabin-wasm.node.js
-var require_rabin_wasm_node = __commonJS({
-  "node_modules/rabin-wasm/dist/rabin-wasm.node.js"(exports, module) {
-    var { instantiateSync } = require_loader();
-    var fs6 = __require("fs");
-    loadWebAssembly.supported = typeof WebAssembly !== "undefined";
-    async function loadWebAssembly(imp = {}) {
-      if (!loadWebAssembly.supported)
-        return null;
-      return instantiateSync(fs6.readFileSync(__dirname + "/../dist/rabin.wasm"), imp);
-    }
-    module.exports = loadWebAssembly;
-  }
-});
-
-// node_modules/rabin-wasm/src/index.js
-var require_src = __commonJS({
-  "node_modules/rabin-wasm/src/index.js"(exports, module) {
-    var Rabin = require_rabin();
-    var getRabin = require_rabin_wasm_node();
-    var create5 = async (avg, min, max, windowSize, polynomial) => {
-      const compiled = await getRabin();
-      return new Rabin(compiled, avg, min, max, windowSize, polynomial);
-    };
-    module.exports = {
-      Rabin,
-      create: create5
-    };
-  }
-});
-
-// node_modules/is-plain-obj/index.js
-var require_is_plain_obj = __commonJS({
-  "node_modules/is-plain-obj/index.js"(exports, module) {
-    "use strict";
-    module.exports = (value) => {
-      if (Object.prototype.toString.call(value) !== "[object Object]") {
-        return false;
-      }
-      const prototype = Object.getPrototypeOf(value);
-      return prototype === null || prototype === Object.prototype;
-    };
-  }
-});
-
-// node_modules/merge-options/index.js
-var require_merge_options = __commonJS({
-  "node_modules/merge-options/index.js"(exports, module) {
-    "use strict";
-    var isOptionObject = require_is_plain_obj();
-    var { hasOwnProperty } = Object.prototype;
-    var { propertyIsEnumerable } = Object;
-    var defineProperty = (object, name4, value) => Object.defineProperty(object, name4, {
-      value,
-      writable: true,
-      enumerable: true,
-      configurable: true
-    });
-    var globalThis2 = exports;
-    var defaultMergeOptions = {
-      concatArrays: false,
-      ignoreUndefined: false
-    };
-    var getEnumerableOwnPropertyKeys = (value) => {
-      const keys = [];
-      for (const key in value) {
-        if (hasOwnProperty.call(value, key)) {
-          keys.push(key);
-        }
-      }
-      if (Object.getOwnPropertySymbols) {
-        const symbols = Object.getOwnPropertySymbols(value);
-        for (const symbol2 of symbols) {
-          if (propertyIsEnumerable.call(value, symbol2)) {
-            keys.push(symbol2);
-          }
-        }
-      }
-      return keys;
-    };
-    function clone(value) {
-      if (Array.isArray(value)) {
-        return cloneArray(value);
-      }
-      if (isOptionObject(value)) {
-        return cloneOptionObject(value);
-      }
-      return value;
-    }
-    function cloneArray(array) {
-      const result = array.slice(0, 0);
-      getEnumerableOwnPropertyKeys(array).forEach((key) => {
-        defineProperty(result, key, clone(array[key]));
-      });
-      return result;
-    }
-    function cloneOptionObject(object) {
-      const result = Object.getPrototypeOf(object) === null ? /* @__PURE__ */ Object.create(null) : {};
-      getEnumerableOwnPropertyKeys(object).forEach((key) => {
-        defineProperty(result, key, clone(object[key]));
-      });
-      return result;
-    }
-    var mergeKeys = (merged, source, keys, config) => {
-      keys.forEach((key) => {
-        if (typeof source[key] === "undefined" && config.ignoreUndefined) {
-          return;
-        }
-        if (key in merged && merged[key] !== Object.getPrototypeOf(merged)) {
-          defineProperty(merged, key, merge2(merged[key], source[key], config));
-        } else {
-          defineProperty(merged, key, clone(source[key]));
-        }
-      });
-      return merged;
-    };
-    var concatArrays = (merged, source, config) => {
-      let result = merged.slice(0, 0);
-      let resultIndex = 0;
-      [merged, source].forEach((array) => {
-        const indices = [];
-        for (let k = 0; k < array.length; k++) {
-          if (!hasOwnProperty.call(array, k)) {
-            continue;
-          }
-          indices.push(String(k));
-          if (array === merged) {
-            defineProperty(result, resultIndex++, array[k]);
-          } else {
-            defineProperty(result, resultIndex++, clone(array[k]));
-          }
-        }
-        result = mergeKeys(result, array, getEnumerableOwnPropertyKeys(array).filter((key) => !indices.includes(key)), config);
-      });
-      return result;
-    };
-    function merge2(merged, source, config) {
-      if (config.concatArrays && Array.isArray(merged) && Array.isArray(source)) {
-        return concatArrays(merged, source, config);
-      }
-      if (!isOptionObject(source) || !isOptionObject(merged)) {
-        return clone(source);
-      }
-      return mergeKeys(merged, source, getEnumerableOwnPropertyKeys(source), config);
-    }
-    module.exports = function(...options) {
-      const config = merge2(clone(defaultMergeOptions), this !== globalThis2 && this || {}, defaultMergeOptions);
-      let merged = { _: {} };
-      for (const option of options) {
-        if (option === void 0) {
-          continue;
-        }
-        if (!isOptionObject(option)) {
-          throw new TypeError("`" + option + "` is not an Option Object");
-        }
-        merged = merge2(merged, { _: option }, config);
-      }
-      return merged._;
-    };
-  }
-});
-
-// node_modules/ms/index.js
-var require_ms = __commonJS({
-  "node_modules/ms/index.js"(exports, module) {
-    var s = 1e3;
-    var m = s * 60;
-    var h = m * 60;
-    var d = h * 24;
-    var w = d * 7;
-    var y = d * 365.25;
-    module.exports = function(val, options) {
-      options = options || {};
-      var type = typeof val;
-      if (type === "string" && val.length > 0) {
-        return parse(val);
-      } else if (type === "number" && isFinite(val)) {
-        return options.long ? fmtLong(val) : fmtShort(val);
-      }
-      throw new Error(
-        "val is not a non-empty string or a valid number. val=" + JSON.stringify(val)
-      );
-    };
-    function parse(str) {
-      str = String(str);
-      if (str.length > 100) {
-        return;
-      }
-      var match2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
-        str
-      );
-      if (!match2) {
-        return;
-      }
-      var n = parseFloat(match2[1]);
-      var type = (match2[2] || "ms").toLowerCase();
-      switch (type) {
-        case "years":
-        case "year":
-        case "yrs":
-        case "yr":
-        case "y":
-          return n * y;
-        case "weeks":
-        case "week":
-        case "w":
-          return n * w;
-        case "days":
-        case "day":
-        case "d":
-          return n * d;
-        case "hours":
-        case "hour":
-        case "hrs":
-        case "hr":
-        case "h":
-          return n * h;
-        case "minutes":
-        case "minute":
-        case "mins":
-        case "min":
-        case "m":
-          return n * m;
-        case "seconds":
-        case "second":
-        case "secs":
-        case "sec":
-        case "s":
-          return n * s;
-        case "milliseconds":
-        case "millisecond":
-        case "msecs":
-        case "msec":
-        case "ms":
-          return n;
-        default:
-          return void 0;
-      }
-    }
-    function fmtShort(ms) {
-      var msAbs = Math.abs(ms);
-      if (msAbs >= d) {
-        return Math.round(ms / d) + "d";
-      }
-      if (msAbs >= h) {
-        return Math.round(ms / h) + "h";
-      }
-      if (msAbs >= m) {
-        return Math.round(ms / m) + "m";
-      }
-      if (msAbs >= s) {
-        return Math.round(ms / s) + "s";
-      }
-      return ms + "ms";
-    }
-    function fmtLong(ms) {
-      var msAbs = Math.abs(ms);
-      if (msAbs >= d) {
-        return plural(ms, msAbs, d, "day");
-      }
-      if (msAbs >= h) {
-        return plural(ms, msAbs, h, "hour");
-      }
-      if (msAbs >= m) {
-        return plural(ms, msAbs, m, "minute");
-      }
-      if (msAbs >= s) {
-        return plural(ms, msAbs, s, "second");
-      }
-      return ms + " ms";
-    }
-    function plural(ms, msAbs, n, name4) {
-      var isPlural = msAbs >= n * 1.5;
-      return Math.round(ms / n) + " " + name4 + (isPlural ? "s" : "");
-    }
-  }
-});
-
-// node_modules/debug/src/common.js
-var require_common = __commonJS({
-  "node_modules/debug/src/common.js"(exports, module) {
-    function setup(env) {
-      createDebug.debug = createDebug;
-      createDebug.default = createDebug;
-      createDebug.coerce = coerce3;
-      createDebug.disable = disable;
-      createDebug.enable = enable;
-      createDebug.enabled = enabled;
-      createDebug.humanize = require_ms();
-      createDebug.destroy = destroy;
-      Object.keys(env).forEach((key) => {
-        createDebug[key] = env[key];
-      });
-      createDebug.names = [];
-      createDebug.skips = [];
-      createDebug.formatters = {};
-      function selectColor(namespace) {
-        let hash = 0;
-        for (let i = 0; i < namespace.length; i++) {
-          hash = (hash << 5) - hash + namespace.charCodeAt(i);
-          hash |= 0;
-        }
-        return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
-      }
-      createDebug.selectColor = selectColor;
-      function createDebug(namespace) {
-        let prevTime;
-        let enableOverride = null;
-        let namespacesCache;
-        let enabledCache;
-        function debug3(...args) {
-          if (!debug3.enabled) {
-            return;
-          }
-          const self = debug3;
-          const curr = Number(/* @__PURE__ */ new Date());
-          const ms = curr - (prevTime || curr);
-          self.diff = ms;
-          self.prev = prevTime;
-          self.curr = curr;
-          prevTime = curr;
-          args[0] = createDebug.coerce(args[0]);
-          if (typeof args[0] !== "string") {
-            args.unshift("%O");
-          }
-          let index = 0;
-          args[0] = args[0].replace(/%([a-zA-Z%])/g, (match2, format3) => {
-            if (match2 === "%%") {
-              return "%";
-            }
-            index++;
-            const formatter = createDebug.formatters[format3];
-            if (typeof formatter === "function") {
-              const val = args[index];
-              match2 = formatter.call(self, val);
-              args.splice(index, 1);
-              index--;
-            }
-            return match2;
-          });
-          createDebug.formatArgs.call(self, args);
-          const logFn = self.log || createDebug.log;
-          logFn.apply(self, args);
-        }
-        debug3.namespace = namespace;
-        debug3.useColors = createDebug.useColors();
-        debug3.color = createDebug.selectColor(namespace);
-        debug3.extend = extend;
-        debug3.destroy = createDebug.destroy;
-        Object.defineProperty(debug3, "enabled", {
-          enumerable: true,
-          configurable: false,
-          get: () => {
-            if (enableOverride !== null) {
-              return enableOverride;
-            }
-            if (namespacesCache !== createDebug.namespaces) {
-              namespacesCache = createDebug.namespaces;
-              enabledCache = createDebug.enabled(namespace);
-            }
-            return enabledCache;
-          },
-          set: (v) => {
-            enableOverride = v;
-          }
-        });
-        if (typeof createDebug.init === "function") {
-          createDebug.init(debug3);
-        }
-        return debug3;
-      }
-      function extend(namespace, delimiter) {
-        const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace);
-        newDebug.log = this.log;
-        return newDebug;
-      }
-      function enable(namespaces) {
-        createDebug.save(namespaces);
-        createDebug.namespaces = namespaces;
-        createDebug.names = [];
-        createDebug.skips = [];
-        let i;
-        const split = (typeof namespaces === "string" ? namespaces : "").split(/[\s,]+/);
-        const len = split.length;
-        for (i = 0; i < len; i++) {
-          if (!split[i]) {
-            continue;
-          }
-          namespaces = split[i].replace(/\*/g, ".*?");
-          if (namespaces[0] === "-") {
-            createDebug.skips.push(new RegExp("^" + namespaces.slice(1) + "$"));
-          } else {
-            createDebug.names.push(new RegExp("^" + namespaces + "$"));
-          }
-        }
-      }
-      function disable() {
-        const namespaces = [
-          ...createDebug.names.map(toNamespace),
-          ...createDebug.skips.map(toNamespace).map((namespace) => "-" + namespace)
-        ].join(",");
-        createDebug.enable("");
-        return namespaces;
-      }
-      function enabled(name4) {
-        if (name4[name4.length - 1] === "*") {
-          return true;
-        }
-        let i;
-        let len;
-        for (i = 0, len = createDebug.skips.length; i < len; i++) {
-          if (createDebug.skips[i].test(name4)) {
-            return false;
-          }
-        }
-        for (i = 0, len = createDebug.names.length; i < len; i++) {
-          if (createDebug.names[i].test(name4)) {
-            return true;
-          }
-        }
-        return false;
-      }
-      function toNamespace(regexp) {
-        return regexp.toString().substring(2, regexp.toString().length - 2).replace(/\.\*\?$/, "*");
-      }
-      function coerce3(val) {
-        if (val instanceof Error) {
-          return val.stack || val.message;
-        }
-        return val;
-      }
-      function destroy() {
-        console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
-      }
-      createDebug.enable(createDebug.load());
-      return createDebug;
-    }
-    module.exports = setup;
-  }
-});
-
-// node_modules/debug/src/browser.js
-var require_browser = __commonJS({
-  "node_modules/debug/src/browser.js"(exports, module) {
-    exports.formatArgs = formatArgs;
-    exports.save = save;
-    exports.load = load;
-    exports.useColors = useColors;
-    exports.storage = localstorage();
-    exports.destroy = /* @__PURE__ */ (() => {
-      let warned = false;
-      return () => {
-        if (!warned) {
-          warned = true;
-          console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
-        }
-      };
-    })();
-    exports.colors = [
-      "#0000CC",
-      "#0000FF",
-      "#0033CC",
-      "#0033FF",
-      "#0066CC",
-      "#0066FF",
-      "#0099CC",
-      "#0099FF",
-      "#00CC00",
-      "#00CC33",
-      "#00CC66",
-      "#00CC99",
-      "#00CCCC",
-      "#00CCFF",
-      "#3300CC",
-      "#3300FF",
-      "#3333CC",
-      "#3333FF",
-      "#3366CC",
-      "#3366FF",
-      "#3399CC",
-      "#3399FF",
-      "#33CC00",
-      "#33CC33",
-      "#33CC66",
-      "#33CC99",
-      "#33CCCC",
-      "#33CCFF",
-      "#6600CC",
-      "#6600FF",
-      "#6633CC",
-      "#6633FF",
-      "#66CC00",
-      "#66CC33",
-      "#9900CC",
-      "#9900FF",
-      "#9933CC",
-      "#9933FF",
-      "#99CC00",
-      "#99CC33",
-      "#CC0000",
-      "#CC0033",
-      "#CC0066",
-      "#CC0099",
-      "#CC00CC",
-      "#CC00FF",
-      "#CC3300",
-      "#CC3333",
-      "#CC3366",
-      "#CC3399",
-      "#CC33CC",
-      "#CC33FF",
-      "#CC6600",
-      "#CC6633",
-      "#CC9900",
-      "#CC9933",
-      "#CCCC00",
-      "#CCCC33",
-      "#FF0000",
-      "#FF0033",
-      "#FF0066",
-      "#FF0099",
-      "#FF00CC",
-      "#FF00FF",
-      "#FF3300",
-      "#FF3333",
-      "#FF3366",
-      "#FF3399",
-      "#FF33CC",
-      "#FF33FF",
-      "#FF6600",
-      "#FF6633",
-      "#FF9900",
-      "#FF9933",
-      "#FFCC00",
-      "#FFCC33"
-    ];
-    function useColors() {
-      if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) {
-        return true;
-      }
-      if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
-        return false;
-      }
-      return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773
-      typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31?
-      // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
-      typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker
-      typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/);
-    }
-    function formatArgs(args) {
-      args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? "%c " : " ") + "+" + module.exports.humanize(this.diff);
-      if (!this.useColors) {
-        return;
-      }
-      const c = "color: " + this.color;
-      args.splice(1, 0, c, "color: inherit");
-      let index = 0;
-      let lastC = 0;
-      args[0].replace(/%[a-zA-Z%]/g, (match2) => {
-        if (match2 === "%%") {
-          return;
-        }
-        index++;
-        if (match2 === "%c") {
-          lastC = index;
-        }
-      });
-      args.splice(lastC, 0, c);
-    }
-    exports.log = console.debug || console.log || (() => {
-    });
-    function save(namespaces) {
-      try {
-        if (namespaces) {
-          exports.storage.setItem("debug", namespaces);
-        } else {
-          exports.storage.removeItem("debug");
-        }
-      } catch (error) {
-      }
-    }
-    function load() {
-      let r;
-      try {
-        r = exports.storage.getItem("debug");
-      } catch (error) {
-      }
-      if (!r && typeof process !== "undefined" && "env" in process) {
-        r = process.env.DEBUG;
-      }
-      return r;
-    }
-    function localstorage() {
-      try {
-        return localStorage;
-      } catch (error) {
-      }
-    }
-    module.exports = require_common()(exports);
-    var { formatters } = module.exports;
-    formatters.j = function(v) {
-      try {
-        return JSON.stringify(v);
-      } catch (error) {
-        return "[UnexpectedJSONParseError]: " + error.message;
-      }
-    };
-  }
-});
-
-// node_modules/debug/src/node.js
-var require_node = __commonJS({
-  "node_modules/debug/src/node.js"(exports, module) {
-    var tty = __require("tty");
-    var util = __require("util");
-    exports.init = init;
-    exports.log = log12;
-    exports.formatArgs = formatArgs;
-    exports.save = save;
-    exports.load = load;
-    exports.useColors = useColors;
-    exports.destroy = util.deprecate(
-      () => {
-      },
-      "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."
-    );
-    exports.colors = [6, 2, 3, 4, 5, 1];
-    try {
-      const supportsColor = __require("supports-color");
-      if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
-        exports.colors = [
-          20,
-          21,
-          26,
-          27,
-          32,
-          33,
-          38,
-          39,
-          40,
-          41,
-          42,
-          43,
-          44,
-          45,
-          56,
-          57,
-          62,
-          63,
-          68,
-          69,
-          74,
-          75,
-          76,
-          77,
-          78,
-          79,
-          80,
-          81,
-          92,
-          93,
-          98,
-          99,
-          112,
-          113,
-          128,
-          129,
-          134,
-          135,
-          148,
-          149,
-          160,
-          161,
-          162,
-          163,
-          164,
-          165,
-          166,
-          167,
-          168,
-          169,
-          170,
-          171,
-          172,
-          173,
-          178,
-          179,
-          184,
-          185,
-          196,
-          197,
-          198,
-          199,
-          200,
-          201,
-          202,
-          203,
-          204,
-          205,
-          206,
-          207,
-          208,
-          209,
-          214,
-          215,
-          220,
-          221
-        ];
-      }
-    } catch (error) {
-    }
-    exports.inspectOpts = Object.keys(process.env).filter((key) => {
-      return /^debug_/i.test(key);
-    }).reduce((obj, key) => {
-      const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => {
-        return k.toUpperCase();
-      });
-      let val = process.env[key];
-      if (/^(yes|on|true|enabled)$/i.test(val)) {
-        val = true;
-      } else if (/^(no|off|false|disabled)$/i.test(val)) {
-        val = false;
-      } else if (val === "null") {
-        val = null;
-      } else {
-        val = Number(val);
-      }
-      obj[prop] = val;
-      return obj;
-    }, {});
-    function useColors() {
-      return "colors" in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd);
-    }
-    function formatArgs(args) {
-      const { namespace: name4, useColors: useColors2 } = this;
-      if (useColors2) {
-        const c = this.color;
-        const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c);
-        const prefix = `  ${colorCode};1m${name4} \x1B[0m`;
-        args[0] = prefix + args[0].split("\n").join("\n" + prefix);
-        args.push(colorCode + "m+" + module.exports.humanize(this.diff) + "\x1B[0m");
-      } else {
-        args[0] = getDate() + name4 + " " + args[0];
-      }
-    }
-    function getDate() {
-      if (exports.inspectOpts.hideDate) {
-        return "";
-      }
-      return (/* @__PURE__ */ new Date()).toISOString() + " ";
-    }
-    function log12(...args) {
-      return process.stderr.write(util.format(...args) + "\n");
-    }
-    function save(namespaces) {
-      if (namespaces) {
-        process.env.DEBUG = namespaces;
-      } else {
-        delete process.env.DEBUG;
-      }
-    }
-    function load() {
-      return process.env.DEBUG;
-    }
-    function init(debug3) {
-      debug3.inspectOpts = {};
-      const keys = Object.keys(exports.inspectOpts);
-      for (let i = 0; i < keys.length; i++) {
-        debug3.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
-      }
-    }
-    module.exports = require_common()(exports);
-    var { formatters } = module.exports;
-    formatters.o = function(v) {
-      this.inspectOpts.colors = this.useColors;
-      return util.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" ");
-    };
-    formatters.O = function(v) {
-      this.inspectOpts.colors = this.useColors;
-      return util.inspect(v, this.inspectOpts);
-    };
-  }
-});
-
-// node_modules/debug/src/index.js
-var require_src2 = __commonJS({
-  "node_modules/debug/src/index.js"(exports, module) {
-    if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) {
-      module.exports = require_browser();
-    } else {
-      module.exports = require_node();
-    }
-  }
-});
-
-// node_modules/balanced-match/index.js
-var require_balanced_match = __commonJS({
-  "node_modules/balanced-match/index.js"(exports, module) {
-    "use strict";
-    module.exports = balanced2;
-    function balanced2(a, b, str) {
-      if (a instanceof RegExp)
-        a = maybeMatch(a, str);
-      if (b instanceof RegExp)
-        b = maybeMatch(b, str);
-      var r = range(a, b, str);
-      return r && {
-        start: r[0],
-        end: r[1],
-        pre: str.slice(0, r[0]),
-        body: str.slice(r[0] + a.length, r[1]),
-        post: str.slice(r[1] + b.length)
-      };
-    }
-    function maybeMatch(reg, str) {
-      var m = str.match(reg);
-      return m ? m[0] : null;
-    }
-    balanced2.range = range;
-    function range(a, b, str) {
-      var begs, beg, left, right, result;
-      var ai = str.indexOf(a);
-      var bi = str.indexOf(b, ai + 1);
-      var i = ai;
-      if (ai >= 0 && bi > 0) {
-        if (a === b) {
-          return [ai, bi];
-        }
-        begs = [];
-        left = str.length;
-        while (i >= 0 && !result) {
-          if (i == ai) {
-            begs.push(i);
-            ai = str.indexOf(a, i + 1);
-          } else if (begs.length == 1) {
-            result = [begs.pop(), bi];
-          } else {
-            beg = begs.pop();
-            if (beg < left) {
-              left = beg;
-              right = bi;
-            }
-            bi = str.indexOf(b, i + 1);
-          }
-          i = ai < bi && ai >= 0 ? ai : bi;
-        }
-        if (begs.length) {
-          result = [left, right];
-        }
-      }
-      return result;
-    }
-  }
-});
-
-// node_modules/brace-expansion/index.js
-var require_brace_expansion = __commonJS({
-  "node_modules/brace-expansion/index.js"(exports, module) {
-    var balanced2 = require_balanced_match();
-    module.exports = expandTop;
-    var escSlash = "\0SLASH" + Math.random() + "\0";
-    var escOpen = "\0OPEN" + Math.random() + "\0";
-    var escClose = "\0CLOSE" + Math.random() + "\0";
-    var escComma = "\0COMMA" + Math.random() + "\0";
-    var escPeriod = "\0PERIOD" + Math.random() + "\0";
-    function numeric(str) {
-      return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0);
-    }
-    function escapeBraces(str) {
-      return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod);
-    }
-    function unescapeBraces(str) {
-      return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join(".");
-    }
-    function parseCommaParts(str) {
-      if (!str)
-        return [""];
-      var parts = [];
-      var m = balanced2("{", "}", str);
-      if (!m)
-        return str.split(",");
-      var pre = m.pre;
-      var body = m.body;
-      var post = m.post;
-      var p = pre.split(",");
-      p[p.length - 1] += "{" + body + "}";
-      var postParts = parseCommaParts(post);
-      if (post.length) {
-        p[p.length - 1] += postParts.shift();
-        p.push.apply(p, postParts);
-      }
-      parts.push.apply(parts, p);
-      return parts;
-    }
-    function expandTop(str) {
-      if (!str)
-        return [];
-      if (str.substr(0, 2) === "{}") {
-        str = "\\{\\}" + str.substr(2);
-      }
-      return expand2(escapeBraces(str), true).map(unescapeBraces);
-    }
-    function embrace(str) {
-      return "{" + str + "}";
-    }
-    function isPadded(el) {
-      return /^-?0\d/.test(el);
-    }
-    function lte(i, y) {
-      return i <= y;
-    }
-    function gte(i, y) {
-      return i >= y;
-    }
-    function expand2(str, isTop) {
-      var expansions = [];
-      var m = balanced2("{", "}", str);
-      if (!m)
-        return [str];
-      var pre = m.pre;
-      var post = m.post.length ? expand2(m.post, false) : [""];
-      if (/\$$/.test(m.pre)) {
-        for (var k = 0; k < post.length; k++) {
-          var expansion = pre + "{" + m.body + "}" + post[k];
-          expansions.push(expansion);
-        }
-      } else {
-        var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
-        var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
-        var isSequence = isNumericSequence || isAlphaSequence;
-        var isOptions = m.body.indexOf(",") >= 0;
-        if (!isSequence && !isOptions) {
-          if (m.post.match(/,.*\}/)) {
-            str = m.pre + "{" + m.body + escClose + m.post;
-            return expand2(str);
-          }
-          return [str];
-        }
-        var n;
-        if (isSequence) {
-          n = m.body.split(/\.\./);
-        } else {
-          n = parseCommaParts(m.body);
-          if (n.length === 1) {
-            n = expand2(n[0], false).map(embrace);
-            if (n.length === 1) {
-              return post.map(function(p) {
-                return m.pre + n[0] + p;
-              });
-            }
-          }
-        }
-        var N;
-        if (isSequence) {
-          var x = numeric(n[0]);
-          var y = numeric(n[1]);
-          var width = Math.max(n[0].length, n[1].length);
-          var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1;
-          var test = lte;
-          var reverse = y < x;
-          if (reverse) {
-            incr *= -1;
-            test = gte;
-          }
-          var pad = n.some(isPadded);
-          N = [];
-          for (var i = x; test(i, y); i += incr) {
-            var c;
-            if (isAlphaSequence) {
-              c = String.fromCharCode(i);
-              if (c === "\\")
-                c = "";
-            } else {
-              c = String(i);
-              if (pad) {
-                var need = width - c.length;
-                if (need > 0) {
-                  var z = new Array(need + 1).join("0");
-                  if (i < 0)
-                    c = "-" + z + c.slice(1);
-                  else
-                    c = z + c;
-                }
-              }
-            }
-            N.push(c);
-          }
-        } else {
-          N = [];
-          for (var j = 0; j < n.length; j++) {
-            N.push.apply(N, expand2(n[j], false));
-          }
-        }
-        for (var j = 0; j < N.length; j++) {
-          for (var k = 0; k < post.length; k++) {
-            var expansion = pre + N[j] + post[k];
-            if (!isTop || isSequence || expansion)
-              expansions.push(expansion);
-          }
-        }
-      }
-      return expansions;
-    }
-  }
-});
-
-// node_modules/fast-write-atomic/index.js
-var require_fast_write_atomic = __commonJS({
-  "node_modules/fast-write-atomic/index.js"(exports, module) {
-    "use strict";
-    var { open, write: write2, close, rename, fsync, unlink } = __require("fs");
-    var { join, dirname } = __require("path");
-    var counter = 0;
-    function cleanup(dest, err, cb) {
-      unlink(dest, function() {
-        cb(err);
-      });
-    }
-    function closeAndCleanup(fd, dest, err, cb) {
-      close(fd, cleanup.bind(null, dest, err, cb));
-    }
-    function writeLoop(fd, content, contentLength, offset, cb) {
-      write2(fd, content, offset, function(err, bytesWritten) {
-        if (err) {
-          cb(err);
-          return;
-        }
-        return bytesWritten < contentLength - offset ? writeLoop(fd, content, contentLength, offset + bytesWritten, cb) : cb(null);
-      });
-    }
-    function openLoop(dest, cb) {
-      open(dest, "w", function(err, fd) {
-        if (err) {
-          return err.code === "EMFILE" ? openLoop(dest, cb) : cb(err);
-        }
-        cb(null, fd);
-      });
-    }
-    function writeAtomic2(path6, content, cb) {
-      const tmp = join(dirname(path6), "." + process.pid + "." + counter++);
-      openLoop(tmp, function(err, fd) {
-        if (err) {
-          cb(err);
-          return;
-        }
-        const contentLength = Buffer.byteLength(content);
-        writeLoop(fd, content, contentLength, 0, function(err2) {
-          if (err2) {
-            closeAndCleanup(fd, tmp, err2, cb);
-            return;
-          }
-          fsync(fd, function(err3) {
-            if (err3) {
-              closeAndCleanup(fd, tmp, err3, cb);
-              return;
-            }
-            close(fd, function(err4) {
-              if (err4) {
-                cleanup(tmp, err4, cb);
-                return;
-              }
-              rename(tmp, path6, (err5) => {
-                if (err5) {
-                  cleanup(tmp, err5, cb);
-                  return;
-                }
-                cb(null);
-              });
-            });
-          });
-        });
-        content = null;
-      });
-    }
-    module.exports = writeAtomic2;
-  }
-});
-
-// src/bucketManager.js
-import {
-  CreateBucketCommand,
-  DeleteBucketCommand,
-  GetBucketAclCommand,
-  ListBucketsCommand,
-  PutBucketAclCommand,
-  S3Client
-} from "@aws-sdk/client-s3";
-var BucketManager = class {
-  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
-  #DEFAULT_REGION = "us-east-1";
-  #client;
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-bucket
-   * @example
-   * import { BucketManager } from "@filebase/sdk";
-   * const bucketManager = new BucketManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = {
-      credentials: {
-        accessKeyId: clientKey,
-        secretAccessKey: clientSecret
-      },
-      endpoint: clientEndpoint,
-      region: this.#DEFAULT_REGION,
-      forcePathStyle: true
-    };
-    this.#client = new S3Client(clientConfiguration);
-  }
-  /**
-   * @typedef {Object} bucket
-   * @property {string} Name The name of the bucket
-   * @property {date} Date the bucket was created
-   */
-  /**
-   * @summary Creates a new bucket with the specified name.
-   * @param {string} name - The name of the bucket to create.
-   * @returns {Promise} - A promise that resolves when the bucket is created.
-   * @example
-   * // Create bucket with name of `create-bucket-example`
-   * await bucketManager.create(`create-bucket-example`);
-   */
-  async create(name4) {
-    const command = new CreateBucketCommand({
-      Bucket: name4
-    });
-    return await this.#client.send(command);
-  }
-  /**
-   * @summary Lists the buckets in the client.
-   * @returns {Promise>} - A promise that resolves with an array of objects representing the buckets in the client.
-   * @example
-   * // List all buckets
-   * await bucketManager.list();
-   */
-  async list() {
-    const command = new ListBucketsCommand({}), { Buckets } = await this.#client.send(command);
-    return Buckets;
-  }
-  /**
-   * @summary Deletes the specified bucket.
-   * @param {string} name - The name of the bucket to delete.
-   * @returns {Promise} - A promise that resolves when the bucket is deleted.
-   * @example
-   * // Delete bucket with name of `bucket-name-to-delete`
-   * await bucketManager.delete(`bucket-name-to-delete`);
-   */
-  async delete(name4) {
-    const command = new DeleteBucketCommand({
-      Bucket: name4
-    });
-    await this.#client.send(command);
-    return true;
-  }
-  /**
-   * @summary Sets the privacy of a given bucket.
-   * @param {string} name - The name of the bucket to toggle.
-   * @param {boolean} targetState - The new target state. [true=private,false=public]
-   * @returns {Promise} A promise that resolves to true if the bucket was successfully toggled.
-   * @example
-   * // Toggle bucket with label of `toggle-bucket-example`
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, true);  // Enabled
-   * await bucketManager.setPrivacy(`toggle-bucket-example`, false); // Disabled
-   */
-  async setPrivacy(name4, targetState) {
-    const command = new PutBucketAclCommand({
-      Bucket: name4,
-      ACL: targetState ? "private" : "public-read"
-    });
-    await this.#client.send(command);
-    return true;
-  }
-  /**
-   * @summary Gets the privacy of a given bucket
-   * @param {string} name - The name of the bucket to query.
-   * @returns {Promise} A promise that resolves to true if the bucket is private.
-   */
-  async getPrivacy(name4) {
-    const command = new GetBucketAclCommand({
-      Bucket: name4
-    });
-    const response = await this.#client.send(command), readPermission = response.Grants.find((grant) => {
-      return grant.Grantee.Type === "Group" && grant.Permission === "READ";
-    });
-    return !(typeof readPermission !== "undefined");
-  }
-};
-var bucketManager_default = BucketManager;
-
-// src/gatewayManager.js
-import axios2 from "axios";
-
-// src/helpers.js
-import axios from "axios";
-var GATEWAY_DEFAULT_TIMEOUT = 6e4;
-async function downloadFromGateway(cid, options) {
-  if (typeof options.endpoint !== "string") {
-    throw new Error(`Default Gateway must be set`);
-  }
-  const downloadHeaders = {};
-  if (options.token) {
-    downloadHeaders["x-filebase-gateway-token"] = options.token;
-  }
-  const downloadResponse = await axios.request({
-    method: "GET",
-    baseURL: options.endpoint,
-    url: `/ipfs/${cid}`,
-    headers: downloadHeaders,
-    type: "stream",
-    timeout: (options == null ? void 0 : options.timeout) || GATEWAY_DEFAULT_TIMEOUT
-  });
-  return downloadResponse.data;
-}
-function apiErrorHandler(err) {
-  var _a, _b, _c;
-  if ((err == null ? void 0 : err.response) && ((_a = err == null ? void 0 : err.response) == null ? void 0 : _a.status) && (err.response.status.toString()[0] === "4" || err.response.status.toString()[0] === "5")) {
-    throw new Error(
-      ((_b = err.response.data.error) == null ? void 0 : _b.details) || ((_c = err.response.data.error) == null ? void 0 : _c.reason) || err
-    );
-  }
-  throw err;
-}
-
-// src/gatewayManager.js
-var GatewayManager = class {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 6e4;
-  #client;
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-gateway
-   * @example
-   * import { GatewayManager } from "@filebase/sdk";
-   * const gatewayManager = new GatewayManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_GW_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
-      "base64"
-    ), baseURL = `${clientEndpoint}/v1/gateways`;
-    this.#client = axios2.create({
-      baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-      headers: { Authorization: `Bearer ${encodedToken}` }
-    });
-  }
-  /**
-   * @typedef {Object} gateway
-   * @property {string} name Name for the gateway
-   * @property {string} domain Custom Domain for the gateway
-   * @property {boolean} enabled Whether the gateway is enabled or not
-   * @property {string} private Whether the gateway is scoped to users content
-   * @property {date} created_at Date the gateway was created
-   * @property {date} updated_at Date the gateway was last updated
-   */
-  /**
-   * @typedef {Object} gatewayOptions
-   * @property {boolean} [domain] Optional Domain to allow for using a Custom Domain
-   * @property {string} [enabled] Optional Toggle to use for enabling the gateway
-   * @property {boolean} [private] Optional Boolean determining if gateway is Public or Private
-   */
-  /**
-   * @summary Creates a gateway with the given name and options
-   * @param {string} name Unique name across entire platform for the gateway.  Must be a valid subdomain name.
-   * @param {gatewayOptions} [options]
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Create gateway with name of `create-gateway-example` and a custom domain of `cname.mycustomdomain.com`.
-   * // The custom domain must already exist and have a CNAME record pointed at `create-gateway-example.myfilebase.com`.
-   * await gatewayManager.create(`create-gateway-example`, {
-   *   domain: `cname.mycustomdomain.com`
-   * });
-   */
-  async create(name4, options = {}) {
-    try {
-      let createOptions = {
-        name: name4
-      };
-      if (typeof options.domain === "string") {
-        createOptions.domain = options.domain;
-      }
-      if (typeof options.enabled === "boolean") {
-        createOptions.enabled = options.enabled;
-      }
-      if (typeof options.private === "boolean") {
-        createOptions.private = options.private;
-      }
-      const createResponse = await this.#client.request({
-        method: "POST",
-        data: createOptions
-      });
-      return createResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Deletes a gateway with the given name.
-   * @param {string} name - The name of the gateway to delete.
-   * @returns {Promise} - A promise that resolves to true if the gateway was successfully deleted.
-   * @example
-   * // Delete gateway with name of `delete-gateway-example`
-   * await gatewayManager.delete(`delete-name-example`);
-   */
-  async delete(name4) {
-    try {
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${name4}`,
-        validateStatus: (status) => {
-          return status === 204;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns the value of a gateway
-   * @param {string} name - Parameter representing the name to get.
-   * @returns {Promise} - A promise that resolves to the value of a gateway.
-   * @example
-   * // Get gateway with name of `gateway-get-example`
-   * await gatewayManager.get(`gateway-get-example`);
-   */
-  async get(name4) {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-        url: `/${name4}`,
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        }
-      });
-      return getResponse.status === 200 ? getResponse.data : false;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns a list of gateways
-   * @returns {Promise>} - A promise that resolves to an array of gateways.
-   * @example
-   * // List all gateways
-   * await gatewayManager.list();
-   */
-  async list() {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET"
-      });
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Updates the specified gateway.
-   * @param {string} name - The name of the gateway to update.
-   * @param {gatewayOptions} options - The options for the update operation.
-   *
-   * @returns {Promise} - A Promise that resolves to true if the gateway was updated.
-   * @example
-   * // Update gateway with name of `update-gateway-example` and set the gateway to only serve CIDs pinned by user.
-   * await gatewayManager.update(`update-gateway-example`, {
-   *   private: true
-   * });
-   */
-  async update(name4, options) {
-    try {
-      const updateOptions = {
-        name: name4
-      };
-      if (options == null ? void 0 : options.domain) {
-        updateOptions.domain = String(options.private);
-      }
-      if (options == null ? void 0 : options.enabled) {
-        updateOptions.enabled = Boolean(options.enabled);
-      }
-      if (options == null ? void 0 : options.private) {
-        updateOptions.private = Boolean(options.private);
-      }
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name4}`,
-        data: updateOptions,
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Toggles the enabled state of a given gateway.
-   * @param {string} name - The name of the gateway to toggle.
-   * @param {boolean} targetState - The new target state.
-   * @returns {Promise} A promise that resolves to true if the gateway was successfully toggled.
-   * @example
-   * // Toggle gateway with label of `toggle-gateway-example`
-   * await gatewayManager.toggle(`toggle-gateway-example`, true);  // Enabled
-   * await gatewayManager.toggle(`toggle-gateway-example`, false); // Disabled
-   */
-  async toggle(name4, targetState) {
-    try {
-      await this.#client.request({
-        method: "PUT",
-        url: `/${name4}`,
-        data: {
-          enabled: Boolean(targetState)
-        },
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-};
-var gatewayManager_default = GatewayManager;
-
-// src/nameManager.js
-import axios3 from "axios";
-var NameManager = class {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 6e4;
-  #client;
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @tutorial quickstart-name
-   * @example
-   * import { NameManager } from "@filebase/sdk";
-   * const nameManager = new NameManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD");
-   */
-  constructor(clientKey, clientSecret) {
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, encodedToken = Buffer.from(`${clientKey}:${clientSecret}`).toString(
-      "base64"
-    ), baseURL = `${clientEndpoint}/v1/names`;
-    this.#client = axios3.create({
-      baseURL,
-      timeout: this.#DEFAULT_TIMEOUT,
-      headers: { Authorization: `Bearer ${encodedToken}` }
-    });
-  }
-  /**
-   * @typedef {Object} name
-   * @property {string} label Descriptive label for the Key
-   * @property {string} network_key IPNS Key CID
-   * @property {string} cid Value that name Publishes
-   * @property {number} sequence Version Number for the name
-   * @property {boolean} enabled Whether the name is being Published or not
-   * @property {date} published_at Date the name was last published to the DHT
-   * @property {date} created_at Date the name was created
-   * @property {date} updated_at Date the name was last updated
-   */
-  /**
-   * @typedef {Object} nameOptions
-   * @property {boolean} [enabled] Whether the name is enabled or not.
-   */
-  /**
-   * @summary Creates a new IPNS name with the given name as the label and CID.
-   * @param {string} label - The label of the new IPNS name.
-   * @param {string} cid - The CID of the IPNS name.
-   * @param {nameOptions} [options] - Additional options for the IPNS name.
-   * @returns {Promise} - A Promise that resolves with the response JSON.
-   * @example
-   * // Create IPNS name with label of `create-name-example` and CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   * await nameManager.create(`create-name-example`, `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`);
-   */
-  async create(label, cid, options = {
-    enabled: true
-  }) {
-    try {
-      const createResponse = await this.#client.request({
-        method: "POST",
-        data: {
-          label,
-          cid,
-          enabled: (options == null ? void 0 : options.enabled) !== false
-        }
-      });
-      return createResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Imports a user's IPNS private key.
-   * @param {string} label - The label for the IPNS name.
-   * @param {string} cid - The CID (Content Identifier) of the data.
-   * @param {string} privateKey - The existing private key encoded in Base64.
-   * @param {nameOptions} [options] - Additional options for the IPNS name.
-   * @returns {Promise} - A Promise that resolves to the server response.
-   * @example
-   * // Import IPNS private key with label of `create-name-example`, CID of `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   * // and a private key encoded with base64
-   * await nameManager.import(
-   *  `create-name-example`,
-   *  `QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm`
-   *  `BASE64_ENCODED_PRIVATEKEY`
-   * );
-   */
-  async import(label, cid, privateKey, options = {
-    enabled: true
-  }) {
-    try {
-      const importResponse = await this.#client.request({
-        method: "POST",
-        data: {
-          label,
-          cid,
-          network_private_key: privateKey,
-          enabled: (options == null ? void 0 : options.enabled) !== false
-        }
-      });
-      return importResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Updates the specified name with the given CID.
-   * @param {string} label - The label of the name to update.
-   * @param {string} cid - The cid to associate with the name.
-   * @param {nameOptions} options - The options for the set operation.
-   *
-   * @returns {Promise} - A Promise that resolves to true if the IPNS name was updated.
-   * @example
-   * // Update name with label of `update-name-example` and set the value of the IPNS name.
-   * await nameManager.update(`update-name-example`, `bafybeidt4nmaci476lyon2mvgfmwyzysdazienhxs2bqnfpdainzjuwjom`);
-   */
-  async update(label, cid, options = {}) {
-    try {
-      const updateOptions = {
-        cid
-      };
-      if (options == null ? void 0 : options.enabled) {
-        updateOptions.enabled = Boolean(options.enabled);
-      }
-      await this.#client.request({
-        method: "PUT",
-        url: `/${label}`,
-        data: updateOptions,
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns the value of an IPNS name
-   * @param {string} label - Parameter representing the label of the name to resolve.
-   * @returns {Promise} - A promise that resolves to the value of a name.
-   * @example
-   * // Get IPNS name with label of `list-name-example`
-   * await nameManager.get(`list-name-example`);
-   */
-  async get(label) {
-    try {
-      const getResponse = await this.#client.request({
-        method: "GET",
-        url: `/${label}`,
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        }
-      });
-      return getResponse.status === 200 ? getResponse.data : false;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Returns a list of IPNS names
-   * @returns {Promise>} - A promise that resolves to an array of names.
-   * @example
-   * // List all IPNS names
-   * await nameManager.list();
-   */
-  async list() {
-    try {
-      const listResponse = await this.#client.request({
-        method: "GET"
-      });
-      return listResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Deletes an IPNS name with the given label.
-   * @param {string} label - The label of the IPNS name to delete.
-   * @returns {Promise} - A promise that resolves to true if the IPNS name was successfully deleted.
-   * @example
-   * // List IPNS name with label of `delete-name-example`
-   * await nameManager.delete(`delete-name-example`);
-   */
-  async delete(label) {
-    try {
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${label}`,
-        validateStatus: (status) => {
-          return status === 204;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Toggles the enabled state of a given IPNS name.
-   * @param {string} label - The label of the IPNS name to toggle.
-   * @param {boolean} targetState - The new target state.
-   * @returns {Promise} A promise that resolves to true if the IPNS name was successfully toggled.
-   * @example
-   * // Toggle IPNS name with label of `toggle-name-example`
-   * await nameManager.toggle(`toggle-name-example`, true);  // Enabled
-   * await nameManager.toggle(`toggle-name-example`, false); // Disabled
-   */
-  async toggle(label, targetState) {
-    try {
-      await this.#client.request({
-        method: "PUT",
-        url: `/${label}`,
-        data: {
-          enabled: targetState
-        },
-        validateStatus: (status) => {
-          return status === 200;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-};
-var nameManager_default = NameManager;
-
-// src/objectManager.js
-import {
-  CopyObjectCommand,
-  DeleteObjectCommand,
-  GetObjectCommand,
-  HeadObjectCommand,
-  ListObjectsV2Command,
-  S3Client as S3Client2
-} from "@aws-sdk/client-s3";
-import { Upload } from "@aws-sdk/lib-storage";
-
-// node_modules/@ipld/car/src/buffer-reader.js
-import fs from "fs";
-
-// node_modules/cborg/lib/is.js
-var typeofs = [
-  "string",
-  "number",
-  "bigint",
-  "symbol"
-];
-var objectTypeNames = [
-  "Function",
-  "Generator",
-  "AsyncGenerator",
-  "GeneratorFunction",
-  "AsyncGeneratorFunction",
-  "AsyncFunction",
-  "Observable",
-  "Array",
-  "Buffer",
-  "Object",
-  "RegExp",
-  "Date",
-  "Error",
-  "Map",
-  "Set",
-  "WeakMap",
-  "WeakSet",
-  "ArrayBuffer",
-  "SharedArrayBuffer",
-  "DataView",
-  "Promise",
-  "URL",
-  "HTMLElement",
-  "Int8Array",
-  "Uint8Array",
-  "Uint8ClampedArray",
-  "Int16Array",
-  "Uint16Array",
-  "Int32Array",
-  "Uint32Array",
-  "Float32Array",
-  "Float64Array",
-  "BigInt64Array",
-  "BigUint64Array"
-];
-function is(value) {
-  if (value === null) {
-    return "null";
-  }
-  if (value === void 0) {
-    return "undefined";
-  }
-  if (value === true || value === false) {
-    return "boolean";
-  }
-  const typeOf = typeof value;
-  if (typeofs.includes(typeOf)) {
-    return typeOf;
-  }
-  if (typeOf === "function") {
-    return "Function";
-  }
-  if (Array.isArray(value)) {
-    return "Array";
-  }
-  if (isBuffer(value)) {
-    return "Buffer";
-  }
-  const objectType = getObjectType(value);
-  if (objectType) {
-    return objectType;
-  }
-  return "Object";
-}
-function isBuffer(value) {
-  return value && value.constructor && value.constructor.isBuffer && value.constructor.isBuffer.call(null, value);
-}
-function getObjectType(value) {
-  const objectTypeName = Object.prototype.toString.call(value).slice(8, -1);
-  if (objectTypeNames.includes(objectTypeName)) {
-    return objectTypeName;
-  }
-  return void 0;
-}
-
-// node_modules/cborg/lib/token.js
-var Type = class {
-  /**
-   * @param {number} major
-   * @param {string} name
-   * @param {boolean} terminal
-   */
-  constructor(major, name4, terminal) {
-    this.major = major;
-    this.majorEncoded = major << 5;
-    this.name = name4;
-    this.terminal = terminal;
-  }
-  /* c8 ignore next 3 */
-  toString() {
-    return `Type[${this.major}].${this.name}`;
-  }
-  /**
-   * @param {Type} typ
-   * @returns {number}
-   */
-  compare(typ) {
-    return this.major < typ.major ? -1 : this.major > typ.major ? 1 : 0;
-  }
-};
-Type.uint = new Type(0, "uint", true);
-Type.negint = new Type(1, "negint", true);
-Type.bytes = new Type(2, "bytes", true);
-Type.string = new Type(3, "string", true);
-Type.array = new Type(4, "array", false);
-Type.map = new Type(5, "map", false);
-Type.tag = new Type(6, "tag", false);
-Type.float = new Type(7, "float", true);
-Type.false = new Type(7, "false", true);
-Type.true = new Type(7, "true", true);
-Type.null = new Type(7, "null", true);
-Type.undefined = new Type(7, "undefined", true);
-Type.break = new Type(7, "break", true);
-var Token = class {
-  /**
-   * @param {Type} type
-   * @param {any} [value]
-   * @param {number} [encodedLength]
-   */
-  constructor(type, value, encodedLength) {
-    this.type = type;
-    this.value = value;
-    this.encodedLength = encodedLength;
-    this.encodedBytes = void 0;
-    this.byteValue = void 0;
-  }
-  /* c8 ignore next 3 */
-  toString() {
-    return `Token[${this.type}].${this.value}`;
-  }
-};
-
-// node_modules/cborg/lib/byte-utils.js
-var useBuffer = globalThis.process && // @ts-ignore
-!globalThis.process.browser && // @ts-ignore
-globalThis.Buffer && // @ts-ignore
-typeof globalThis.Buffer.isBuffer === "function";
-var textDecoder = new TextDecoder();
-var textEncoder = new TextEncoder();
-function isBuffer2(buf2) {
-  return useBuffer && globalThis.Buffer.isBuffer(buf2);
-}
-function asU8A(buf2) {
-  if (!(buf2 instanceof Uint8Array)) {
-    return Uint8Array.from(buf2);
-  }
-  return isBuffer2(buf2) ? new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength) : buf2;
-}
-var toString = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    return end - start > 64 ? (
-      // eslint-disable-line operator-linebreak
-      // @ts-ignore
-      globalThis.Buffer.from(bytes.subarray(start, end)).toString("utf8")
-    ) : utf8Slice(bytes, start, end);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    return end - start > 64 ? textDecoder.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end);
-  }
-);
-var fromString = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {string} string
-   */
-  (string2) => {
-    return string2.length > 64 ? (
-      // eslint-disable-line operator-linebreak
-      // @ts-ignore
-      globalThis.Buffer.from(string2)
-    ) : utf8ToBytes(string2);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {string} string
-   */
-  (string2) => {
-    return string2.length > 64 ? textEncoder.encode(string2) : utf8ToBytes(string2);
-  }
-);
-var fromArray = (arr) => {
-  return Uint8Array.from(arr);
-};
-var slice = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    if (isBuffer2(bytes)) {
-      return new Uint8Array(bytes.subarray(start, end));
-    }
-    return bytes.slice(start, end);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes, start, end) => {
-    return bytes.slice(start, end);
-  }
-);
-var concat = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array[]} chunks
-   * @param {number} length
-   * @returns {Uint8Array}
-   */
-  (chunks, length4) => {
-    chunks = chunks.map((c) => c instanceof Uint8Array ? c : (
-      // eslint-disable-line operator-linebreak
-      // @ts-ignore
-      globalThis.Buffer.from(c)
-    ));
-    return asU8A(globalThis.Buffer.concat(chunks, length4));
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array[]} chunks
-   * @param {number} length
-   * @returns {Uint8Array}
-   */
-  (chunks, length4) => {
-    const out = new Uint8Array(length4);
-    let off = 0;
-    for (let b of chunks) {
-      if (off + b.length > out.length) {
-        b = b.subarray(0, out.length - off);
-      }
-      out.set(b, off);
-      off += b.length;
-    }
-    return out;
-  }
-);
-var alloc = useBuffer ? (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {number} size
-   * @returns {Uint8Array}
-   */
-  (size) => {
-    return globalThis.Buffer.allocUnsafe(size);
-  }
-) : (
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {number} size
-   * @returns {Uint8Array}
-   */
-  (size) => {
-    return new Uint8Array(size);
-  }
-);
-function compare(b1, b2) {
-  if (isBuffer2(b1) && isBuffer2(b2)) {
-    return b1.compare(b2);
-  }
-  for (let i = 0; i < b1.length; i++) {
-    if (b1[i] === b2[i]) {
-      continue;
-    }
-    return b1[i] < b2[i] ? -1 : 1;
-  }
-  return 0;
-}
-function utf8ToBytes(str) {
-  const out = [];
-  let p = 0;
-  for (let i = 0; i < str.length; i++) {
-    let c = str.charCodeAt(i);
-    if (c < 128) {
-      out[p++] = c;
-    } else if (c < 2048) {
-      out[p++] = c >> 6 | 192;
-      out[p++] = c & 63 | 128;
-    } else if ((c & 64512) === 55296 && i + 1 < str.length && (str.charCodeAt(i + 1) & 64512) === 56320) {
-      c = 65536 + ((c & 1023) << 10) + (str.charCodeAt(++i) & 1023);
-      out[p++] = c >> 18 | 240;
-      out[p++] = c >> 12 & 63 | 128;
-      out[p++] = c >> 6 & 63 | 128;
-      out[p++] = c & 63 | 128;
-    } else {
-      out[p++] = c >> 12 | 224;
-      out[p++] = c >> 6 & 63 | 128;
-      out[p++] = c & 63 | 128;
-    }
-  }
-  return out;
-}
-function utf8Slice(buf2, offset, end) {
-  const res = [];
-  while (offset < end) {
-    const firstByte = buf2[offset];
-    let codePoint = null;
-    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
-    if (offset + bytesPerSequence <= end) {
-      let secondByte, thirdByte, fourthByte, tempCodePoint;
-      switch (bytesPerSequence) {
-        case 1:
-          if (firstByte < 128) {
-            codePoint = firstByte;
-          }
-          break;
-        case 2:
-          secondByte = buf2[offset + 1];
-          if ((secondByte & 192) === 128) {
-            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
-            if (tempCodePoint > 127) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 3:
-          secondByte = buf2[offset + 1];
-          thirdByte = buf2[offset + 2];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
-            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 4:
-          secondByte = buf2[offset + 1];
-          thirdByte = buf2[offset + 2];
-          fourthByte = buf2[offset + 3];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
-            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
-              codePoint = tempCodePoint;
-            }
-          }
-      }
-    }
-    if (codePoint === null) {
-      codePoint = 65533;
-      bytesPerSequence = 1;
-    } else if (codePoint > 65535) {
-      codePoint -= 65536;
-      res.push(codePoint >>> 10 & 1023 | 55296);
-      codePoint = 56320 | codePoint & 1023;
-    }
-    res.push(codePoint);
-    offset += bytesPerSequence;
-  }
-  return decodeCodePointsArray(res);
-}
-var MAX_ARGUMENTS_LENGTH = 4096;
-function decodeCodePointsArray(codePoints) {
-  const len = codePoints.length;
-  if (len <= MAX_ARGUMENTS_LENGTH) {
-    return String.fromCharCode.apply(String, codePoints);
-  }
-  let res = "";
-  let i = 0;
-  while (i < len) {
-    res += String.fromCharCode.apply(
-      String,
-      codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
-    );
-  }
-  return res;
-}
-
-// node_modules/cborg/lib/bl.js
-var defaultChunkSize = 256;
-var Bl = class {
-  /**
-   * @param {number} [chunkSize]
-   */
-  constructor(chunkSize = defaultChunkSize) {
-    this.chunkSize = chunkSize;
-    this.cursor = 0;
-    this.maxCursor = -1;
-    this.chunks = [];
-    this._initReuseChunk = null;
-  }
-  reset() {
-    this.cursor = 0;
-    this.maxCursor = -1;
-    if (this.chunks.length) {
-      this.chunks = [];
-    }
-    if (this._initReuseChunk !== null) {
-      this.chunks.push(this._initReuseChunk);
-      this.maxCursor = this._initReuseChunk.length - 1;
-    }
-  }
-  /**
-   * @param {Uint8Array|number[]} bytes
-   */
-  push(bytes) {
-    let topChunk = this.chunks[this.chunks.length - 1];
-    const newMax = this.cursor + bytes.length;
-    if (newMax <= this.maxCursor + 1) {
-      const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;
-      topChunk.set(bytes, chunkPos);
-    } else {
-      if (topChunk) {
-        const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1;
-        if (chunkPos < topChunk.length) {
-          this.chunks[this.chunks.length - 1] = topChunk.subarray(0, chunkPos);
-          this.maxCursor = this.cursor - 1;
-        }
-      }
-      if (bytes.length < 64 && bytes.length < this.chunkSize) {
-        topChunk = alloc(this.chunkSize);
-        this.chunks.push(topChunk);
-        this.maxCursor += topChunk.length;
-        if (this._initReuseChunk === null) {
-          this._initReuseChunk = topChunk;
-        }
-        topChunk.set(bytes, 0);
-      } else {
-        this.chunks.push(bytes);
-        this.maxCursor += bytes.length;
-      }
-    }
-    this.cursor += bytes.length;
-  }
-  /**
-   * @param {boolean} [reset]
-   * @returns {Uint8Array}
-   */
-  toBytes(reset = false) {
-    let byts;
-    if (this.chunks.length === 1) {
-      const chunk = this.chunks[0];
-      if (reset && this.cursor > chunk.length / 2) {
-        byts = this.cursor === chunk.length ? chunk : chunk.subarray(0, this.cursor);
-        this._initReuseChunk = null;
-        this.chunks = [];
-      } else {
-        byts = slice(chunk, 0, this.cursor);
-      }
-    } else {
-      byts = concat(this.chunks, this.cursor);
-    }
-    if (reset) {
-      this.reset();
-    }
-    return byts;
-  }
-};
-
-// node_modules/cborg/lib/common.js
-var decodeErrPrefix = "CBOR decode error:";
-var encodeErrPrefix = "CBOR encode error:";
-var uintMinorPrefixBytes = [];
-uintMinorPrefixBytes[23] = 1;
-uintMinorPrefixBytes[24] = 2;
-uintMinorPrefixBytes[25] = 3;
-uintMinorPrefixBytes[26] = 5;
-uintMinorPrefixBytes[27] = 9;
-function assertEnoughData(data, pos, need) {
-  if (data.length - pos < need) {
-    throw new Error(`${decodeErrPrefix} not enough data for type`);
-  }
-}
-
-// node_modules/cborg/lib/0uint.js
-var uintBoundaries = [24, 256, 65536, 4294967296, BigInt("18446744073709551616")];
-function readUint8(data, offset, options) {
-  assertEnoughData(data, offset, 1);
-  const value = data[offset];
-  if (options.strict === true && value < uintBoundaries[0]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  return value;
-}
-function readUint16(data, offset, options) {
-  assertEnoughData(data, offset, 2);
-  const value = data[offset] << 8 | data[offset + 1];
-  if (options.strict === true && value < uintBoundaries[1]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  return value;
-}
-function readUint32(data, offset, options) {
-  assertEnoughData(data, offset, 4);
-  const value = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];
-  if (options.strict === true && value < uintBoundaries[2]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  return value;
-}
-function readUint64(data, offset, options) {
-  assertEnoughData(data, offset, 8);
-  const hi = data[offset] * 16777216 + (data[offset + 1] << 16) + (data[offset + 2] << 8) + data[offset + 3];
-  const lo = data[offset + 4] * 16777216 + (data[offset + 5] << 16) + (data[offset + 6] << 8) + data[offset + 7];
-  const value = (BigInt(hi) << BigInt(32)) + BigInt(lo);
-  if (options.strict === true && value < uintBoundaries[3]) {
-    throw new Error(`${decodeErrPrefix} integer encoded in more bytes than necessary (strict decode)`);
-  }
-  if (value <= Number.MAX_SAFE_INTEGER) {
-    return Number(value);
-  }
-  if (options.allowBigInt === true) {
-    return value;
-  }
-  throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`);
-}
-function decodeUint8(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint8(data, pos + 1, options), 2);
-}
-function decodeUint16(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint16(data, pos + 1, options), 3);
-}
-function decodeUint32(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint32(data, pos + 1, options), 5);
-}
-function decodeUint64(data, pos, _minor, options) {
-  return new Token(Type.uint, readUint64(data, pos + 1, options), 9);
-}
-function encodeUint(buf2, token) {
-  return encodeUintValue(buf2, 0, token.value);
-}
-function encodeUintValue(buf2, major, uint) {
-  if (uint < uintBoundaries[0]) {
-    const nuint = Number(uint);
-    buf2.push([major | nuint]);
-  } else if (uint < uintBoundaries[1]) {
-    const nuint = Number(uint);
-    buf2.push([major | 24, nuint]);
-  } else if (uint < uintBoundaries[2]) {
-    const nuint = Number(uint);
-    buf2.push([major | 25, nuint >>> 8, nuint & 255]);
-  } else if (uint < uintBoundaries[3]) {
-    const nuint = Number(uint);
-    buf2.push([major | 26, nuint >>> 24 & 255, nuint >>> 16 & 255, nuint >>> 8 & 255, nuint & 255]);
-  } else {
-    const buint = BigInt(uint);
-    if (buint < uintBoundaries[4]) {
-      const set = [major | 27, 0, 0, 0, 0, 0, 0, 0];
-      let lo = Number(buint & BigInt(4294967295));
-      let hi = Number(buint >> BigInt(32) & BigInt(4294967295));
-      set[8] = lo & 255;
-      lo = lo >> 8;
-      set[7] = lo & 255;
-      lo = lo >> 8;
-      set[6] = lo & 255;
-      lo = lo >> 8;
-      set[5] = lo & 255;
-      set[4] = hi & 255;
-      hi = hi >> 8;
-      set[3] = hi & 255;
-      hi = hi >> 8;
-      set[2] = hi & 255;
-      hi = hi >> 8;
-      set[1] = hi & 255;
-      buf2.push(set);
-    } else {
-      throw new Error(`${decodeErrPrefix} encountered BigInt larger than allowable range`);
-    }
-  }
-}
-encodeUint.encodedSize = function encodedSize(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-encodeUintValue.encodedSize = function encodedSize2(uint) {
-  if (uint < uintBoundaries[0]) {
-    return 1;
-  }
-  if (uint < uintBoundaries[1]) {
-    return 2;
-  }
-  if (uint < uintBoundaries[2]) {
-    return 3;
-  }
-  if (uint < uintBoundaries[3]) {
-    return 5;
-  }
-  return 9;
-};
-encodeUint.compareTokens = function compareTokens(tok1, tok2) {
-  return tok1.value < tok2.value ? -1 : tok1.value > tok2.value ? 1 : (
-    /* c8 ignore next */
-    0
-  );
-};
-
-// node_modules/cborg/lib/1negint.js
-function decodeNegint8(data, pos, _minor, options) {
-  return new Token(Type.negint, -1 - readUint8(data, pos + 1, options), 2);
-}
-function decodeNegint16(data, pos, _minor, options) {
-  return new Token(Type.negint, -1 - readUint16(data, pos + 1, options), 3);
-}
-function decodeNegint32(data, pos, _minor, options) {
-  return new Token(Type.negint, -1 - readUint32(data, pos + 1, options), 5);
-}
-var neg1b = BigInt(-1);
-var pos1b = BigInt(1);
-function decodeNegint64(data, pos, _minor, options) {
-  const int = readUint64(data, pos + 1, options);
-  if (typeof int !== "bigint") {
-    const value = -1 - int;
-    if (value >= Number.MIN_SAFE_INTEGER) {
-      return new Token(Type.negint, value, 9);
-    }
-  }
-  if (options.allowBigInt !== true) {
-    throw new Error(`${decodeErrPrefix} integers outside of the safe integer range are not supported`);
-  }
-  return new Token(Type.negint, neg1b - BigInt(int), 9);
-}
-function encodeNegint(buf2, token) {
-  const negint = token.value;
-  const unsigned = typeof negint === "bigint" ? negint * neg1b - pos1b : negint * -1 - 1;
-  encodeUintValue(buf2, token.type.majorEncoded, unsigned);
-}
-encodeNegint.encodedSize = function encodedSize3(token) {
-  const negint = token.value;
-  const unsigned = typeof negint === "bigint" ? negint * neg1b - pos1b : negint * -1 - 1;
-  if (unsigned < uintBoundaries[0]) {
-    return 1;
-  }
-  if (unsigned < uintBoundaries[1]) {
-    return 2;
-  }
-  if (unsigned < uintBoundaries[2]) {
-    return 3;
-  }
-  if (unsigned < uintBoundaries[3]) {
-    return 5;
-  }
-  return 9;
-};
-encodeNegint.compareTokens = function compareTokens2(tok1, tok2) {
-  return tok1.value < tok2.value ? 1 : tok1.value > tok2.value ? -1 : (
-    /* c8 ignore next */
-    0
-  );
-};
-
-// node_modules/cborg/lib/2bytes.js
-function toToken(data, pos, prefix, length4) {
-  assertEnoughData(data, pos, prefix + length4);
-  const buf2 = slice(data, pos + prefix, pos + prefix + length4);
-  return new Token(Type.bytes, buf2, prefix + length4);
-}
-function decodeBytesCompact(data, pos, minor, _options) {
-  return toToken(data, pos, 1, minor);
-}
-function decodeBytes8(data, pos, _minor, options) {
-  return toToken(data, pos, 2, readUint8(data, pos + 1, options));
-}
-function decodeBytes16(data, pos, _minor, options) {
-  return toToken(data, pos, 3, readUint16(data, pos + 1, options));
-}
-function decodeBytes32(data, pos, _minor, options) {
-  return toToken(data, pos, 5, readUint32(data, pos + 1, options));
-}
-function decodeBytes64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer bytes lengths not supported`);
-  }
-  return toToken(data, pos, 9, l);
-}
-function tokenBytes(token) {
-  if (token.encodedBytes === void 0) {
-    token.encodedBytes = token.type === Type.string ? fromString(token.value) : token.value;
-  }
-  return token.encodedBytes;
-}
-function encodeBytes(buf2, token) {
-  const bytes = tokenBytes(token);
-  encodeUintValue(buf2, token.type.majorEncoded, bytes.length);
-  buf2.push(bytes);
-}
-encodeBytes.encodedSize = function encodedSize4(token) {
-  const bytes = tokenBytes(token);
-  return encodeUintValue.encodedSize(bytes.length) + bytes.length;
-};
-encodeBytes.compareTokens = function compareTokens3(tok1, tok2) {
-  return compareBytes(tokenBytes(tok1), tokenBytes(tok2));
-};
-function compareBytes(b1, b2) {
-  return b1.length < b2.length ? -1 : b1.length > b2.length ? 1 : compare(b1, b2);
-}
-
-// node_modules/cborg/lib/3string.js
-function toToken2(data, pos, prefix, length4, options) {
-  const totLength = prefix + length4;
-  assertEnoughData(data, pos, totLength);
-  const tok = new Token(Type.string, toString(data, pos + prefix, pos + totLength), totLength);
-  if (options.retainStringBytes === true) {
-    tok.byteValue = slice(data, pos + prefix, pos + totLength);
-  }
-  return tok;
-}
-function decodeStringCompact(data, pos, minor, options) {
-  return toToken2(data, pos, 1, minor, options);
-}
-function decodeString8(data, pos, _minor, options) {
-  return toToken2(data, pos, 2, readUint8(data, pos + 1, options), options);
-}
-function decodeString16(data, pos, _minor, options) {
-  return toToken2(data, pos, 3, readUint16(data, pos + 1, options), options);
-}
-function decodeString32(data, pos, _minor, options) {
-  return toToken2(data, pos, 5, readUint32(data, pos + 1, options), options);
-}
-function decodeString64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer string lengths not supported`);
-  }
-  return toToken2(data, pos, 9, l, options);
-}
-var encodeString = encodeBytes;
-
-// node_modules/cborg/lib/4array.js
-function toToken3(_data, _pos, prefix, length4) {
-  return new Token(Type.array, length4, prefix);
-}
-function decodeArrayCompact(data, pos, minor, _options) {
-  return toToken3(data, pos, 1, minor);
-}
-function decodeArray8(data, pos, _minor, options) {
-  return toToken3(data, pos, 2, readUint8(data, pos + 1, options));
-}
-function decodeArray16(data, pos, _minor, options) {
-  return toToken3(data, pos, 3, readUint16(data, pos + 1, options));
-}
-function decodeArray32(data, pos, _minor, options) {
-  return toToken3(data, pos, 5, readUint32(data, pos + 1, options));
-}
-function decodeArray64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer array lengths not supported`);
-  }
-  return toToken3(data, pos, 9, l);
-}
-function decodeArrayIndefinite(data, pos, _minor, options) {
-  if (options.allowIndefinite === false) {
-    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
-  }
-  return toToken3(data, pos, 1, Infinity);
-}
-function encodeArray(buf2, token) {
-  encodeUintValue(buf2, Type.array.majorEncoded, token.value);
-}
-encodeArray.compareTokens = encodeUint.compareTokens;
-encodeArray.encodedSize = function encodedSize5(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-
-// node_modules/cborg/lib/5map.js
-function toToken4(_data, _pos, prefix, length4) {
-  return new Token(Type.map, length4, prefix);
-}
-function decodeMapCompact(data, pos, minor, _options) {
-  return toToken4(data, pos, 1, minor);
-}
-function decodeMap8(data, pos, _minor, options) {
-  return toToken4(data, pos, 2, readUint8(data, pos + 1, options));
-}
-function decodeMap16(data, pos, _minor, options) {
-  return toToken4(data, pos, 3, readUint16(data, pos + 1, options));
-}
-function decodeMap32(data, pos, _minor, options) {
-  return toToken4(data, pos, 5, readUint32(data, pos + 1, options));
-}
-function decodeMap64(data, pos, _minor, options) {
-  const l = readUint64(data, pos + 1, options);
-  if (typeof l === "bigint") {
-    throw new Error(`${decodeErrPrefix} 64-bit integer map lengths not supported`);
-  }
-  return toToken4(data, pos, 9, l);
-}
-function decodeMapIndefinite(data, pos, _minor, options) {
-  if (options.allowIndefinite === false) {
-    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
-  }
-  return toToken4(data, pos, 1, Infinity);
-}
-function encodeMap(buf2, token) {
-  encodeUintValue(buf2, Type.map.majorEncoded, token.value);
-}
-encodeMap.compareTokens = encodeUint.compareTokens;
-encodeMap.encodedSize = function encodedSize6(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-
-// node_modules/cborg/lib/6tag.js
-function decodeTagCompact(_data, _pos, minor, _options) {
-  return new Token(Type.tag, minor, 1);
-}
-function decodeTag8(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint8(data, pos + 1, options), 2);
-}
-function decodeTag16(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint16(data, pos + 1, options), 3);
-}
-function decodeTag32(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint32(data, pos + 1, options), 5);
-}
-function decodeTag64(data, pos, _minor, options) {
-  return new Token(Type.tag, readUint64(data, pos + 1, options), 9);
-}
-function encodeTag(buf2, token) {
-  encodeUintValue(buf2, Type.tag.majorEncoded, token.value);
-}
-encodeTag.compareTokens = encodeUint.compareTokens;
-encodeTag.encodedSize = function encodedSize7(token) {
-  return encodeUintValue.encodedSize(token.value);
-};
-
-// node_modules/cborg/lib/7float.js
-var MINOR_FALSE = 20;
-var MINOR_TRUE = 21;
-var MINOR_NULL = 22;
-var MINOR_UNDEFINED = 23;
-function decodeUndefined(_data, _pos, _minor, options) {
-  if (options.allowUndefined === false) {
-    throw new Error(`${decodeErrPrefix} undefined values are not supported`);
-  } else if (options.coerceUndefinedToNull === true) {
-    return new Token(Type.null, null, 1);
-  }
-  return new Token(Type.undefined, void 0, 1);
-}
-function decodeBreak(_data, _pos, _minor, options) {
-  if (options.allowIndefinite === false) {
-    throw new Error(`${decodeErrPrefix} indefinite length items not allowed`);
-  }
-  return new Token(Type.break, void 0, 1);
-}
-function createToken(value, bytes, options) {
-  if (options) {
-    if (options.allowNaN === false && Number.isNaN(value)) {
-      throw new Error(`${decodeErrPrefix} NaN values are not supported`);
-    }
-    if (options.allowInfinity === false && (value === Infinity || value === -Infinity)) {
-      throw new Error(`${decodeErrPrefix} Infinity values are not supported`);
-    }
-  }
-  return new Token(Type.float, value, bytes);
-}
-function decodeFloat16(data, pos, _minor, options) {
-  return createToken(readFloat16(data, pos + 1), 3, options);
-}
-function decodeFloat32(data, pos, _minor, options) {
-  return createToken(readFloat32(data, pos + 1), 5, options);
-}
-function decodeFloat64(data, pos, _minor, options) {
-  return createToken(readFloat64(data, pos + 1), 9, options);
-}
-function encodeFloat(buf2, token, options) {
-  const float = token.value;
-  if (float === false) {
-    buf2.push([Type.float.majorEncoded | MINOR_FALSE]);
-  } else if (float === true) {
-    buf2.push([Type.float.majorEncoded | MINOR_TRUE]);
-  } else if (float === null) {
-    buf2.push([Type.float.majorEncoded | MINOR_NULL]);
-  } else if (float === void 0) {
-    buf2.push([Type.float.majorEncoded | MINOR_UNDEFINED]);
-  } else {
-    let decoded;
-    let success = false;
-    if (!options || options.float64 !== true) {
-      encodeFloat16(float);
-      decoded = readFloat16(ui8a, 1);
-      if (float === decoded || Number.isNaN(float)) {
-        ui8a[0] = 249;
-        buf2.push(ui8a.slice(0, 3));
-        success = true;
-      } else {
-        encodeFloat32(float);
-        decoded = readFloat32(ui8a, 1);
-        if (float === decoded) {
-          ui8a[0] = 250;
-          buf2.push(ui8a.slice(0, 5));
-          success = true;
-        }
-      }
-    }
-    if (!success) {
-      encodeFloat64(float);
-      decoded = readFloat64(ui8a, 1);
-      ui8a[0] = 251;
-      buf2.push(ui8a.slice(0, 9));
-    }
-  }
-}
-encodeFloat.encodedSize = function encodedSize8(token, options) {
-  const float = token.value;
-  if (float === false || float === true || float === null || float === void 0) {
-    return 1;
-  }
-  if (!options || options.float64 !== true) {
-    encodeFloat16(float);
-    let decoded = readFloat16(ui8a, 1);
-    if (float === decoded || Number.isNaN(float)) {
-      return 3;
-    }
-    encodeFloat32(float);
-    decoded = readFloat32(ui8a, 1);
-    if (float === decoded) {
-      return 5;
-    }
-  }
-  return 9;
-};
-var buffer = new ArrayBuffer(9);
-var dataView = new DataView(buffer, 1);
-var ui8a = new Uint8Array(buffer, 0);
-function encodeFloat16(inp) {
-  if (inp === Infinity) {
-    dataView.setUint16(0, 31744, false);
-  } else if (inp === -Infinity) {
-    dataView.setUint16(0, 64512, false);
-  } else if (Number.isNaN(inp)) {
-    dataView.setUint16(0, 32256, false);
-  } else {
-    dataView.setFloat32(0, inp);
-    const valu32 = dataView.getUint32(0);
-    const exponent = (valu32 & 2139095040) >> 23;
-    const mantissa = valu32 & 8388607;
-    if (exponent === 255) {
-      dataView.setUint16(0, 31744, false);
-    } else if (exponent === 0) {
-      dataView.setUint16(0, (inp & 2147483648) >> 16 | mantissa >> 13, false);
-    } else {
-      const logicalExponent = exponent - 127;
-      if (logicalExponent < -24) {
-        dataView.setUint16(0, 0);
-      } else if (logicalExponent < -14) {
-        dataView.setUint16(0, (valu32 & 2147483648) >> 16 | /* sign bit */
-        1 << 24 + logicalExponent, false);
-      } else {
-        dataView.setUint16(0, (valu32 & 2147483648) >> 16 | logicalExponent + 15 << 10 | mantissa >> 13, false);
-      }
-    }
-  }
-}
-function readFloat16(ui8a2, pos) {
-  if (ui8a2.length - pos < 2) {
-    throw new Error(`${decodeErrPrefix} not enough data for float16`);
-  }
-  const half = (ui8a2[pos] << 8) + ui8a2[pos + 1];
-  if (half === 31744) {
-    return Infinity;
-  }
-  if (half === 64512) {
-    return -Infinity;
-  }
-  if (half === 32256) {
-    return NaN;
-  }
-  const exp = half >> 10 & 31;
-  const mant = half & 1023;
-  let val;
-  if (exp === 0) {
-    val = mant * 2 ** -24;
-  } else if (exp !== 31) {
-    val = (mant + 1024) * 2 ** (exp - 25);
-  } else {
-    val = mant === 0 ? Infinity : NaN;
-  }
-  return half & 32768 ? -val : val;
-}
-function encodeFloat32(inp) {
-  dataView.setFloat32(0, inp, false);
-}
-function readFloat32(ui8a2, pos) {
-  if (ui8a2.length - pos < 4) {
-    throw new Error(`${decodeErrPrefix} not enough data for float32`);
-  }
-  const offset = (ui8a2.byteOffset || 0) + pos;
-  return new DataView(ui8a2.buffer, offset, 4).getFloat32(0, false);
-}
-function encodeFloat64(inp) {
-  dataView.setFloat64(0, inp, false);
-}
-function readFloat64(ui8a2, pos) {
-  if (ui8a2.length - pos < 8) {
-    throw new Error(`${decodeErrPrefix} not enough data for float64`);
-  }
-  const offset = (ui8a2.byteOffset || 0) + pos;
-  return new DataView(ui8a2.buffer, offset, 8).getFloat64(0, false);
-}
-encodeFloat.compareTokens = encodeUint.compareTokens;
-
-// node_modules/cborg/lib/jump.js
-function invalidMinor(data, pos, minor) {
-  throw new Error(`${decodeErrPrefix} encountered invalid minor (${minor}) for major ${data[pos] >>> 5}`);
-}
-function errorer(msg) {
-  return () => {
-    throw new Error(`${decodeErrPrefix} ${msg}`);
-  };
-}
-var jump = [];
-for (let i = 0; i <= 23; i++) {
-  jump[i] = invalidMinor;
-}
-jump[24] = decodeUint8;
-jump[25] = decodeUint16;
-jump[26] = decodeUint32;
-jump[27] = decodeUint64;
-jump[28] = invalidMinor;
-jump[29] = invalidMinor;
-jump[30] = invalidMinor;
-jump[31] = invalidMinor;
-for (let i = 32; i <= 55; i++) {
-  jump[i] = invalidMinor;
-}
-jump[56] = decodeNegint8;
-jump[57] = decodeNegint16;
-jump[58] = decodeNegint32;
-jump[59] = decodeNegint64;
-jump[60] = invalidMinor;
-jump[61] = invalidMinor;
-jump[62] = invalidMinor;
-jump[63] = invalidMinor;
-for (let i = 64; i <= 87; i++) {
-  jump[i] = decodeBytesCompact;
-}
-jump[88] = decodeBytes8;
-jump[89] = decodeBytes16;
-jump[90] = decodeBytes32;
-jump[91] = decodeBytes64;
-jump[92] = invalidMinor;
-jump[93] = invalidMinor;
-jump[94] = invalidMinor;
-jump[95] = errorer("indefinite length bytes/strings are not supported");
-for (let i = 96; i <= 119; i++) {
-  jump[i] = decodeStringCompact;
-}
-jump[120] = decodeString8;
-jump[121] = decodeString16;
-jump[122] = decodeString32;
-jump[123] = decodeString64;
-jump[124] = invalidMinor;
-jump[125] = invalidMinor;
-jump[126] = invalidMinor;
-jump[127] = errorer("indefinite length bytes/strings are not supported");
-for (let i = 128; i <= 151; i++) {
-  jump[i] = decodeArrayCompact;
-}
-jump[152] = decodeArray8;
-jump[153] = decodeArray16;
-jump[154] = decodeArray32;
-jump[155] = decodeArray64;
-jump[156] = invalidMinor;
-jump[157] = invalidMinor;
-jump[158] = invalidMinor;
-jump[159] = decodeArrayIndefinite;
-for (let i = 160; i <= 183; i++) {
-  jump[i] = decodeMapCompact;
-}
-jump[184] = decodeMap8;
-jump[185] = decodeMap16;
-jump[186] = decodeMap32;
-jump[187] = decodeMap64;
-jump[188] = invalidMinor;
-jump[189] = invalidMinor;
-jump[190] = invalidMinor;
-jump[191] = decodeMapIndefinite;
-for (let i = 192; i <= 215; i++) {
-  jump[i] = decodeTagCompact;
-}
-jump[216] = decodeTag8;
-jump[217] = decodeTag16;
-jump[218] = decodeTag32;
-jump[219] = decodeTag64;
-jump[220] = invalidMinor;
-jump[221] = invalidMinor;
-jump[222] = invalidMinor;
-jump[223] = invalidMinor;
-for (let i = 224; i <= 243; i++) {
-  jump[i] = errorer("simple values are not supported");
-}
-jump[244] = invalidMinor;
-jump[245] = invalidMinor;
-jump[246] = invalidMinor;
-jump[247] = decodeUndefined;
-jump[248] = errorer("simple values are not supported");
-jump[249] = decodeFloat16;
-jump[250] = decodeFloat32;
-jump[251] = decodeFloat64;
-jump[252] = invalidMinor;
-jump[253] = invalidMinor;
-jump[254] = invalidMinor;
-jump[255] = decodeBreak;
-var quick = [];
-for (let i = 0; i < 24; i++) {
-  quick[i] = new Token(Type.uint, i, 1);
-}
-for (let i = -1; i >= -24; i--) {
-  quick[31 - i] = new Token(Type.negint, i, 1);
-}
-quick[64] = new Token(Type.bytes, new Uint8Array(0), 1);
-quick[96] = new Token(Type.string, "", 1);
-quick[128] = new Token(Type.array, 0, 1);
-quick[160] = new Token(Type.map, 0, 1);
-quick[244] = new Token(Type.false, false, 1);
-quick[245] = new Token(Type.true, true, 1);
-quick[246] = new Token(Type.null, null, 1);
-function quickEncodeToken(token) {
-  switch (token.type) {
-    case Type.false:
-      return fromArray([244]);
-    case Type.true:
-      return fromArray([245]);
-    case Type.null:
-      return fromArray([246]);
-    case Type.bytes:
-      if (!token.value.length) {
-        return fromArray([64]);
-      }
-      return;
-    case Type.string:
-      if (token.value === "") {
-        return fromArray([96]);
-      }
-      return;
-    case Type.array:
-      if (token.value === 0) {
-        return fromArray([128]);
-      }
-      return;
-    case Type.map:
-      if (token.value === 0) {
-        return fromArray([160]);
-      }
-      return;
-    case Type.uint:
-      if (token.value < 24) {
-        return fromArray([Number(token.value)]);
-      }
-      return;
-    case Type.negint:
-      if (token.value >= -24) {
-        return fromArray([31 - Number(token.value)]);
-      }
-  }
-}
-
-// node_modules/cborg/lib/encode.js
-var defaultEncodeOptions = {
-  float64: false,
-  mapSorter,
-  quickEncodeToken
-};
-function makeCborEncoders() {
-  const encoders = [];
-  encoders[Type.uint.major] = encodeUint;
-  encoders[Type.negint.major] = encodeNegint;
-  encoders[Type.bytes.major] = encodeBytes;
-  encoders[Type.string.major] = encodeString;
-  encoders[Type.array.major] = encodeArray;
-  encoders[Type.map.major] = encodeMap;
-  encoders[Type.tag.major] = encodeTag;
-  encoders[Type.float.major] = encodeFloat;
-  return encoders;
-}
-var cborEncoders = makeCborEncoders();
-var buf = new Bl();
-var Ref = class _Ref {
-  /**
-   * @param {object|any[]} obj
-   * @param {Reference|undefined} parent
-   */
-  constructor(obj, parent) {
-    this.obj = obj;
-    this.parent = parent;
-  }
-  /**
-   * @param {object|any[]} obj
-   * @returns {boolean}
-   */
-  includes(obj) {
-    let p = this;
-    do {
-      if (p.obj === obj) {
-        return true;
-      }
-    } while (p = p.parent);
-    return false;
-  }
-  /**
-   * @param {Reference|undefined} stack
-   * @param {object|any[]} obj
-   * @returns {Reference}
-   */
-  static createCheck(stack, obj) {
-    if (stack && stack.includes(obj)) {
-      throw new Error(`${encodeErrPrefix} object contains circular references`);
-    }
-    return new _Ref(obj, stack);
-  }
-};
-var simpleTokens = {
-  null: new Token(Type.null, null),
-  undefined: new Token(Type.undefined, void 0),
-  true: new Token(Type.true, true),
-  false: new Token(Type.false, false),
-  emptyArray: new Token(Type.array, 0),
-  emptyMap: new Token(Type.map, 0)
-};
-var typeEncoders = {
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  number(obj, _typ, _options, _refStack) {
-    if (!Number.isInteger(obj) || !Number.isSafeInteger(obj)) {
-      return new Token(Type.float, obj);
-    } else if (obj >= 0) {
-      return new Token(Type.uint, obj);
-    } else {
-      return new Token(Type.negint, obj);
-    }
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  bigint(obj, _typ, _options, _refStack) {
-    if (obj >= BigInt(0)) {
-      return new Token(Type.uint, obj);
-    } else {
-      return new Token(Type.negint, obj);
-    }
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  Uint8Array(obj, _typ, _options, _refStack) {
-    return new Token(Type.bytes, obj);
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  string(obj, _typ, _options, _refStack) {
-    return new Token(Type.string, obj);
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  boolean(obj, _typ, _options, _refStack) {
-    return obj ? simpleTokens.true : simpleTokens.false;
-  },
-  /**
-   * @param {any} _obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  null(_obj, _typ, _options, _refStack) {
-    return simpleTokens.null;
-  },
-  /**
-   * @param {any} _obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  undefined(_obj, _typ, _options, _refStack) {
-    return simpleTokens.undefined;
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  ArrayBuffer(obj, _typ, _options, _refStack) {
-    return new Token(Type.bytes, new Uint8Array(obj));
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} _options
-   * @param {Reference} [_refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  DataView(obj, _typ, _options, _refStack) {
-    return new Token(Type.bytes, new Uint8Array(obj.buffer, obj.byteOffset, obj.byteLength));
-  },
-  /**
-   * @param {any} obj
-   * @param {string} _typ
-   * @param {EncodeOptions} options
-   * @param {Reference} [refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  Array(obj, _typ, options, refStack) {
-    if (!obj.length) {
-      if (options.addBreakTokens === true) {
-        return [simpleTokens.emptyArray, new Token(Type.break)];
-      }
-      return simpleTokens.emptyArray;
-    }
-    refStack = Ref.createCheck(refStack, obj);
-    const entries = [];
-    let i = 0;
-    for (const e of obj) {
-      entries[i++] = objectToTokens(e, options, refStack);
-    }
-    if (options.addBreakTokens) {
-      return [new Token(Type.array, obj.length), entries, new Token(Type.break)];
-    }
-    return [new Token(Type.array, obj.length), entries];
-  },
-  /**
-   * @param {any} obj
-   * @param {string} typ
-   * @param {EncodeOptions} options
-   * @param {Reference} [refStack]
-   * @returns {TokenOrNestedTokens}
-   */
-  Object(obj, typ, options, refStack) {
-    const isMap = typ !== "Object";
-    const keys = isMap ? obj.keys() : Object.keys(obj);
-    const length4 = isMap ? obj.size : keys.length;
-    if (!length4) {
-      if (options.addBreakTokens === true) {
-        return [simpleTokens.emptyMap, new Token(Type.break)];
-      }
-      return simpleTokens.emptyMap;
-    }
-    refStack = Ref.createCheck(refStack, obj);
-    const entries = [];
-    let i = 0;
-    for (const key of keys) {
-      entries[i++] = [
-        objectToTokens(key, options, refStack),
-        objectToTokens(isMap ? obj.get(key) : obj[key], options, refStack)
-      ];
-    }
-    sortMapEntries(entries, options);
-    if (options.addBreakTokens) {
-      return [new Token(Type.map, length4), entries, new Token(Type.break)];
-    }
-    return [new Token(Type.map, length4), entries];
-  }
-};
-typeEncoders.Map = typeEncoders.Object;
-typeEncoders.Buffer = typeEncoders.Uint8Array;
-for (const typ of "Uint8Clamped Uint16 Uint32 Int8 Int16 Int32 BigUint64 BigInt64 Float32 Float64".split(" ")) {
-  typeEncoders[`${typ}Array`] = typeEncoders.DataView;
-}
-function objectToTokens(obj, options = {}, refStack) {
-  const typ = is(obj);
-  const customTypeEncoder = options && options.typeEncoders && /** @type {OptionalTypeEncoder} */
-  options.typeEncoders[typ] || typeEncoders[typ];
-  if (typeof customTypeEncoder === "function") {
-    const tokens = customTypeEncoder(obj, typ, options, refStack);
-    if (tokens != null) {
-      return tokens;
-    }
-  }
-  const typeEncoder = typeEncoders[typ];
-  if (!typeEncoder) {
-    throw new Error(`${encodeErrPrefix} unsupported type: ${typ}`);
-  }
-  return typeEncoder(obj, typ, options, refStack);
-}
-function sortMapEntries(entries, options) {
-  if (options.mapSorter) {
-    entries.sort(options.mapSorter);
-  }
-}
-function mapSorter(e1, e2) {
-  const keyToken1 = Array.isArray(e1[0]) ? e1[0][0] : e1[0];
-  const keyToken2 = Array.isArray(e2[0]) ? e2[0][0] : e2[0];
-  if (keyToken1.type !== keyToken2.type) {
-    return keyToken1.type.compare(keyToken2.type);
-  }
-  const major = keyToken1.type.major;
-  const tcmp = cborEncoders[major].compareTokens(keyToken1, keyToken2);
-  if (tcmp === 0) {
-    console.warn("WARNING: complex key types used, CBOR key sorting guarantees are gone");
-  }
-  return tcmp;
-}
-function tokensToEncoded(buf2, tokens, encoders, options) {
-  if (Array.isArray(tokens)) {
-    for (const token of tokens) {
-      tokensToEncoded(buf2, token, encoders, options);
-    }
-  } else {
-    encoders[tokens.type.major](buf2, tokens, options);
-  }
-}
-function encodeCustom(data, encoders, options) {
-  const tokens = objectToTokens(data, options);
-  if (!Array.isArray(tokens) && options.quickEncodeToken) {
-    const quickBytes = options.quickEncodeToken(tokens);
-    if (quickBytes) {
-      return quickBytes;
-    }
-    const encoder = encoders[tokens.type.major];
-    if (encoder.encodedSize) {
-      const size = encoder.encodedSize(tokens, options);
-      const buf2 = new Bl(size);
-      encoder(buf2, tokens, options);
-      if (buf2.chunks.length !== 1) {
-        throw new Error(`Unexpected error: pre-calculated length for ${tokens} was wrong`);
-      }
-      return asU8A(buf2.chunks[0]);
-    }
-  }
-  buf.reset();
-  tokensToEncoded(buf, tokens, encoders, options);
-  return buf.toBytes(true);
-}
-function encode(data, options) {
-  options = Object.assign({}, defaultEncodeOptions, options);
-  return encodeCustom(data, cborEncoders, options);
-}
-
-// node_modules/cborg/lib/decode.js
-var defaultDecodeOptions = {
-  strict: false,
-  allowIndefinite: true,
-  allowUndefined: true,
-  allowBigInt: true
-};
-var Tokeniser = class {
-  /**
-   * @param {Uint8Array} data
-   * @param {DecodeOptions} options
-   */
-  constructor(data, options = {}) {
-    this._pos = 0;
-    this.data = data;
-    this.options = options;
-  }
-  pos() {
-    return this._pos;
-  }
-  done() {
-    return this._pos >= this.data.length;
-  }
-  next() {
-    const byt = this.data[this._pos];
-    let token = quick[byt];
-    if (token === void 0) {
-      const decoder = jump[byt];
-      if (!decoder) {
-        throw new Error(`${decodeErrPrefix} no decoder for major type ${byt >>> 5} (byte 0x${byt.toString(16).padStart(2, "0")})`);
-      }
-      const minor = byt & 31;
-      token = decoder(this.data, this._pos, minor, this.options);
-    }
-    this._pos += token.encodedLength;
-    return token;
-  }
-};
-var DONE = Symbol.for("DONE");
-var BREAK = Symbol.for("BREAK");
-function tokenToArray(token, tokeniser, options) {
-  const arr = [];
-  for (let i = 0; i < token.value; i++) {
-    const value = tokensToObject(tokeniser, options);
-    if (value === BREAK) {
-      if (token.value === Infinity) {
-        break;
-      }
-      throw new Error(`${decodeErrPrefix} got unexpected break to lengthed array`);
-    }
-    if (value === DONE) {
-      throw new Error(`${decodeErrPrefix} found array but not enough entries (got ${i}, expected ${token.value})`);
-    }
-    arr[i] = value;
-  }
-  return arr;
-}
-function tokenToMap(token, tokeniser, options) {
-  const useMaps = options.useMaps === true;
-  const obj = useMaps ? void 0 : {};
-  const m = useMaps ? /* @__PURE__ */ new Map() : void 0;
-  for (let i = 0; i < token.value; i++) {
-    const key = tokensToObject(tokeniser, options);
-    if (key === BREAK) {
-      if (token.value === Infinity) {
-        break;
-      }
-      throw new Error(`${decodeErrPrefix} got unexpected break to lengthed map`);
-    }
-    if (key === DONE) {
-      throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no key], expected ${token.value})`);
-    }
-    if (useMaps !== true && typeof key !== "string") {
-      throw new Error(`${decodeErrPrefix} non-string keys not supported (got ${typeof key})`);
-    }
-    if (options.rejectDuplicateMapKeys === true) {
-      if (useMaps && m.has(key) || !useMaps && key in obj) {
-        throw new Error(`${decodeErrPrefix} found repeat map key "${key}"`);
-      }
-    }
-    const value = tokensToObject(tokeniser, options);
-    if (value === DONE) {
-      throw new Error(`${decodeErrPrefix} found map but not enough entries (got ${i} [no value], expected ${token.value})`);
-    }
-    if (useMaps) {
-      m.set(key, value);
-    } else {
-      obj[key] = value;
-    }
-  }
-  return useMaps ? m : obj;
-}
-function tokensToObject(tokeniser, options) {
-  if (tokeniser.done()) {
-    return DONE;
-  }
-  const token = tokeniser.next();
-  if (token.type === Type.break) {
-    return BREAK;
-  }
-  if (token.type.terminal) {
-    return token.value;
-  }
-  if (token.type === Type.array) {
-    return tokenToArray(token, tokeniser, options);
-  }
-  if (token.type === Type.map) {
-    return tokenToMap(token, tokeniser, options);
-  }
-  if (token.type === Type.tag) {
-    if (options.tags && typeof options.tags[token.value] === "function") {
-      const tagged = tokensToObject(tokeniser, options);
-      return options.tags[token.value](tagged);
-    }
-    throw new Error(`${decodeErrPrefix} tag not supported (${token.value})`);
-  }
-  throw new Error("unsupported");
-}
-function decodeFirst(data, options) {
-  if (!(data instanceof Uint8Array)) {
-    throw new Error(`${decodeErrPrefix} data to decode must be a Uint8Array`);
-  }
-  options = Object.assign({}, defaultDecodeOptions, options);
-  const tokeniser = options.tokenizer || new Tokeniser(data, options);
-  const decoded = tokensToObject(tokeniser, options);
-  if (decoded === DONE) {
-    throw new Error(`${decodeErrPrefix} did not find any content to decode`);
-  }
-  if (decoded === BREAK) {
-    throw new Error(`${decodeErrPrefix} got unexpected break`);
-  }
-  return [decoded, data.subarray(tokeniser.pos())];
-}
-function decode(data, options) {
-  const [decoded, remainder] = decodeFirst(data, options);
-  if (remainder.length > 0) {
-    throw new Error(`${decodeErrPrefix} too many terminals, data makes no sense`);
-  }
-  return decoded;
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bytes.js
-var empty = new Uint8Array(0);
-function equals(aa, bb) {
-  if (aa === bb)
-    return true;
-  if (aa.byteLength !== bb.byteLength) {
-    return false;
-  }
-  for (let ii = 0; ii < aa.byteLength; ii++) {
-    if (aa[ii] !== bb[ii]) {
-      return false;
-    }
-  }
-  return true;
-}
-function coerce(o) {
-  if (o instanceof Uint8Array && o.constructor.name === "Uint8Array")
-    return o;
-  if (o instanceof ArrayBuffer)
-    return new Uint8Array(o);
-  if (ArrayBuffer.isView(o)) {
-    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
-  }
-  throw new Error("Unknown type, must be binary type");
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/base-x.js
-function base(ALPHABET, name4) {
-  if (ALPHABET.length >= 255) {
-    throw new TypeError("Alphabet too long");
-  }
-  var BASE_MAP = new Uint8Array(256);
-  for (var j = 0; j < BASE_MAP.length; j++) {
-    BASE_MAP[j] = 255;
-  }
-  for (var i = 0; i < ALPHABET.length; i++) {
-    var x = ALPHABET.charAt(i);
-    var xc = x.charCodeAt(0);
-    if (BASE_MAP[xc] !== 255) {
-      throw new TypeError(x + " is ambiguous");
-    }
-    BASE_MAP[xc] = i;
-  }
-  var BASE = ALPHABET.length;
-  var LEADER = ALPHABET.charAt(0);
-  var FACTOR = Math.log(BASE) / Math.log(256);
-  var iFACTOR = Math.log(256) / Math.log(BASE);
-  function encode12(source) {
-    if (source instanceof Uint8Array)
-      ;
-    else if (ArrayBuffer.isView(source)) {
-      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
-    } else if (Array.isArray(source)) {
-      source = Uint8Array.from(source);
-    }
-    if (!(source instanceof Uint8Array)) {
-      throw new TypeError("Expected Uint8Array");
-    }
-    if (source.length === 0) {
-      return "";
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    var pbegin = 0;
-    var pend = source.length;
-    while (pbegin !== pend && source[pbegin] === 0) {
-      pbegin++;
-      zeroes++;
-    }
-    var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
-    var b58 = new Uint8Array(size);
-    while (pbegin !== pend) {
-      var carry = source[pbegin];
-      var i2 = 0;
-      for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) {
-        carry += 256 * b58[it1] >>> 0;
-        b58[it1] = carry % BASE >>> 0;
-        carry = carry / BASE >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      pbegin++;
-    }
-    var it2 = size - length4;
-    while (it2 !== size && b58[it2] === 0) {
-      it2++;
-    }
-    var str = LEADER.repeat(zeroes);
-    for (; it2 < size; ++it2) {
-      str += ALPHABET.charAt(b58[it2]);
-    }
-    return str;
-  }
-  function decodeUnsafe(source) {
-    if (typeof source !== "string") {
-      throw new TypeError("Expected String");
-    }
-    if (source.length === 0) {
-      return new Uint8Array();
-    }
-    var psz = 0;
-    if (source[psz] === " ") {
-      return;
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    while (source[psz] === LEADER) {
-      zeroes++;
-      psz++;
-    }
-    var size = (source.length - psz) * FACTOR + 1 >>> 0;
-    var b256 = new Uint8Array(size);
-    while (source[psz]) {
-      var carry = BASE_MAP[source.charCodeAt(psz)];
-      if (carry === 255) {
-        return;
-      }
-      var i2 = 0;
-      for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) {
-        carry += BASE * b256[it3] >>> 0;
-        b256[it3] = carry % 256 >>> 0;
-        carry = carry / 256 >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      psz++;
-    }
-    if (source[psz] === " ") {
-      return;
-    }
-    var it4 = size - length4;
-    while (it4 !== size && b256[it4] === 0) {
-      it4++;
-    }
-    var vch = new Uint8Array(zeroes + (size - it4));
-    var j2 = zeroes;
-    while (it4 !== size) {
-      vch[j2++] = b256[it4++];
-    }
-    return vch;
-  }
-  function decode15(string2) {
-    var buffer2 = decodeUnsafe(string2);
-    if (buffer2) {
-      return buffer2;
-    }
-    throw new Error(`Non-${name4} character`);
-  }
-  return {
-    encode: encode12,
-    decodeUnsafe,
-    decode: decode15
-  };
-}
-var src = base;
-var _brrp__multiformats_scope_baseX = src;
-var base_x_default = _brrp__multiformats_scope_baseX;
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base.js
-var Encoder = class {
-  name;
-  prefix;
-  baseEncode;
-  constructor(name4, prefix, baseEncode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-  }
-  encode(bytes) {
-    if (bytes instanceof Uint8Array) {
-      return `${this.prefix}${this.baseEncode(bytes)}`;
-    } else {
-      throw Error("Unknown type, must be binary type");
-    }
-  }
-};
-var Decoder = class {
-  name;
-  prefix;
-  baseDecode;
-  prefixCodePoint;
-  constructor(name4, prefix, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    if (prefix.codePointAt(0) === void 0) {
-      throw new Error("Invalid prefix character");
-    }
-    this.prefixCodePoint = prefix.codePointAt(0);
-    this.baseDecode = baseDecode;
-  }
-  decode(text) {
-    if (typeof text === "string") {
-      if (text.codePointAt(0) !== this.prefixCodePoint) {
-        throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
-      }
-      return this.baseDecode(text.slice(this.prefix.length));
-    } else {
-      throw Error("Can only multibase decode strings");
-    }
-  }
-  or(decoder) {
-    return or(this, decoder);
-  }
-};
-var ComposedDecoder = class {
-  decoders;
-  constructor(decoders) {
-    this.decoders = decoders;
-  }
-  or(decoder) {
-    return or(this, decoder);
-  }
-  decode(input) {
-    const prefix = input[0];
-    const decoder = this.decoders[prefix];
-    if (decoder != null) {
-      return decoder.decode(input);
-    } else {
-      throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
-    }
-  }
-};
-function or(left, right) {
-  return new ComposedDecoder({
-    ...left.decoders ?? { [left.prefix]: left },
-    ...right.decoders ?? { [right.prefix]: right }
-  });
-}
-var Codec = class {
-  name;
-  prefix;
-  baseEncode;
-  baseDecode;
-  encoder;
-  decoder;
-  constructor(name4, prefix, baseEncode, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-    this.baseDecode = baseDecode;
-    this.encoder = new Encoder(name4, prefix, baseEncode);
-    this.decoder = new Decoder(name4, prefix, baseDecode);
-  }
-  encode(input) {
-    return this.encoder.encode(input);
-  }
-  decode(input) {
-    return this.decoder.decode(input);
-  }
-};
-function from({ name: name4, prefix, encode: encode12, decode: decode15 }) {
-  return new Codec(name4, prefix, encode12, decode15);
-}
-function baseX({ name: name4, prefix, alphabet: alphabet2 }) {
-  const { encode: encode12, decode: decode15 } = base_x_default(alphabet2, name4);
-  return from({
-    prefix,
-    name: name4,
-    encode: encode12,
-    decode: (text) => coerce(decode15(text))
-  });
-}
-function decode2(string2, alphabet2, bitsPerChar, name4) {
-  const codes = {};
-  for (let i = 0; i < alphabet2.length; ++i) {
-    codes[alphabet2[i]] = i;
-  }
-  let end = string2.length;
-  while (string2[end - 1] === "=") {
-    --end;
-  }
-  const out = new Uint8Array(end * bitsPerChar / 8 | 0);
-  let bits = 0;
-  let buffer2 = 0;
-  let written = 0;
-  for (let i = 0; i < end; ++i) {
-    const value = codes[string2[i]];
-    if (value === void 0) {
-      throw new SyntaxError(`Non-${name4} character`);
-    }
-    buffer2 = buffer2 << bitsPerChar | value;
-    bits += bitsPerChar;
-    if (bits >= 8) {
-      bits -= 8;
-      out[written++] = 255 & buffer2 >> bits;
-    }
-  }
-  if (bits >= bitsPerChar || (255 & buffer2 << 8 - bits) !== 0) {
-    throw new SyntaxError("Unexpected end of data");
-  }
-  return out;
-}
-function encode2(data, alphabet2, bitsPerChar) {
-  const pad = alphabet2[alphabet2.length - 1] === "=";
-  const mask = (1 << bitsPerChar) - 1;
-  let out = "";
-  let bits = 0;
-  let buffer2 = 0;
-  for (let i = 0; i < data.length; ++i) {
-    buffer2 = buffer2 << 8 | data[i];
-    bits += 8;
-    while (bits > bitsPerChar) {
-      bits -= bitsPerChar;
-      out += alphabet2[mask & buffer2 >> bits];
-    }
-  }
-  if (bits !== 0) {
-    out += alphabet2[mask & buffer2 << bitsPerChar - bits];
-  }
-  if (pad) {
-    while ((out.length * bitsPerChar & 7) !== 0) {
-      out += "=";
-    }
-  }
-  return out;
-}
-function rfc4648({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) {
-  return from({
-    prefix,
-    name: name4,
-    encode(input) {
-      return encode2(input, alphabet2, bitsPerChar);
-    },
-    decode(input) {
-      return decode2(input, alphabet2, bitsPerChar, name4);
-    }
-  });
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base32.js
-var base32 = rfc4648({
-  prefix: "b",
-  name: "base32",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567",
-  bitsPerChar: 5
-});
-var base32upper = rfc4648({
-  prefix: "B",
-  name: "base32upper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
-  bitsPerChar: 5
-});
-var base32pad = rfc4648({
-  prefix: "c",
-  name: "base32pad",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
-  bitsPerChar: 5
-});
-var base32padupper = rfc4648({
-  prefix: "C",
-  name: "base32padupper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
-  bitsPerChar: 5
-});
-var base32hex = rfc4648({
-  prefix: "v",
-  name: "base32hex",
-  alphabet: "0123456789abcdefghijklmnopqrstuv",
-  bitsPerChar: 5
-});
-var base32hexupper = rfc4648({
-  prefix: "V",
-  name: "base32hexupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
-  bitsPerChar: 5
-});
-var base32hexpad = rfc4648({
-  prefix: "t",
-  name: "base32hexpad",
-  alphabet: "0123456789abcdefghijklmnopqrstuv=",
-  bitsPerChar: 5
-});
-var base32hexpadupper = rfc4648({
-  prefix: "T",
-  name: "base32hexpadupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
-  bitsPerChar: 5
-});
-var base32z = rfc4648({
-  prefix: "h",
-  name: "base32z",
-  alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
-  bitsPerChar: 5
-});
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/bases/base58.js
-var base58btc = baseX({
-  name: "base58btc",
-  prefix: "z",
-  alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
-});
-var base58flickr = baseX({
-  name: "base58flickr",
-  prefix: "Z",
-  alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
-});
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/vendor/varint.js
-var encode_1 = encode3;
-var MSB = 128;
-var REST = 127;
-var MSBALL = ~REST;
-var INT = Math.pow(2, 31);
-function encode3(num, out, offset) {
-  out = out || [];
-  offset = offset || 0;
-  var oldOffset = offset;
-  while (num >= INT) {
-    out[offset++] = num & 255 | MSB;
-    num /= 128;
-  }
-  while (num & MSBALL) {
-    out[offset++] = num & 255 | MSB;
-    num >>>= 7;
-  }
-  out[offset] = num | 0;
-  encode3.bytes = offset - oldOffset + 1;
-  return out;
-}
-var decode3 = read;
-var MSB$1 = 128;
-var REST$1 = 127;
-function read(buf2, offset) {
-  var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
-  do {
-    if (counter >= l) {
-      read.bytes = 0;
-      throw new RangeError("Could not decode varint");
-    }
-    b = buf2[counter++];
-    res += shift < 28 ? (b & REST$1) << shift : (b & REST$1) * Math.pow(2, shift);
-    shift += 7;
-  } while (b >= MSB$1);
-  read.bytes = counter - offset;
-  return res;
-}
-var N1 = Math.pow(2, 7);
-var N2 = Math.pow(2, 14);
-var N3 = Math.pow(2, 21);
-var N4 = Math.pow(2, 28);
-var N5 = Math.pow(2, 35);
-var N6 = Math.pow(2, 42);
-var N7 = Math.pow(2, 49);
-var N8 = Math.pow(2, 56);
-var N9 = Math.pow(2, 63);
-var length = function(value) {
-  return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 9 : 10;
-};
-var varint = {
-  encode: encode_1,
-  decode: decode3,
-  encodingLength: length
-};
-var _brrp_varint = varint;
-var varint_default = _brrp_varint;
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/varint.js
-function decode4(data, offset = 0) {
-  const code5 = varint_default.decode(data, offset);
-  return [code5, varint_default.decode.bytes];
-}
-function encodeTo(int, target, offset = 0) {
-  varint_default.encode(int, target, offset);
-  return target;
-}
-function encodingLength(int) {
-  return varint_default.encodingLength(int);
-}
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/hashes/digest.js
-function create(code5, digest2) {
-  const size = digest2.byteLength;
-  const sizeOffset = encodingLength(code5);
-  const digestOffset = sizeOffset + encodingLength(size);
-  const bytes = new Uint8Array(digestOffset + size);
-  encodeTo(code5, bytes, 0);
-  encodeTo(size, bytes, sizeOffset);
-  bytes.set(digest2, digestOffset);
-  return new Digest(code5, size, digest2, bytes);
-}
-function decode5(multihash) {
-  const bytes = coerce(multihash);
-  const [code5, sizeOffset] = decode4(bytes);
-  const [size, digestOffset] = decode4(bytes.subarray(sizeOffset));
-  const digest2 = bytes.subarray(sizeOffset + digestOffset);
-  if (digest2.byteLength !== size) {
-    throw new Error("Incorrect length");
-  }
-  return new Digest(code5, size, digest2, bytes);
-}
-function equals2(a, b) {
-  if (a === b) {
-    return true;
-  } else {
-    const data = b;
-    return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals(a.bytes, data.bytes);
-  }
-}
-var Digest = class {
-  code;
-  size;
-  digest;
-  bytes;
-  /**
-   * Creates a multihash digest.
-   */
-  constructor(code5, size, digest2, bytes) {
-    this.code = code5;
-    this.size = size;
-    this.digest = digest2;
-    this.bytes = bytes;
-  }
-};
-
-// node_modules/@ipld/dag-cbor/node_modules/multiformats/dist/src/cid.js
-function format(link, base3) {
-  const { bytes, version } = link;
-  switch (version) {
-    case 0:
-      return toStringV0(bytes, baseCache(link), base3 ?? base58btc.encoder);
-    default:
-      return toStringV1(bytes, baseCache(link), base3 ?? base32.encoder);
-  }
-}
-var cache = /* @__PURE__ */ new WeakMap();
-function baseCache(cid) {
-  const baseCache3 = cache.get(cid);
-  if (baseCache3 == null) {
-    const baseCache4 = /* @__PURE__ */ new Map();
-    cache.set(cid, baseCache4);
-    return baseCache4;
-  }
-  return baseCache3;
-}
-var CID = class _CID {
-  code;
-  version;
-  multihash;
-  bytes;
-  "/";
-  /**
-   * @param version - Version of the CID
-   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param multihash - (Multi)hash of the of the content.
-   */
-  constructor(version, code5, multihash, bytes) {
-    this.code = code5;
-    this.version = version;
-    this.multihash = multihash;
-    this.bytes = bytes;
-    this["/"] = bytes;
-  }
-  /**
-   * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
-   * please either use `CID.asCID(cid)` or switch to new signalling mechanism
-   *
-   * @deprecated
-   */
-  get asCID() {
-    return this;
-  }
-  // ArrayBufferView
-  get byteOffset() {
-    return this.bytes.byteOffset;
-  }
-  // ArrayBufferView
-  get byteLength() {
-    return this.bytes.byteLength;
-  }
-  toV0() {
-    switch (this.version) {
-      case 0: {
-        return this;
-      }
-      case 1: {
-        const { code: code5, multihash } = this;
-        if (code5 !== DAG_PB_CODE) {
-          throw new Error("Cannot convert a non dag-pb CID to CIDv0");
-        }
-        if (multihash.code !== SHA_256_CODE) {
-          throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
-        }
-        return _CID.createV0(multihash);
-      }
-      default: {
-        throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`);
-      }
-    }
-  }
-  toV1() {
-    switch (this.version) {
-      case 0: {
-        const { code: code5, digest: digest2 } = this.multihash;
-        const multihash = create(code5, digest2);
-        return _CID.createV1(this.code, multihash);
-      }
-      case 1: {
-        return this;
-      }
-      default: {
-        throw Error(`Can not convert CID version ${this.version} to version 1. This is a bug please report`);
-      }
-    }
-  }
-  equals(other) {
-    return _CID.equals(this, other);
-  }
-  static equals(self, other) {
-    const unknown = other;
-    return unknown != null && self.code === unknown.code && self.version === unknown.version && equals2(self.multihash, unknown.multihash);
-  }
-  toString(base3) {
-    return format(this, base3);
-  }
-  toJSON() {
-    return { "/": format(this) };
-  }
-  link() {
-    return this;
-  }
-  [Symbol.toStringTag] = "CID";
-  // Legacy
-  [Symbol.for("nodejs.util.inspect.custom")]() {
-    return `CID(${this.toString()})`;
-  }
-  /**
-   * Takes any input `value` and returns a `CID` instance if it was
-   * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
-   * it will return value back. If `value` is not instance of this CID
-   * class, but is compatible CID it will return new instance of this
-   * `CID` class. Otherwise returns null.
-   *
-   * This allows two different incompatible versions of CID library to
-   * co-exist and interop as long as binary interface is compatible.
-   */
-  static asCID(input) {
-    if (input == null) {
-      return null;
-    }
-    const value = input;
-    if (value instanceof _CID) {
-      return value;
-    } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
-      const { version, code: code5, multihash, bytes } = value;
-      return new _CID(version, code5, multihash, bytes ?? encodeCID(version, code5, multihash.bytes));
-    } else if (value[cidSymbol] === true) {
-      const { version, multihash, code: code5 } = value;
-      const digest2 = decode5(multihash);
-      return _CID.create(version, code5, digest2);
-    } else {
-      return null;
-    }
-  }
-  /**
-   * @param version - Version of the CID
-   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param digest - (Multi)hash of the of the content.
-   */
-  static create(version, code5, digest2) {
-    if (typeof code5 !== "number") {
-      throw new Error("String codecs are no longer supported");
-    }
-    if (!(digest2.bytes instanceof Uint8Array)) {
-      throw new Error("Invalid digest");
-    }
-    switch (version) {
-      case 0: {
-        if (code5 !== DAG_PB_CODE) {
-          throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`);
-        } else {
-          return new _CID(version, code5, digest2, digest2.bytes);
-        }
-      }
-      case 1: {
-        const bytes = encodeCID(version, code5, digest2.bytes);
-        return new _CID(version, code5, digest2, bytes);
-      }
-      default: {
-        throw new Error("Invalid version");
-      }
-    }
-  }
-  /**
-   * Simplified version of `create` for CIDv0.
-   */
-  static createV0(digest2) {
-    return _CID.create(0, DAG_PB_CODE, digest2);
-  }
-  /**
-   * Simplified version of `create` for CIDv1.
-   *
-   * @param code - Content encoding format code.
-   * @param digest - Multihash of the content.
-   */
-  static createV1(code5, digest2) {
-    return _CID.create(1, code5, digest2);
-  }
-  /**
-   * Decoded a CID from its binary representation. The byte array must contain
-   * only the CID with no additional bytes.
-   *
-   * An error will be thrown if the bytes provided do not contain a valid
-   * binary representation of a CID.
-   */
-  static decode(bytes) {
-    const [cid, remainder] = _CID.decodeFirst(bytes);
-    if (remainder.length !== 0) {
-      throw new Error("Incorrect length");
-    }
-    return cid;
-  }
-  /**
-   * Decoded a CID from its binary representation at the beginning of a byte
-   * array.
-   *
-   * Returns an array with the first element containing the CID and the second
-   * element containing the remainder of the original byte array. The remainder
-   * will be a zero-length byte array if the provided bytes only contained a
-   * binary CID representation.
-   */
-  static decodeFirst(bytes) {
-    const specs = _CID.inspectBytes(bytes);
-    const prefixSize = specs.size - specs.multihashSize;
-    const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize));
-    if (multihashBytes.byteLength !== specs.multihashSize) {
-      throw new Error("Incorrect length");
-    }
-    const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize);
-    const digest2 = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes);
-    const cid = specs.version === 0 ? _CID.createV0(digest2) : _CID.createV1(specs.codec, digest2);
-    return [cid, bytes.subarray(specs.size)];
-  }
-  /**
-   * Inspect the initial bytes of a CID to determine its properties.
-   *
-   * Involves decoding up to 4 varints. Typically this will require only 4 to 6
-   * bytes but for larger multicodec code values and larger multihash digest
-   * lengths these varints can be quite large. It is recommended that at least
-   * 10 bytes be made available in the `initialBytes` argument for a complete
-   * inspection.
-   */
-  static inspectBytes(initialBytes) {
-    let offset = 0;
-    const next = () => {
-      const [i, length4] = decode4(initialBytes.subarray(offset));
-      offset += length4;
-      return i;
-    };
-    let version = next();
-    let codec = DAG_PB_CODE;
-    if (version === 18) {
-      version = 0;
-      offset = 0;
-    } else {
-      codec = next();
-    }
-    if (version !== 0 && version !== 1) {
-      throw new RangeError(`Invalid CID version ${version}`);
-    }
-    const prefixSize = offset;
-    const multihashCode = next();
-    const digestSize = next();
-    const size = offset + digestSize;
-    const multihashSize = size - prefixSize;
-    return { version, codec, multihashCode, digestSize, multihashSize, size };
-  }
-  /**
-   * Takes cid in a string representation and creates an instance. If `base`
-   * decoder is not provided will use a default from the configuration. It will
-   * throw an error if encoding of the CID is not compatible with supplied (or
-   * a default decoder).
-   */
-  static parse(source, base3) {
-    const [prefix, bytes] = parseCIDtoBytes(source, base3);
-    const cid = _CID.decode(bytes);
-    if (cid.version === 0 && source[0] !== "Q") {
-      throw Error("Version 0 CID string must not include multibase prefix");
-    }
-    baseCache(cid).set(prefix, source);
-    return cid;
-  }
-};
-function parseCIDtoBytes(source, base3) {
-  switch (source[0]) {
-    case "Q": {
-      const decoder = base3 ?? base58btc;
-      return [
-        base58btc.prefix,
-        decoder.decode(`${base58btc.prefix}${source}`)
-      ];
-    }
-    case base58btc.prefix: {
-      const decoder = base3 ?? base58btc;
-      return [base58btc.prefix, decoder.decode(source)];
-    }
-    case base32.prefix: {
-      const decoder = base3 ?? base32;
-      return [base32.prefix, decoder.decode(source)];
-    }
-    default: {
-      if (base3 == null) {
-        throw Error("To parse non base32 or base58btc encoded CID multibase decoder must be provided");
-      }
-      return [source[0], base3.decode(source)];
-    }
-  }
-}
-function toStringV0(bytes, cache3, base3) {
-  const { prefix } = base3;
-  if (prefix !== base58btc.prefix) {
-    throw Error(`Cannot string encode V0 in ${base3.name} encoding`);
-  }
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes).slice(1);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-}
-function toStringV1(bytes, cache3, base3) {
-  const { prefix } = base3;
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-}
-var DAG_PB_CODE = 112;
-var SHA_256_CODE = 18;
-function encodeCID(version, code5, multihash) {
-  const codeOffset = encodingLength(version);
-  const hashOffset = codeOffset + encodingLength(code5);
-  const bytes = new Uint8Array(hashOffset + multihash.byteLength);
-  encodeTo(version, bytes, 0);
-  encodeTo(code5, bytes, codeOffset);
-  bytes.set(multihash, hashOffset);
-  return bytes;
-}
-var cidSymbol = Symbol.for("@ipld/js-cid/CID");
-
-// node_modules/@ipld/dag-cbor/src/index.js
-var CID_CBOR_TAG = 42;
-function cidEncoder(obj) {
-  if (obj.asCID !== obj && obj["/"] !== obj.bytes) {
-    return null;
-  }
-  const cid = CID.asCID(obj);
-  if (!cid) {
-    return null;
-  }
-  const bytes = new Uint8Array(cid.bytes.byteLength + 1);
-  bytes.set(cid.bytes, 1);
-  return [
-    new Token(Type.tag, CID_CBOR_TAG),
-    new Token(Type.bytes, bytes)
-  ];
-}
-function undefinedEncoder() {
-  throw new Error("`undefined` is not supported by the IPLD Data Model and cannot be encoded");
-}
-function numberEncoder(num) {
-  if (Number.isNaN(num)) {
-    throw new Error("`NaN` is not supported by the IPLD Data Model and cannot be encoded");
-  }
-  if (num === Infinity || num === -Infinity) {
-    throw new Error("`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded");
-  }
-  return null;
-}
-var _encodeOptions = {
-  float64: true,
-  typeEncoders: {
-    Object: cidEncoder,
-    undefined: undefinedEncoder,
-    number: numberEncoder
-  }
-};
-var encodeOptions = {
-  ..._encodeOptions,
-  typeEncoders: {
-    ..._encodeOptions.typeEncoders
-  }
-};
-function cidDecoder(bytes) {
-  if (bytes[0] !== 0) {
-    throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
-  }
-  return CID.decode(bytes.subarray(1));
-}
-var _decodeOptions = {
-  allowIndefinite: false,
-  coerceUndefinedToNull: true,
-  allowNaN: false,
-  allowInfinity: false,
-  allowBigInt: true,
-  // this will lead to BigInt for ints outside of
-  // safe-integer range, which may surprise users
-  strict: true,
-  useMaps: false,
-  rejectDuplicateMapKeys: true,
-  /** @type {import('cborg').TagDecoder[]} */
-  tags: []
-};
-_decodeOptions.tags[CID_CBOR_TAG] = cidDecoder;
-var decodeOptions = {
-  ..._decodeOptions,
-  tags: _decodeOptions.tags.slice()
-};
-var code = 113;
-var encode4 = (node) => encode(node, _encodeOptions);
-var decode6 = (data) => decode(data, _decodeOptions);
-
-// node_modules/multiformats/src/bases/base32.js
-var base32_exports = {};
-__export(base32_exports, {
-  base32: () => base322,
-  base32hex: () => base32hex2,
-  base32hexpad: () => base32hexpad2,
-  base32hexpadupper: () => base32hexpadupper2,
-  base32hexupper: () => base32hexupper2,
-  base32pad: () => base32pad2,
-  base32padupper: () => base32padupper2,
-  base32upper: () => base32upper2,
-  base32z: () => base32z2
-});
-
-// node_modules/multiformats/vendor/base-x.js
-function base2(ALPHABET, name4) {
-  if (ALPHABET.length >= 255) {
-    throw new TypeError("Alphabet too long");
-  }
-  var BASE_MAP = new Uint8Array(256);
-  for (var j = 0; j < BASE_MAP.length; j++) {
-    BASE_MAP[j] = 255;
-  }
-  for (var i = 0; i < ALPHABET.length; i++) {
-    var x = ALPHABET.charAt(i);
-    var xc = x.charCodeAt(0);
-    if (BASE_MAP[xc] !== 255) {
-      throw new TypeError(x + " is ambiguous");
-    }
-    BASE_MAP[xc] = i;
-  }
-  var BASE = ALPHABET.length;
-  var LEADER = ALPHABET.charAt(0);
-  var FACTOR = Math.log(BASE) / Math.log(256);
-  var iFACTOR = Math.log(256) / Math.log(BASE);
-  function encode12(source) {
-    if (source instanceof Uint8Array)
-      ;
-    else if (ArrayBuffer.isView(source)) {
-      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
-    } else if (Array.isArray(source)) {
-      source = Uint8Array.from(source);
-    }
-    if (!(source instanceof Uint8Array)) {
-      throw new TypeError("Expected Uint8Array");
-    }
-    if (source.length === 0) {
-      return "";
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    var pbegin = 0;
-    var pend = source.length;
-    while (pbegin !== pend && source[pbegin] === 0) {
-      pbegin++;
-      zeroes++;
-    }
-    var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
-    var b58 = new Uint8Array(size);
-    while (pbegin !== pend) {
-      var carry = source[pbegin];
-      var i2 = 0;
-      for (var it1 = size - 1; (carry !== 0 || i2 < length4) && it1 !== -1; it1--, i2++) {
-        carry += 256 * b58[it1] >>> 0;
-        b58[it1] = carry % BASE >>> 0;
-        carry = carry / BASE >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      pbegin++;
-    }
-    var it2 = size - length4;
-    while (it2 !== size && b58[it2] === 0) {
-      it2++;
-    }
-    var str = LEADER.repeat(zeroes);
-    for (; it2 < size; ++it2) {
-      str += ALPHABET.charAt(b58[it2]);
-    }
-    return str;
-  }
-  function decodeUnsafe(source) {
-    if (typeof source !== "string") {
-      throw new TypeError("Expected String");
-    }
-    if (source.length === 0) {
-      return new Uint8Array();
-    }
-    var psz = 0;
-    if (source[psz] === " ") {
-      return;
-    }
-    var zeroes = 0;
-    var length4 = 0;
-    while (source[psz] === LEADER) {
-      zeroes++;
-      psz++;
-    }
-    var size = (source.length - psz) * FACTOR + 1 >>> 0;
-    var b256 = new Uint8Array(size);
-    while (source[psz]) {
-      var carry = BASE_MAP[source.charCodeAt(psz)];
-      if (carry === 255) {
-        return;
-      }
-      var i2 = 0;
-      for (var it3 = size - 1; (carry !== 0 || i2 < length4) && it3 !== -1; it3--, i2++) {
-        carry += BASE * b256[it3] >>> 0;
-        b256[it3] = carry % 256 >>> 0;
-        carry = carry / 256 >>> 0;
-      }
-      if (carry !== 0) {
-        throw new Error("Non-zero carry");
-      }
-      length4 = i2;
-      psz++;
-    }
-    if (source[psz] === " ") {
-      return;
-    }
-    var it4 = size - length4;
-    while (it4 !== size && b256[it4] === 0) {
-      it4++;
-    }
-    var vch = new Uint8Array(zeroes + (size - it4));
-    var j2 = zeroes;
-    while (it4 !== size) {
-      vch[j2++] = b256[it4++];
-    }
-    return vch;
-  }
-  function decode15(string2) {
-    var buffer2 = decodeUnsafe(string2);
-    if (buffer2) {
-      return buffer2;
-    }
-    throw new Error(`Non-${name4} character`);
-  }
-  return {
-    encode: encode12,
-    decodeUnsafe,
-    decode: decode15
-  };
-}
-var src2 = base2;
-var _brrp__multiformats_scope_baseX2 = src2;
-var base_x_default2 = _brrp__multiformats_scope_baseX2;
-
-// node_modules/multiformats/src/bytes.js
-var bytes_exports2 = {};
-__export(bytes_exports2, {
-  coerce: () => coerce2,
-  empty: () => empty2,
-  equals: () => equals3,
-  fromHex: () => fromHex,
-  fromString: () => fromString2,
-  isBinary: () => isBinary,
-  toHex: () => toHex,
-  toString: () => toString2
-});
-var empty2 = new Uint8Array(0);
-var toHex = (d) => d.reduce((hex, byte) => hex + byte.toString(16).padStart(2, "0"), "");
-var fromHex = (hex) => {
-  const hexes = hex.match(/../g);
-  return hexes ? new Uint8Array(hexes.map((b) => parseInt(b, 16))) : empty2;
-};
-var equals3 = (aa, bb) => {
-  if (aa === bb)
-    return true;
-  if (aa.byteLength !== bb.byteLength) {
-    return false;
-  }
-  for (let ii = 0; ii < aa.byteLength; ii++) {
-    if (aa[ii] !== bb[ii]) {
-      return false;
-    }
-  }
-  return true;
-};
-var coerce2 = (o) => {
-  if (o instanceof Uint8Array && o.constructor.name === "Uint8Array")
-    return o;
-  if (o instanceof ArrayBuffer)
-    return new Uint8Array(o);
-  if (ArrayBuffer.isView(o)) {
-    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
-  }
-  throw new Error("Unknown type, must be binary type");
-};
-var isBinary = (o) => o instanceof ArrayBuffer || ArrayBuffer.isView(o);
-var fromString2 = (str) => new TextEncoder().encode(str);
-var toString2 = (b) => new TextDecoder().decode(b);
-
-// node_modules/multiformats/src/bases/base.js
-var Encoder2 = class {
-  /**
-   * @param {Base} name
-   * @param {Prefix} prefix
-   * @param {(bytes:Uint8Array) => string} baseEncode
-   */
-  constructor(name4, prefix, baseEncode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-  }
-  /**
-   * @param {Uint8Array} bytes
-   * @returns {API.Multibase}
-   */
-  encode(bytes) {
-    if (bytes instanceof Uint8Array) {
-      return `${this.prefix}${this.baseEncode(bytes)}`;
-    } else {
-      throw Error("Unknown type, must be binary type");
-    }
-  }
-};
-var Decoder2 = class {
-  /**
-   * @param {Base} name
-   * @param {Prefix} prefix
-   * @param {(text:string) => Uint8Array} baseDecode
-   */
-  constructor(name4, prefix, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    if (prefix.codePointAt(0) === void 0) {
-      throw new Error("Invalid prefix character");
-    }
-    this.prefixCodePoint = /** @type {number} */
-    prefix.codePointAt(0);
-    this.baseDecode = baseDecode;
-  }
-  /**
-   * @param {string} text
-   */
-  decode(text) {
-    if (typeof text === "string") {
-      if (text.codePointAt(0) !== this.prefixCodePoint) {
-        throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
-      }
-      return this.baseDecode(text.slice(this.prefix.length));
-    } else {
-      throw Error("Can only multibase decode strings");
-    }
-  }
-  /**
-   * @template {string} OtherPrefix
-   * @param {API.UnibaseDecoder|ComposedDecoder} decoder
-   * @returns {ComposedDecoder}
-   */
-  or(decoder) {
-    return or2(this, decoder);
-  }
-};
-var ComposedDecoder2 = class {
-  /**
-   * @param {Decoders} decoders
-   */
-  constructor(decoders) {
-    this.decoders = decoders;
-  }
-  /**
-   * @template {string} OtherPrefix
-   * @param {API.UnibaseDecoder|ComposedDecoder} decoder
-   * @returns {ComposedDecoder}
-   */
-  or(decoder) {
-    return or2(this, decoder);
-  }
-  /**
-   * @param {string} input
-   * @returns {Uint8Array}
-   */
-  decode(input) {
-    const prefix = (
-      /** @type {Prefix} */
-      input[0]
-    );
-    const decoder = this.decoders[prefix];
-    if (decoder) {
-      return decoder.decode(input);
-    } else {
-      throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
-    }
-  }
-};
-var or2 = (left, right) => new ComposedDecoder2(
-  /** @type {Decoders} */
-  {
-    ...left.decoders || { [
-      /** @type API.UnibaseDecoder */
-      left.prefix
-    ]: left },
-    ...right.decoders || { [
-      /** @type API.UnibaseDecoder */
-      right.prefix
-    ]: right }
-  }
-);
-var Codec2 = class {
-  /**
-   * @param {Base} name
-   * @param {Prefix} prefix
-   * @param {(bytes:Uint8Array) => string} baseEncode
-   * @param {(text:string) => Uint8Array} baseDecode
-   */
-  constructor(name4, prefix, baseEncode, baseDecode) {
-    this.name = name4;
-    this.prefix = prefix;
-    this.baseEncode = baseEncode;
-    this.baseDecode = baseDecode;
-    this.encoder = new Encoder2(name4, prefix, baseEncode);
-    this.decoder = new Decoder2(name4, prefix, baseDecode);
-  }
-  /**
-   * @param {Uint8Array} input
-   */
-  encode(input) {
-    return this.encoder.encode(input);
-  }
-  /**
-   * @param {string} input
-   */
-  decode(input) {
-    return this.decoder.decode(input);
-  }
-};
-var from2 = ({ name: name4, prefix, encode: encode12, decode: decode15 }) => new Codec2(name4, prefix, encode12, decode15);
-var baseX2 = ({ prefix, name: name4, alphabet: alphabet2 }) => {
-  const { encode: encode12, decode: decode15 } = base_x_default2(alphabet2, name4);
-  return from2({
-    prefix,
-    name: name4,
-    encode: encode12,
-    /**
-     * @param {string} text
-     */
-    decode: (text) => coerce2(decode15(text))
-  });
-};
-var decode7 = (string2, alphabet2, bitsPerChar, name4) => {
-  const codes = {};
-  for (let i = 0; i < alphabet2.length; ++i) {
-    codes[alphabet2[i]] = i;
-  }
-  let end = string2.length;
-  while (string2[end - 1] === "=") {
-    --end;
-  }
-  const out = new Uint8Array(end * bitsPerChar / 8 | 0);
-  let bits = 0;
-  let buffer2 = 0;
-  let written = 0;
-  for (let i = 0; i < end; ++i) {
-    const value = codes[string2[i]];
-    if (value === void 0) {
-      throw new SyntaxError(`Non-${name4} character`);
-    }
-    buffer2 = buffer2 << bitsPerChar | value;
-    bits += bitsPerChar;
-    if (bits >= 8) {
-      bits -= 8;
-      out[written++] = 255 & buffer2 >> bits;
-    }
-  }
-  if (bits >= bitsPerChar || 255 & buffer2 << 8 - bits) {
-    throw new SyntaxError("Unexpected end of data");
-  }
-  return out;
-};
-var encode5 = (data, alphabet2, bitsPerChar) => {
-  const pad = alphabet2[alphabet2.length - 1] === "=";
-  const mask = (1 << bitsPerChar) - 1;
-  let out = "";
-  let bits = 0;
-  let buffer2 = 0;
-  for (let i = 0; i < data.length; ++i) {
-    buffer2 = buffer2 << 8 | data[i];
-    bits += 8;
-    while (bits > bitsPerChar) {
-      bits -= bitsPerChar;
-      out += alphabet2[mask & buffer2 >> bits];
-    }
-  }
-  if (bits) {
-    out += alphabet2[mask & buffer2 << bitsPerChar - bits];
-  }
-  if (pad) {
-    while (out.length * bitsPerChar & 7) {
-      out += "=";
-    }
-  }
-  return out;
-};
-var rfc46482 = ({ name: name4, prefix, bitsPerChar, alphabet: alphabet2 }) => {
-  return from2({
-    prefix,
-    name: name4,
-    encode(input) {
-      return encode5(input, alphabet2, bitsPerChar);
-    },
-    decode(input) {
-      return decode7(input, alphabet2, bitsPerChar, name4);
-    }
-  });
-};
-
-// node_modules/multiformats/src/bases/base32.js
-var base322 = rfc46482({
-  prefix: "b",
-  name: "base32",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567",
-  bitsPerChar: 5
-});
-var base32upper2 = rfc46482({
-  prefix: "B",
-  name: "base32upper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
-  bitsPerChar: 5
-});
-var base32pad2 = rfc46482({
-  prefix: "c",
-  name: "base32pad",
-  alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
-  bitsPerChar: 5
-});
-var base32padupper2 = rfc46482({
-  prefix: "C",
-  name: "base32padupper",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
-  bitsPerChar: 5
-});
-var base32hex2 = rfc46482({
-  prefix: "v",
-  name: "base32hex",
-  alphabet: "0123456789abcdefghijklmnopqrstuv",
-  bitsPerChar: 5
-});
-var base32hexupper2 = rfc46482({
-  prefix: "V",
-  name: "base32hexupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
-  bitsPerChar: 5
-});
-var base32hexpad2 = rfc46482({
-  prefix: "t",
-  name: "base32hexpad",
-  alphabet: "0123456789abcdefghijklmnopqrstuv=",
-  bitsPerChar: 5
-});
-var base32hexpadupper2 = rfc46482({
-  prefix: "T",
-  name: "base32hexpadupper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
-  bitsPerChar: 5
-});
-var base32z2 = rfc46482({
-  prefix: "h",
-  name: "base32z",
-  alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
-  bitsPerChar: 5
-});
-
-// node_modules/multiformats/src/bases/base58.js
-var base58_exports = {};
-__export(base58_exports, {
-  base58btc: () => base58btc2,
-  base58flickr: () => base58flickr2
-});
-var base58btc2 = baseX2({
-  name: "base58btc",
-  prefix: "z",
-  alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
-});
-var base58flickr2 = baseX2({
-  name: "base58flickr",
-  prefix: "Z",
-  alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
-});
-
-// node_modules/multiformats/vendor/varint.js
-var encode_12 = encode6;
-var MSB2 = 128;
-var REST2 = 127;
-var MSBALL2 = ~REST2;
-var INT2 = Math.pow(2, 31);
-function encode6(num, out, offset) {
-  out = out || [];
-  offset = offset || 0;
-  var oldOffset = offset;
-  while (num >= INT2) {
-    out[offset++] = num & 255 | MSB2;
-    num /= 128;
-  }
-  while (num & MSBALL2) {
-    out[offset++] = num & 255 | MSB2;
-    num >>>= 7;
-  }
-  out[offset] = num | 0;
-  encode6.bytes = offset - oldOffset + 1;
-  return out;
-}
-var decode8 = read2;
-var MSB$12 = 128;
-var REST$12 = 127;
-function read2(buf2, offset) {
-  var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf2.length;
-  do {
-    if (counter >= l) {
-      read2.bytes = 0;
-      throw new RangeError("Could not decode varint");
-    }
-    b = buf2[counter++];
-    res += shift < 28 ? (b & REST$12) << shift : (b & REST$12) * Math.pow(2, shift);
-    shift += 7;
-  } while (b >= MSB$12);
-  read2.bytes = counter - offset;
-  return res;
-}
-var N12 = Math.pow(2, 7);
-var N22 = Math.pow(2, 14);
-var N32 = Math.pow(2, 21);
-var N42 = Math.pow(2, 28);
-var N52 = Math.pow(2, 35);
-var N62 = Math.pow(2, 42);
-var N72 = Math.pow(2, 49);
-var N82 = Math.pow(2, 56);
-var N92 = Math.pow(2, 63);
-var length2 = function(value) {
-  return value < N12 ? 1 : value < N22 ? 2 : value < N32 ? 3 : value < N42 ? 4 : value < N52 ? 5 : value < N62 ? 6 : value < N72 ? 7 : value < N82 ? 8 : value < N92 ? 9 : 10;
-};
-var varint2 = {
-  encode: encode_12,
-  decode: decode8,
-  encodingLength: length2
-};
-var _brrp_varint2 = varint2;
-var varint_default2 = _brrp_varint2;
-
-// node_modules/multiformats/src/varint.js
-var decode9 = (data, offset = 0) => {
-  const code5 = varint_default2.decode(data, offset);
-  return [code5, varint_default2.decode.bytes];
-};
-var encodeTo2 = (int, target, offset = 0) => {
-  varint_default2.encode(int, target, offset);
-  return target;
-};
-var encodingLength2 = (int) => {
-  return varint_default2.encodingLength(int);
-};
-
-// node_modules/multiformats/src/hashes/digest.js
-var create2 = (code5, digest2) => {
-  const size = digest2.byteLength;
-  const sizeOffset = encodingLength2(code5);
-  const digestOffset = sizeOffset + encodingLength2(size);
-  const bytes = new Uint8Array(digestOffset + size);
-  encodeTo2(code5, bytes, 0);
-  encodeTo2(size, bytes, sizeOffset);
-  bytes.set(digest2, digestOffset);
-  return new Digest2(code5, size, digest2, bytes);
-};
-var decode10 = (multihash) => {
-  const bytes = coerce2(multihash);
-  const [code5, sizeOffset] = decode9(bytes);
-  const [size, digestOffset] = decode9(bytes.subarray(sizeOffset));
-  const digest2 = bytes.subarray(sizeOffset + digestOffset);
-  if (digest2.byteLength !== size) {
-    throw new Error("Incorrect length");
-  }
-  return new Digest2(code5, size, digest2, bytes);
-};
-var equals4 = (a, b) => {
-  if (a === b) {
-    return true;
-  } else {
-    const data = (
-      /** @type {{code?:unknown, size?:unknown, bytes?:unknown}} */
-      b
-    );
-    return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals3(a.bytes, data.bytes);
-  }
-};
-var Digest2 = class {
-  /**
-   * Creates a multihash digest.
-   *
-   * @param {Code} code
-   * @param {Size} size
-   * @param {Uint8Array} digest
-   * @param {Uint8Array} bytes
-   */
-  constructor(code5, size, digest2, bytes) {
-    this.code = code5;
-    this.size = size;
-    this.digest = digest2;
-    this.bytes = bytes;
-  }
-};
-
-// node_modules/multiformats/src/cid.js
-var format2 = (link, base3) => {
-  const { bytes, version } = link;
-  switch (version) {
-    case 0:
-      return toStringV02(
-        bytes,
-        baseCache2(link),
-        /** @type {API.MultibaseEncoder<"z">} */
-        base3 || base58btc2.encoder
-      );
-    default:
-      return toStringV12(
-        bytes,
-        baseCache2(link),
-        /** @type {API.MultibaseEncoder} */
-        base3 || base322.encoder
-      );
-  }
-};
-var cache2 = /* @__PURE__ */ new WeakMap();
-var baseCache2 = (cid) => {
-  const baseCache3 = cache2.get(cid);
-  if (baseCache3 == null) {
-    const baseCache4 = /* @__PURE__ */ new Map();
-    cache2.set(cid, baseCache4);
-    return baseCache4;
-  }
-  return baseCache3;
-};
-var CID2 = class _CID {
-  /**
-   * @param {Version} version - Version of the CID
-   * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param {API.MultihashDigest} multihash - (Multi)hash of the of the content.
-   * @param {Uint8Array} bytes
-   */
-  constructor(version, code5, multihash, bytes) {
-    this.code = code5;
-    this.version = version;
-    this.multihash = multihash;
-    this.bytes = bytes;
-    this["/"] = bytes;
-  }
-  /**
-   * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
-   * please either use `CID.asCID(cid)` or switch to new signalling mechanism
-   *
-   * @deprecated
-   */
-  get asCID() {
-    return this;
-  }
-  // ArrayBufferView
-  get byteOffset() {
-    return this.bytes.byteOffset;
-  }
-  // ArrayBufferView
-  get byteLength() {
-    return this.bytes.byteLength;
-  }
-  /**
-   * @returns {CID}
-   */
-  toV0() {
-    switch (this.version) {
-      case 0: {
-        return (
-          /** @type {CID} */
-          this
-        );
-      }
-      case 1: {
-        const { code: code5, multihash } = this;
-        if (code5 !== DAG_PB_CODE2) {
-          throw new Error("Cannot convert a non dag-pb CID to CIDv0");
-        }
-        if (multihash.code !== SHA_256_CODE2) {
-          throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
-        }
-        return (
-          /** @type {CID} */
-          _CID.createV0(
-            /** @type {API.MultihashDigest} */
-            multihash
-          )
-        );
-      }
-      default: {
-        throw Error(
-          `Can not convert CID version ${this.version} to version 0. This is a bug please report`
-        );
-      }
-    }
-  }
-  /**
-   * @returns {CID}
-   */
-  toV1() {
-    switch (this.version) {
-      case 0: {
-        const { code: code5, digest: digest2 } = this.multihash;
-        const multihash = create2(code5, digest2);
-        return (
-          /** @type {CID} */
-          _CID.createV1(this.code, multihash)
-        );
-      }
-      case 1: {
-        return (
-          /** @type {CID} */
-          this
-        );
-      }
-      default: {
-        throw Error(
-          `Can not convert CID version ${this.version} to version 1. This is a bug please report`
-        );
-      }
-    }
-  }
-  /**
-   * @param {unknown} other
-   * @returns {other is CID}
-   */
-  equals(other) {
-    return _CID.equals(this, other);
-  }
-  /**
-   * @template {unknown} Data
-   * @template {number} Format
-   * @template {number} Alg
-   * @template {API.Version} Version
-   * @param {API.Link} self
-   * @param {unknown} other
-   * @returns {other is CID}
-   */
-  static equals(self, other) {
-    const unknown = (
-      /** @type {{code?:unknown, version?:unknown, multihash?:unknown}} */
-      other
-    );
-    return unknown && self.code === unknown.code && self.version === unknown.version && equals4(self.multihash, unknown.multihash);
-  }
-  /**
-   * @param {API.MultibaseEncoder} [base]
-   * @returns {string}
-   */
-  toString(base3) {
-    return format2(this, base3);
-  }
-  /**
-   * @returns {API.LinkJSON}
-   */
-  toJSON() {
-    return { "/": format2(this) };
-  }
-  link() {
-    return this;
-  }
-  get [Symbol.toStringTag]() {
-    return "CID";
-  }
-  // Legacy
-  [Symbol.for("nodejs.util.inspect.custom")]() {
-    return `CID(${this.toString()})`;
-  }
-  /**
-   * Takes any input `value` and returns a `CID` instance if it was
-   * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
-   * it will return value back. If `value` is not instance of this CID
-   * class, but is compatible CID it will return new instance of this
-   * `CID` class. Otherwise returns null.
-   *
-   * This allows two different incompatible versions of CID library to
-   * co-exist and interop as long as binary interface is compatible.
-   *
-   * @template {unknown} Data
-   * @template {number} Format
-   * @template {number} Alg
-   * @template {API.Version} Version
-   * @template {unknown} U
-   * @param {API.Link|U} input
-   * @returns {CID|null}
-   */
-  static asCID(input) {
-    if (input == null) {
-      return null;
-    }
-    const value = (
-      /** @type {any} */
-      input
-    );
-    if (value instanceof _CID) {
-      return value;
-    } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
-      const { version, code: code5, multihash, bytes } = value;
-      return new _CID(
-        version,
-        code5,
-        /** @type {API.MultihashDigest} */
-        multihash,
-        bytes || encodeCID2(version, code5, multihash.bytes)
-      );
-    } else if (value[cidSymbol2] === true) {
-      const { version, multihash, code: code5 } = value;
-      const digest2 = (
-        /** @type {API.MultihashDigest} */
-        decode10(multihash)
-      );
-      return _CID.create(version, code5, digest2);
-    } else {
-      return null;
-    }
-  }
-  /**
-   *
-   * @template {unknown} Data
-   * @template {number} Format
-   * @template {number} Alg
-   * @template {API.Version} Version
-   * @param {Version} version - Version of the CID
-   * @param {Format} code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
-   * @param {API.MultihashDigest} digest - (Multi)hash of the of the content.
-   * @returns {CID}
-   */
-  static create(version, code5, digest2) {
-    if (typeof code5 !== "number") {
-      throw new Error("String codecs are no longer supported");
-    }
-    if (!(digest2.bytes instanceof Uint8Array)) {
-      throw new Error("Invalid digest");
-    }
-    switch (version) {
-      case 0: {
-        if (code5 !== DAG_PB_CODE2) {
-          throw new Error(
-            `Version 0 CID must use dag-pb (code: ${DAG_PB_CODE2}) block encoding`
-          );
-        } else {
-          return new _CID(version, code5, digest2, digest2.bytes);
-        }
-      }
-      case 1: {
-        const bytes = encodeCID2(version, code5, digest2.bytes);
-        return new _CID(version, code5, digest2, bytes);
-      }
-      default: {
-        throw new Error("Invalid version");
-      }
-    }
-  }
-  /**
-   * Simplified version of `create` for CIDv0.
-   *
-   * @template {unknown} [T=unknown]
-   * @param {API.MultihashDigest} digest - Multihash.
-   * @returns {CID}
-   */
-  static createV0(digest2) {
-    return _CID.create(0, DAG_PB_CODE2, digest2);
-  }
-  /**
-   * Simplified version of `create` for CIDv1.
-   *
-   * @template {unknown} Data
-   * @template {number} Code
-   * @template {number} Alg
-   * @param {Code} code - Content encoding format code.
-   * @param {API.MultihashDigest} digest - Miltihash of the content.
-   * @returns {CID}
-   */
-  static createV1(code5, digest2) {
-    return _CID.create(1, code5, digest2);
-  }
-  /**
-   * Decoded a CID from its binary representation. The byte array must contain
-   * only the CID with no additional bytes.
-   *
-   * An error will be thrown if the bytes provided do not contain a valid
-   * binary representation of a CID.
-   *
-   * @template {unknown} Data
-   * @template {number} Code
-   * @template {number} Alg
-   * @template {API.Version} Ver
-   * @param {API.ByteView>} bytes
-   * @returns {CID}
-   */
-  static decode(bytes) {
-    const [cid, remainder] = _CID.decodeFirst(bytes);
-    if (remainder.length) {
-      throw new Error("Incorrect length");
-    }
-    return cid;
-  }
-  /**
-   * Decoded a CID from its binary representation at the beginning of a byte
-   * array.
-   *
-   * Returns an array with the first element containing the CID and the second
-   * element containing the remainder of the original byte array. The remainder
-   * will be a zero-length byte array if the provided bytes only contained a
-   * binary CID representation.
-   *
-   * @template {unknown} T
-   * @template {number} C
-   * @template {number} A
-   * @template {API.Version} V
-   * @param {API.ByteView>} bytes
-   * @returns {[CID, Uint8Array]}
-   */
-  static decodeFirst(bytes) {
-    const specs = _CID.inspectBytes(bytes);
-    const prefixSize = specs.size - specs.multihashSize;
-    const multihashBytes = coerce2(
-      bytes.subarray(prefixSize, prefixSize + specs.multihashSize)
-    );
-    if (multihashBytes.byteLength !== specs.multihashSize) {
-      throw new Error("Incorrect length");
-    }
-    const digestBytes = multihashBytes.subarray(
-      specs.multihashSize - specs.digestSize
-    );
-    const digest2 = new Digest2(
-      specs.multihashCode,
-      specs.digestSize,
-      digestBytes,
-      multihashBytes
-    );
-    const cid = specs.version === 0 ? _CID.createV0(
-      /** @type {API.MultihashDigest} */
-      digest2
-    ) : _CID.createV1(specs.codec, digest2);
-    return [
-      /** @type {CID} */
-      cid,
-      bytes.subarray(specs.size)
-    ];
-  }
-  /**
-   * Inspect the initial bytes of a CID to determine its properties.
-   *
-   * Involves decoding up to 4 varints. Typically this will require only 4 to 6
-   * bytes but for larger multicodec code values and larger multihash digest
-   * lengths these varints can be quite large. It is recommended that at least
-   * 10 bytes be made available in the `initialBytes` argument for a complete
-   * inspection.
-   *
-   * @template {unknown} T
-   * @template {number} C
-   * @template {number} A
-   * @template {API.Version} V
-   * @param {API.ByteView>} initialBytes
-   * @returns {{ version:V, codec:C, multihashCode:A, digestSize:number, multihashSize:number, size:number }}
-   */
-  static inspectBytes(initialBytes) {
-    let offset = 0;
-    const next = () => {
-      const [i, length4] = decode9(initialBytes.subarray(offset));
-      offset += length4;
-      return i;
-    };
-    let version = (
-      /** @type {V} */
-      next()
-    );
-    let codec = (
-      /** @type {C} */
-      DAG_PB_CODE2
-    );
-    if (
-      /** @type {number} */
-      version === 18
-    ) {
-      version = /** @type {V} */
-      0;
-      offset = 0;
-    } else {
-      codec = /** @type {C} */
-      next();
-    }
-    if (version !== 0 && version !== 1) {
-      throw new RangeError(`Invalid CID version ${version}`);
-    }
-    const prefixSize = offset;
-    const multihashCode = (
-      /** @type {A} */
-      next()
-    );
-    const digestSize = next();
-    const size = offset + digestSize;
-    const multihashSize = size - prefixSize;
-    return { version, codec, multihashCode, digestSize, multihashSize, size };
-  }
-  /**
-   * Takes cid in a string representation and creates an instance. If `base`
-   * decoder is not provided will use a default from the configuration. It will
-   * throw an error if encoding of the CID is not compatible with supplied (or
-   * a default decoder).
-   *
-   * @template {string} Prefix
-   * @template {unknown} Data
-   * @template {number} Code
-   * @template {number} Alg
-   * @template {API.Version} Ver
-   * @param {API.ToString, Prefix>} source
-   * @param {API.MultibaseDecoder} [base]
-   * @returns {CID}
-   */
-  static parse(source, base3) {
-    const [prefix, bytes] = parseCIDtoBytes2(source, base3);
-    const cid = _CID.decode(bytes);
-    if (cid.version === 0 && source[0] !== "Q") {
-      throw Error("Version 0 CID string must not include multibase prefix");
-    }
-    baseCache2(cid).set(prefix, source);
-    return cid;
-  }
-};
-var parseCIDtoBytes2 = (source, base3) => {
-  switch (source[0]) {
-    case "Q": {
-      const decoder = base3 || base58btc2;
-      return [
-        /** @type {Prefix} */
-        base58btc2.prefix,
-        decoder.decode(`${base58btc2.prefix}${source}`)
-      ];
-    }
-    case base58btc2.prefix: {
-      const decoder = base3 || base58btc2;
-      return [
-        /** @type {Prefix} */
-        base58btc2.prefix,
-        decoder.decode(source)
-      ];
-    }
-    case base322.prefix: {
-      const decoder = base3 || base322;
-      return [
-        /** @type {Prefix} */
-        base322.prefix,
-        decoder.decode(source)
-      ];
-    }
-    default: {
-      if (base3 == null) {
-        throw Error(
-          "To parse non base32 or base58btc encoded CID multibase decoder must be provided"
-        );
-      }
-      return [
-        /** @type {Prefix} */
-        source[0],
-        base3.decode(source)
-      ];
-    }
-  }
-};
-var toStringV02 = (bytes, cache3, base3) => {
-  const { prefix } = base3;
-  if (prefix !== base58btc2.prefix) {
-    throw Error(`Cannot string encode V0 in ${base3.name} encoding`);
-  }
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes).slice(1);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-};
-var toStringV12 = (bytes, cache3, base3) => {
-  const { prefix } = base3;
-  const cid = cache3.get(prefix);
-  if (cid == null) {
-    const cid2 = base3.encode(bytes);
-    cache3.set(prefix, cid2);
-    return cid2;
-  } else {
-    return cid;
-  }
-};
-var DAG_PB_CODE2 = 112;
-var SHA_256_CODE2 = 18;
-var encodeCID2 = (version, code5, multihash) => {
-  const codeOffset = encodingLength2(version);
-  const hashOffset = codeOffset + encodingLength2(code5);
-  const bytes = new Uint8Array(hashOffset + multihash.byteLength);
-  encodeTo2(version, bytes, 0);
-  encodeTo2(code5, bytes, codeOffset);
-  bytes.set(multihash, hashOffset);
-  return bytes;
-};
-var cidSymbol2 = Symbol.for("@ipld/js-cid/CID");
-
-// node_modules/@ipld/car/src/decoder-common.js
-var import_varint3 = __toESM(require_varint(), 1);
-var V2_HEADER_LENGTH = (
-  /* characteristics */
-  16 + 8 + 8 + 8
-);
-function decodeVarint(bytes, seeker) {
-  if (!bytes.length) {
-    throw new Error("Unexpected end of data");
-  }
-  const i = import_varint3.default.decode(bytes);
-  seeker.seek(
-    /** @type {number} */
-    import_varint3.default.decode.bytes
-  );
-  return i;
-}
-function decodeV2Header(bytes) {
-  const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
-  let offset = 0;
-  const header = {
-    version: 2,
-    /** @type {[bigint, bigint]} */
-    characteristics: [
-      dv.getBigUint64(offset, true),
-      dv.getBigUint64(offset += 8, true)
-    ],
-    dataOffset: Number(dv.getBigUint64(offset += 8, true)),
-    dataSize: Number(dv.getBigUint64(offset += 8, true)),
-    indexOffset: Number(dv.getBigUint64(offset += 8, true))
-  };
-  return header;
-}
-
-// node_modules/@ipld/car/src/header-validator.js
-var Kinds = {
-  Null: (
-    /** @returns {undefined|null} */
-    (obj) => obj === null ? obj : void 0
-  ),
-  Int: (
-    /** @returns {undefined|number} */
-    (obj) => Number.isInteger(obj) ? obj : void 0
-  ),
-  Float: (
-    /** @returns {undefined|number} */
-    (obj) => typeof obj === "number" && Number.isFinite(obj) ? obj : void 0
-  ),
-  String: (
-    /** @returns {undefined|string} */
-    (obj) => typeof obj === "string" ? obj : void 0
-  ),
-  Bool: (
-    /** @returns {undefined|boolean} */
-    (obj) => typeof obj === "boolean" ? obj : void 0
-  ),
-  Bytes: (
-    /** @returns {undefined|Uint8Array} */
-    (obj) => obj instanceof Uint8Array ? obj : void 0
-  ),
-  Link: (
-    /** @returns {undefined|object} */
-    (obj) => obj !== null && typeof obj === "object" && obj.asCID === obj ? obj : void 0
-  ),
-  List: (
-    /** @returns {undefined|Array} */
-    (obj) => Array.isArray(obj) ? obj : void 0
-  ),
-  Map: (
-    /** @returns {undefined|object} */
-    (obj) => obj !== null && typeof obj === "object" && obj.asCID !== obj && !Array.isArray(obj) && !(obj instanceof Uint8Array) ? obj : void 0
-  )
-};
-var Types = {
-  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
-  "CarV1HeaderOrV2Pragma > roots (anon)": (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.List(obj) === void 0) {
-        return void 0;
-      }
-      for (let i = 0; i < obj.length; i++) {
-        let v = obj[i];
-        v = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
-        if (v === void 0) {
-          return void 0;
-        }
-        if (v !== obj[i]) {
-          const ret = obj.slice(0, i);
-          for (let j = i; j < obj.length; j++) {
-            let v2 = obj[j];
-            v2 = Types["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
-            if (v2 === void 0) {
-              return void 0;
-            }
-            ret.push(v2);
-          }
-          return ret;
-        }
-      }
-      return obj;
-    }
-  ),
-  Int: Kinds.Int,
-  CarV1HeaderOrV2Pragma: (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.Map(obj) === void 0) {
-        return void 0;
-      }
-      const entries = Object.entries(obj);
-      let ret = obj;
-      let requiredCount = 1;
-      for (let i = 0; i < entries.length; i++) {
-        const [key, value] = entries[i];
-        switch (key) {
-          case "roots":
-            {
-              const v = Types["CarV1HeaderOrV2Pragma > roots (anon)"](obj[key]);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.roots = v;
-              }
-            }
-            break;
-          case "version":
-            {
-              requiredCount--;
-              const v = Types.Int(obj[key]);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.version = v;
-              }
-            }
-            break;
-          default:
-            return void 0;
-        }
-      }
-      if (requiredCount > 0) {
-        return void 0;
-      }
-      return ret;
-    }
-  )
-};
-var Reprs = {
-  "CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)": Kinds.Link,
-  "CarV1HeaderOrV2Pragma > roots (anon)": (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.List(obj) === void 0) {
-        return void 0;
-      }
-      for (let i = 0; i < obj.length; i++) {
-        let v = obj[i];
-        v = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v);
-        if (v === void 0) {
-          return void 0;
-        }
-        if (v !== obj[i]) {
-          const ret = obj.slice(0, i);
-          for (let j = i; j < obj.length; j++) {
-            let v2 = obj[j];
-            v2 = Reprs["CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)"](v2);
-            if (v2 === void 0) {
-              return void 0;
-            }
-            ret.push(v2);
-          }
-          return ret;
-        }
-      }
-      return obj;
-    }
-  ),
-  Int: Kinds.Int,
-  CarV1HeaderOrV2Pragma: (
-    /** @returns {undefined|any} */
-    (obj) => {
-      if (Kinds.Map(obj) === void 0) {
-        return void 0;
-      }
-      const entries = Object.entries(obj);
-      let ret = obj;
-      let requiredCount = 1;
-      for (let i = 0; i < entries.length; i++) {
-        const [key, value] = entries[i];
-        switch (key) {
-          case "roots":
-            {
-              const v = Reprs["CarV1HeaderOrV2Pragma > roots (anon)"](value);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.roots = v;
-              }
-            }
-            break;
-          case "version":
-            {
-              requiredCount--;
-              const v = Reprs.Int(value);
-              if (v === void 0) {
-                return void 0;
-              }
-              if (v !== value || ret !== obj) {
-                if (ret === obj) {
-                  ret = {};
-                  for (let j = 0; j < i; j++) {
-                    ret[entries[j][0]] = entries[j][1];
-                  }
-                }
-                ret.version = v;
-              }
-            }
-            break;
-          default:
-            return void 0;
-        }
-      }
-      if (requiredCount > 0) {
-        return void 0;
-      }
-      return ret;
-    }
-  )
-};
-var CarV1HeaderOrV2Pragma = {
-  toTyped: Types.CarV1HeaderOrV2Pragma,
-  toRepresentation: Reprs.CarV1HeaderOrV2Pragma
-};
-
-// node_modules/@ipld/car/src/buffer-reader.js
-var fsread = fs.readSync;
-
-// node_modules/cborg/lib/length.js
-var cborEncoders2 = makeCborEncoders();
-
-// node_modules/@ipld/car/src/buffer-writer.js
-var import_varint4 = __toESM(require_varint(), 1);
-var headerPreludeTokens = [
-  new Token(Type.map, 2),
-  new Token(Type.string, "version"),
-  new Token(Type.uint, 1),
-  new Token(Type.string, "roots")
-];
-var CID_TAG = new Token(Type.tag, 42);
-
-// node_modules/@ipld/car/src/decoder.js
-async function readHeader(reader, strictVersion) {
-  const length4 = decodeVarint(await reader.upTo(8), reader);
-  if (length4 === 0) {
-    throw new Error("Invalid CAR header (zero length)");
-  }
-  const header = await reader.exactly(length4, true);
-  const block = decode6(header);
-  if (CarV1HeaderOrV2Pragma.toTyped(block) === void 0) {
-    throw new Error("Invalid CAR header format");
-  }
-  if (block.version !== 1 && block.version !== 2 || strictVersion !== void 0 && block.version !== strictVersion) {
-    throw new Error(`Invalid CAR version: ${block.version}${strictVersion !== void 0 ? ` (expected ${strictVersion})` : ""}`);
-  }
-  if (block.version === 1) {
-    if (!Array.isArray(block.roots)) {
-      throw new Error("Invalid CAR header format");
-    }
-    return block;
-  }
-  if (block.roots !== void 0) {
-    throw new Error("Invalid CAR header format");
-  }
-  const v2Header = decodeV2Header(await reader.exactly(V2_HEADER_LENGTH, true));
-  reader.seek(v2Header.dataOffset - reader.pos);
-  const v1Header = await readHeader(reader, 1);
-  return Object.assign(v1Header, v2Header);
-}
-function bytesReader(bytes) {
-  let pos = 0;
-  return {
-    async upTo(length4) {
-      const out = bytes.subarray(pos, pos + Math.min(length4, bytes.length - pos));
-      return out;
-    },
-    async exactly(length4, seek = false) {
-      if (length4 > bytes.length - pos) {
-        throw new Error("Unexpected end of data");
-      }
-      const out = bytes.subarray(pos, pos + length4);
-      if (seek) {
-        pos += length4;
-      }
-      return out;
-    },
-    seek(length4) {
-      pos += length4;
-    },
-    get pos() {
-      return pos;
-    }
-  };
-}
-function chunkReader(readChunk) {
-  let pos = 0;
-  let have = 0;
-  let offset = 0;
-  let currentChunk = new Uint8Array(0);
-  const read4 = async (length4) => {
-    have = currentChunk.length - offset;
-    const bufa = [currentChunk.subarray(offset)];
-    while (have < length4) {
-      const chunk = await readChunk();
-      if (chunk == null) {
-        break;
-      }
-      if (have < 0) {
-        if (chunk.length > have) {
-          bufa.push(chunk.subarray(-have));
-        }
-      } else {
-        bufa.push(chunk);
-      }
-      have += chunk.length;
-    }
-    currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0));
-    let off = 0;
-    for (const b of bufa) {
-      currentChunk.set(b, off);
-      off += b.length;
-    }
-    offset = 0;
-  };
-  return {
-    async upTo(length4) {
-      if (currentChunk.length - offset < length4) {
-        await read4(length4);
-      }
-      return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length4));
-    },
-    async exactly(length4, seek = false) {
-      if (currentChunk.length - offset < length4) {
-        await read4(length4);
-      }
-      if (currentChunk.length - offset < length4) {
-        throw new Error("Unexpected end of data");
-      }
-      const out = currentChunk.subarray(offset, offset + length4);
-      if (seek) {
-        pos += length4;
-        offset += length4;
-      }
-      return out;
-    },
-    seek(length4) {
-      pos += length4;
-      offset += length4;
-    },
-    get pos() {
-      return pos;
-    }
-  };
-}
-
-// node_modules/@ipld/car/src/reader.js
-import fs2 from "fs";
-import { promisify } from "util";
-var fsread2 = promisify(fs2.read);
-
-// node_modules/@ipld/car/src/writer.js
-import fs3 from "fs";
-import { promisify as promisify2 } from "util";
-
-// node_modules/@ipld/car/src/encoder.js
-var import_varint5 = __toESM(require_varint(), 1);
-function createHeader(roots) {
-  const headerBytes = encode4({ version: 1, roots });
-  const varintBytes = import_varint5.default.encode(headerBytes.length);
-  const header = new Uint8Array(varintBytes.length + headerBytes.length);
-  header.set(varintBytes, 0);
-  header.set(headerBytes, varintBytes.length);
-  return header;
-}
-function createEncoder(writer) {
-  return {
-    /**
-     * @param {CID[]} roots
-     * @returns {Promise}
-     */
-    async setRoots(roots) {
-      const bytes = createHeader(roots);
-      await writer.write(bytes);
-    },
-    /**
-     * @param {Block} block
-     * @returns {Promise}
-     */
-    async writeBlock(block) {
-      const { cid, bytes } = block;
-      await writer.write(new Uint8Array(import_varint5.default.encode(cid.bytes.length + bytes.length)));
-      await writer.write(cid.bytes);
-      if (bytes.length) {
-        await writer.write(bytes);
-      }
-    },
-    /**
-     * @returns {Promise}
-     */
-    async close() {
-      await writer.end();
-    }
-  };
-}
-
-// node_modules/@ipld/car/src/iterator-channel.js
-function noop() {
-}
-function create3() {
-  const chunkQueue = [];
-  let drainer = null;
-  let drainerResolver = noop;
-  let ended = false;
-  let outWait = null;
-  let outWaitResolver = noop;
-  const makeDrainer = () => {
-    if (!drainer) {
-      drainer = new Promise((resolve6) => {
-        drainerResolver = () => {
-          drainer = null;
-          drainerResolver = noop;
-          resolve6();
-        };
-      });
-    }
-    return drainer;
-  };
-  const writer = {
-    /**
-     * @param {T} chunk
-     * @returns {Promise}
-     */
-    write(chunk) {
-      chunkQueue.push(chunk);
-      const drainer2 = makeDrainer();
-      outWaitResolver();
-      return drainer2;
-    },
-    async end() {
-      ended = true;
-      const drainer2 = makeDrainer();
-      outWaitResolver();
-      await drainer2;
-    }
-  };
-  const iterator = {
-    /** @returns {Promise>} */
-    async next() {
-      const chunk = chunkQueue.shift();
-      if (chunk) {
-        if (chunkQueue.length === 0) {
-          drainerResolver();
-        }
-        return { done: false, value: chunk };
-      }
-      if (ended) {
-        drainerResolver();
-        return { done: true, value: void 0 };
-      }
-      if (!outWait) {
-        outWait = new Promise((resolve6) => {
-          outWaitResolver = () => {
-            outWait = null;
-            outWaitResolver = noop;
-            return resolve6(iterator.next());
-          };
-        });
-      }
-      return outWait;
-    }
-  };
-  return { writer, iterator };
-}
-
-// node_modules/@ipld/car/src/writer-browser.js
-var CarWriter = class _CarWriter {
-  /**
-   * @param {CID[]} roots
-   * @param {CarEncoder} encoder
-   */
-  constructor(roots, encoder) {
-    this._encoder = encoder;
-    this._mutex = encoder.setRoots(roots);
-    this._ended = false;
-  }
-  /**
-   * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
-   *
-   * @function
-   * @memberof CarWriter
-   * @instance
-   * @async
-   * @param {Block} block - A `{ cid:CID, bytes:Uint8Array }` pair.
-   * @returns {Promise} The returned promise will only resolve once the
-   * bytes this block generates are written to the `out` iterable.
-   */
-  async put(block) {
-    if (!(block.bytes instanceof Uint8Array) || !block.cid) {
-      throw new TypeError("Can only write {cid, bytes} objects");
-    }
-    if (this._ended) {
-      throw new Error("Already closed");
-    }
-    const cid = CID2.asCID(block.cid);
-    if (!cid) {
-      throw new TypeError("Can only write {cid, bytes} objects");
-    }
-    this._mutex = this._mutex.then(() => this._encoder.writeBlock({ cid, bytes: block.bytes }));
-    return this._mutex;
-  }
-  /**
-   * Finalise the CAR archive and signal that the `out` iterable should end once
-   * any remaining bytes are written.
-   *
-   * @function
-   * @memberof CarWriter
-   * @instance
-   * @async
-   * @returns {Promise}
-   */
-  async close() {
-    if (this._ended) {
-      throw new Error("Already closed");
-    }
-    await this._mutex;
-    this._ended = true;
-    return this._encoder.close();
-  }
-  /**
-   * Create a new CAR writer "channel" which consists of a
-   * `{ writer:CarWriter, out:AsyncIterable }` pair.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {CID[] | CID | void} roots
-   * @returns {WriterChannel} The channel takes the form of
-   * `{ writer:CarWriter, out:AsyncIterable }`.
-   */
-  static create(roots) {
-    roots = toRoots(roots);
-    const { encoder, iterator } = encodeWriter();
-    const writer = new _CarWriter(roots, encoder);
-    const out = new CarWriterOut(iterator);
-    return { writer, out };
-  }
-  /**
-   * Create a new CAR appender "channel" which consists of a
-   * `{ writer:CarWriter, out:AsyncIterable }` pair.
-   * This appender does not consider roots and does not produce a CAR header.
-   * It is designed to append blocks to an _existing_ CAR archive. It is
-   * expected that `out` will be concatenated onto the end of an existing
-   * archive that already has a properly formatted header.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @returns {WriterChannel} The channel takes the form of
-   * `{ writer:CarWriter, out:AsyncIterable }`.
-   */
-  static createAppender() {
-    const { encoder, iterator } = encodeWriter();
-    encoder.setRoots = () => Promise.resolve();
-    const writer = new _CarWriter([], encoder);
-    const out = new CarWriterOut(iterator);
-    return { writer, out };
-  }
-  /**
-   * Update the list of roots in the header of an existing CAR as represented
-   * in a Uint8Array.
-   *
-   * This operation is an _overwrite_, the total length of the CAR will not be
-   * modified. A rejection will occur if the new header will not be the same
-   * length as the existing header, in which case the CAR will not be modified.
-   * It is the responsibility of the user to ensure that the roots being
-   * replaced encode as the same length as the new roots.
-   *
-   * The byte array passed in an argument will be modified and also returned
-   * upon successful modification.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {Uint8Array} bytes
-   * @param {CID[]} roots - A new list of roots to replace the existing list in
-   * the CAR header. The new header must take up the same number of bytes as the
-   * existing header, so the roots should collectively be the same byte length
-   * as the existing roots.
-   * @returns {Promise}
-   */
-  static async updateRootsInBytes(bytes, roots) {
-    const reader = bytesReader(bytes);
-    await readHeader(reader);
-    const newHeader = createHeader(roots);
-    if (Number(reader.pos) !== newHeader.length) {
-      throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${reader.pos} bytes, new header is ${newHeader.length} bytes)`);
-    }
-    bytes.set(newHeader, 0);
-    return bytes;
-  }
-};
-var CarWriterOut = class {
-  /**
-   * @param {AsyncIterator} iterator
-   */
-  constructor(iterator) {
-    this._iterator = iterator;
-  }
-  [Symbol.asyncIterator]() {
-    if (this._iterating) {
-      throw new Error("Multiple iterator not supported");
-    }
-    this._iterating = true;
-    return this._iterator;
-  }
-};
-function encodeWriter() {
-  const iw = create3();
-  const { writer, iterator } = iw;
-  const encoder = createEncoder(writer);
-  return { encoder, iterator };
-}
-function toRoots(roots) {
-  if (roots === void 0) {
-    return [];
-  }
-  if (!Array.isArray(roots)) {
-    const cid = CID2.asCID(roots);
-    if (!cid) {
-      throw new TypeError("roots must be a single CID or an array of CIDs");
-    }
-    return [cid];
-  }
-  const _roots = [];
-  for (const root of roots) {
-    const _root = CID2.asCID(root);
-    if (!_root) {
-      throw new TypeError("roots must be a single CID or an array of CIDs");
-    }
-    _roots.push(_root);
-  }
-  return _roots;
-}
-
-// node_modules/@ipld/car/src/writer.js
-var fsread3 = promisify2(fs3.read);
-var fswrite = promisify2(fs3.write);
-var CarWriter2 = class extends CarWriter {
-  /**
-   * Update the list of roots in the header of an existing CAR file. The first
-   * argument must be a file descriptor for CAR file that is open in read and
-   * write mode (not append), e.g. `fs.open` or `fs.promises.open` with `'r+'`
-   * mode.
-   *
-   * This operation is an _overwrite_, the total length of the CAR will not be
-   * modified. A rejection will occur if the new header will not be the same
-   * length as the existing header, in which case the CAR will not be modified.
-   * It is the responsibility of the user to ensure that the roots being
-   * replaced encode as the same length as the new roots.
-   *
-   * This function is **only available in Node.js** and not a browser
-   * environment.
-   *
-   * @async
-   * @static
-   * @memberof CarWriter
-   * @param {fs.promises.FileHandle | number} fd - A file descriptor from the
-   * Node.js `fs` module. Either an integer, from `fs.open()` or a `FileHandle`
-   * from `fs.promises.open()`.
-   * @param {CID[]} roots - A new list of roots to replace the existing list in
-   * the CAR header. The new header must take up the same number of bytes as the
-   * existing header, so the roots should collectively be the same byte length
-   * as the existing roots.
-   * @returns {Promise}
-   */
-  static async updateRootsInFile(fd, roots) {
-    const chunkSize = 256;
-    let bytes;
-    let offset = 0;
-    let readChunk;
-    if (typeof fd === "number") {
-      readChunk = async () => (await fsread3(fd, bytes, 0, chunkSize, offset)).bytesRead;
-    } else if (typeof fd === "object" && typeof fd.read === "function") {
-      readChunk = async () => (await fd.read(bytes, 0, chunkSize, offset)).bytesRead;
-    } else {
-      throw new TypeError("Bad fd");
-    }
-    const fdReader = chunkReader(async () => {
-      bytes = new Uint8Array(chunkSize);
-      const read4 = await readChunk();
-      offset += read4;
-      return read4 < chunkSize ? bytes.subarray(0, read4) : bytes;
-    });
-    await readHeader(fdReader);
-    const newHeader = createHeader(roots);
-    if (fdReader.pos !== newHeader.length) {
-      throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${fdReader.pos} bytes, new header is ${newHeader.length} bytes)`);
-    }
-    if (typeof fd === "number") {
-      await fswrite(fd, newHeader, 0, newHeader.length, 0);
-    } else if (typeof fd === "object" && typeof fd.read === "function") {
-      await fd.write(newHeader, 0, newHeader.length, 0);
-    }
-  }
-};
-
-// node_modules/it-drain/dist/src/index.js
-function isAsyncIterable(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function drain(source) {
-  if (isAsyncIterable(source)) {
-    return (async () => {
-      for await (const _ of source) {
-      }
-    })();
-  } else {
-    for (const _ of source) {
-    }
-  }
-}
-var src_default = drain;
-
-// node_modules/it-peekable/dist/src/index.js
-function peekable(iterable) {
-  const [iterator, symbol2] = iterable[Symbol.asyncIterator] != null ? [iterable[Symbol.asyncIterator](), Symbol.asyncIterator] : [iterable[Symbol.iterator](), Symbol.iterator];
-  const queue = [];
-  return {
-    peek: () => {
-      return iterator.next();
-    },
-    push: (value) => {
-      queue.push(value);
-    },
-    next: () => {
-      if (queue.length > 0) {
-        return {
-          done: false,
-          value: queue.shift()
-        };
-      }
-      return iterator.next();
-    },
-    [symbol2]() {
-      return this;
-    }
-  };
-}
-var src_default2 = peekable;
-
-// node_modules/it-map/dist/src/index.js
-function isAsyncIterable2(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function map(source, func) {
-  if (isAsyncIterable2(source)) {
-    return async function* () {
-      for await (const val of source) {
-        yield func(val);
-      }
-    }();
-  }
-  const peekable2 = src_default2(source);
-  const { value, done } = peekable2.next();
-  if (done === true) {
-    return function* () {
-    }();
-  }
-  const res = func(value);
-  if (typeof res.then === "function") {
-    return async function* () {
-      yield await res;
-      for await (const val of peekable2) {
-        yield func(val);
-      }
-    }();
-  }
-  const fn = func;
-  return function* () {
-    yield res;
-    for (const val of peekable2) {
-      yield fn(val);
-    }
-  }();
-}
-var src_default3 = map;
-
-// node_modules/p-defer/index.js
-function pDefer() {
-  const deferred = {};
-  deferred.promise = new Promise((resolve6, reject) => {
-    deferred.resolve = resolve6;
-    deferred.reject = reject;
-  });
-  return deferred;
-}
-
-// node_modules/eventemitter3/index.mjs
-var import_index = __toESM(require_eventemitter3(), 1);
-
-// node_modules/p-timeout/index.js
-var TimeoutError = class extends Error {
-  constructor(message2) {
-    super(message2);
-    this.name = "TimeoutError";
-  }
-};
-var AbortError = class extends Error {
-  constructor(message2) {
-    super();
-    this.name = "AbortError";
-    this.message = message2;
-  }
-};
-var getDOMException = (errorMessage) => globalThis.DOMException === void 0 ? new AbortError(errorMessage) : new DOMException(errorMessage);
-var getAbortedReason = (signal) => {
-  const reason = signal.reason === void 0 ? getDOMException("This operation was aborted.") : signal.reason;
-  return reason instanceof Error ? reason : getDOMException(reason);
-};
-function pTimeout(promise, milliseconds, fallback, options) {
-  let timer;
-  const cancelablePromise = new Promise((resolve6, reject) => {
-    if (typeof milliseconds !== "number" || Math.sign(milliseconds) !== 1) {
-      throw new TypeError(`Expected \`milliseconds\` to be a positive number, got \`${milliseconds}\``);
-    }
-    if (milliseconds === Number.POSITIVE_INFINITY) {
-      resolve6(promise);
-      return;
-    }
-    options = {
-      customTimers: { setTimeout, clearTimeout },
-      ...options
-    };
-    if (options.signal) {
-      const { signal } = options;
-      if (signal.aborted) {
-        reject(getAbortedReason(signal));
-      }
-      signal.addEventListener("abort", () => {
-        reject(getAbortedReason(signal));
-      });
-    }
-    timer = options.customTimers.setTimeout.call(void 0, () => {
-      if (typeof fallback === "function") {
-        try {
-          resolve6(fallback());
-        } catch (error) {
-          reject(error);
-        }
-        return;
-      }
-      const message2 = typeof fallback === "string" ? fallback : `Promise timed out after ${milliseconds} milliseconds`;
-      const timeoutError = fallback instanceof Error ? fallback : new TimeoutError(message2);
-      if (typeof promise.cancel === "function") {
-        promise.cancel();
-      }
-      reject(timeoutError);
-    }, milliseconds);
-    (async () => {
-      try {
-        resolve6(await promise);
-      } catch (error) {
-        reject(error);
-      } finally {
-        options.customTimers.clearTimeout.call(void 0, timer);
-      }
-    })();
-  });
-  cancelablePromise.clear = () => {
-    clearTimeout(timer);
-    timer = void 0;
-  };
-  return cancelablePromise;
-}
-
-// node_modules/p-queue/dist/lower-bound.js
-function lowerBound(array, value, comparator) {
-  let first2 = 0;
-  let count = array.length;
-  while (count > 0) {
-    const step = Math.trunc(count / 2);
-    let it = first2 + step;
-    if (comparator(array[it], value) <= 0) {
-      first2 = ++it;
-      count -= step + 1;
-    } else {
-      count = step;
-    }
-  }
-  return first2;
-}
-
-// node_modules/p-queue/dist/priority-queue.js
-var __classPrivateFieldGet = function(receiver, state, kind, f) {
-  if (kind === "a" && !f)
-    throw new TypeError("Private accessor was defined without a getter");
-  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
-    throw new TypeError("Cannot read private member from an object whose class did not declare it");
-  return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
-};
-var _PriorityQueue_queue;
-var PriorityQueue = class {
-  constructor() {
-    _PriorityQueue_queue.set(this, []);
-  }
-  enqueue(run, options) {
-    options = {
-      priority: 0,
-      ...options
-    };
-    const element = {
-      priority: options.priority,
-      run
-    };
-    if (this.size && __classPrivateFieldGet(this, _PriorityQueue_queue, "f")[this.size - 1].priority >= options.priority) {
-      __classPrivateFieldGet(this, _PriorityQueue_queue, "f").push(element);
-      return;
-    }
-    const index = lowerBound(__classPrivateFieldGet(this, _PriorityQueue_queue, "f"), element, (a, b) => b.priority - a.priority);
-    __classPrivateFieldGet(this, _PriorityQueue_queue, "f").splice(index, 0, element);
-  }
-  dequeue() {
-    const item = __classPrivateFieldGet(this, _PriorityQueue_queue, "f").shift();
-    return item === null || item === void 0 ? void 0 : item.run;
-  }
-  filter(options) {
-    return __classPrivateFieldGet(this, _PriorityQueue_queue, "f").filter((element) => element.priority === options.priority).map((element) => element.run);
-  }
-  get size() {
-    return __classPrivateFieldGet(this, _PriorityQueue_queue, "f").length;
-  }
-};
-_PriorityQueue_queue = /* @__PURE__ */ new WeakMap();
-var priority_queue_default = PriorityQueue;
-
-// node_modules/p-queue/dist/index.js
-var __classPrivateFieldSet = function(receiver, state, value, kind, f) {
-  if (kind === "m")
-    throw new TypeError("Private method is not writable");
-  if (kind === "a" && !f)
-    throw new TypeError("Private accessor was defined without a setter");
-  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
-    throw new TypeError("Cannot write private member to an object whose class did not declare it");
-  return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value;
-};
-var __classPrivateFieldGet2 = function(receiver, state, kind, f) {
-  if (kind === "a" && !f)
-    throw new TypeError("Private accessor was defined without a getter");
-  if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
-    throw new TypeError("Cannot read private member from an object whose class did not declare it");
-  return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
-};
-var _PQueue_instances;
-var _PQueue_carryoverConcurrencyCount;
-var _PQueue_isIntervalIgnored;
-var _PQueue_intervalCount;
-var _PQueue_intervalCap;
-var _PQueue_interval;
-var _PQueue_intervalEnd;
-var _PQueue_intervalId;
-var _PQueue_timeoutId;
-var _PQueue_queue;
-var _PQueue_queueClass;
-var _PQueue_pending;
-var _PQueue_concurrency;
-var _PQueue_isPaused;
-var _PQueue_throwOnTimeout;
-var _PQueue_doesIntervalAllowAnother_get;
-var _PQueue_doesConcurrentAllowAnother_get;
-var _PQueue_next;
-var _PQueue_onResumeInterval;
-var _PQueue_isIntervalPaused_get;
-var _PQueue_tryToStartAnother;
-var _PQueue_initializeIntervalIfNeeded;
-var _PQueue_onInterval;
-var _PQueue_processQueue;
-var _PQueue_throwOnAbort;
-var _PQueue_onEvent;
-var AbortError2 = class extends Error {
-};
-var PQueue = class extends import_index.default {
-  // TODO: The `throwOnTimeout` option should affect the return types of `add()` and `addAll()`
-  constructor(options) {
-    var _a, _b, _c, _d;
-    super();
-    _PQueue_instances.add(this);
-    _PQueue_carryoverConcurrencyCount.set(this, void 0);
-    _PQueue_isIntervalIgnored.set(this, void 0);
-    _PQueue_intervalCount.set(this, 0);
-    _PQueue_intervalCap.set(this, void 0);
-    _PQueue_interval.set(this, void 0);
-    _PQueue_intervalEnd.set(this, 0);
-    _PQueue_intervalId.set(this, void 0);
-    _PQueue_timeoutId.set(this, void 0);
-    _PQueue_queue.set(this, void 0);
-    _PQueue_queueClass.set(this, void 0);
-    _PQueue_pending.set(this, 0);
-    _PQueue_concurrency.set(this, void 0);
-    _PQueue_isPaused.set(this, void 0);
-    _PQueue_throwOnTimeout.set(this, void 0);
-    Object.defineProperty(this, "timeout", {
-      enumerable: true,
-      configurable: true,
-      writable: true,
-      value: void 0
-    });
-    options = {
-      carryoverConcurrencyCount: false,
-      intervalCap: Number.POSITIVE_INFINITY,
-      interval: 0,
-      concurrency: Number.POSITIVE_INFINITY,
-      autoStart: true,
-      queueClass: priority_queue_default,
-      ...options
-    };
-    if (!(typeof options.intervalCap === "number" && options.intervalCap >= 1)) {
-      throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${(_b = (_a = options.intervalCap) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : ""}\` (${typeof options.intervalCap})`);
-    }
-    if (options.interval === void 0 || !(Number.isFinite(options.interval) && options.interval >= 0)) {
-      throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${(_d = (_c = options.interval) === null || _c === void 0 ? void 0 : _c.toString()) !== null && _d !== void 0 ? _d : ""}\` (${typeof options.interval})`);
-    }
-    __classPrivateFieldSet(this, _PQueue_carryoverConcurrencyCount, options.carryoverConcurrencyCount, "f");
-    __classPrivateFieldSet(this, _PQueue_isIntervalIgnored, options.intervalCap === Number.POSITIVE_INFINITY || options.interval === 0, "f");
-    __classPrivateFieldSet(this, _PQueue_intervalCap, options.intervalCap, "f");
-    __classPrivateFieldSet(this, _PQueue_interval, options.interval, "f");
-    __classPrivateFieldSet(this, _PQueue_queue, new options.queueClass(), "f");
-    __classPrivateFieldSet(this, _PQueue_queueClass, options.queueClass, "f");
-    this.concurrency = options.concurrency;
-    this.timeout = options.timeout;
-    __classPrivateFieldSet(this, _PQueue_throwOnTimeout, options.throwOnTimeout === true, "f");
-    __classPrivateFieldSet(this, _PQueue_isPaused, options.autoStart === false, "f");
-  }
-  get concurrency() {
-    return __classPrivateFieldGet2(this, _PQueue_concurrency, "f");
-  }
-  set concurrency(newConcurrency) {
-    if (!(typeof newConcurrency === "number" && newConcurrency >= 1)) {
-      throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`);
-    }
-    __classPrivateFieldSet(this, _PQueue_concurrency, newConcurrency, "f");
-    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
-  }
-  async add(function_, options = {}) {
-    options = {
-      timeout: this.timeout,
-      throwOnTimeout: __classPrivateFieldGet2(this, _PQueue_throwOnTimeout, "f"),
-      ...options
-    };
-    return new Promise((resolve6, reject) => {
-      __classPrivateFieldGet2(this, _PQueue_queue, "f").enqueue(async () => {
-        var _a;
-        var _b, _c;
-        __classPrivateFieldSet(this, _PQueue_pending, (_b = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _b++, _b), "f");
-        __classPrivateFieldSet(this, _PQueue_intervalCount, (_c = __classPrivateFieldGet2(this, _PQueue_intervalCount, "f"), _c++, _c), "f");
-        try {
-          if ((_a = options.signal) === null || _a === void 0 ? void 0 : _a.aborted) {
-            throw new AbortError2("The task was aborted.");
-          }
-          let operation = function_({ signal: options.signal });
-          if (options.timeout) {
-            operation = pTimeout(Promise.resolve(operation), options.timeout);
-          }
-          if (options.signal) {
-            operation = Promise.race([operation, __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_throwOnAbort).call(this, options.signal)]);
-          }
-          const result = await operation;
-          resolve6(result);
-          this.emit("completed", result);
-        } catch (error) {
-          if (error instanceof TimeoutError && !options.throwOnTimeout) {
-            resolve6();
-            return;
-          }
-          reject(error);
-          this.emit("error", error);
-        } finally {
-          __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_next).call(this);
-        }
-      }, options);
-      this.emit("add");
-      __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this);
-    });
-  }
-  async addAll(functions, options) {
-    return Promise.all(functions.map(async (function_) => this.add(function_, options)));
-  }
-  /**
-  Start (or resume) executing enqueued tasks within concurrency limit. No need to call this if queue is not paused (via `options.autoStart = false` or by `.pause()` method.)
-  */
-  start() {
-    if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) {
-      return this;
-    }
-    __classPrivateFieldSet(this, _PQueue_isPaused, false, "f");
-    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
-    return this;
-  }
-  /**
-  Put queue execution on hold.
-  */
-  pause() {
-    __classPrivateFieldSet(this, _PQueue_isPaused, true, "f");
-  }
-  /**
-  Clear the queue.
-  */
-  clear() {
-    __classPrivateFieldSet(this, _PQueue_queue, new (__classPrivateFieldGet2(this, _PQueue_queueClass, "f"))(), "f");
-  }
-  /**
-      Can be called multiple times. Useful if you for example add additional items at a later time.
-  
-      @returns A promise that settles when the queue becomes empty.
-      */
-  async onEmpty() {
-    if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
-      return;
-    }
-    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "empty");
-  }
-  /**
-      @returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`.
-  
-      If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item.
-  
-      Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation.
-      */
-  async onSizeLessThan(limit) {
-    if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit) {
-      return;
-    }
-    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "next", () => __classPrivateFieldGet2(this, _PQueue_queue, "f").size < limit);
-  }
-  /**
-      The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
-  
-      @returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`.
-      */
-  async onIdle() {
-    if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
-      return;
-    }
-    await __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onEvent).call(this, "idle");
-  }
-  /**
-  Size of the queue, the number of queued items waiting to run.
-  */
-  get size() {
-    return __classPrivateFieldGet2(this, _PQueue_queue, "f").size;
-  }
-  /**
-      Size of the queue, filtered by the given options.
-  
-      For example, this can be used to find the number of items remaining in the queue with a specific priority level.
-      */
-  sizeBy(options) {
-    return __classPrivateFieldGet2(this, _PQueue_queue, "f").filter(options).length;
-  }
-  /**
-  Number of running items (no longer in the queue).
-  */
-  get pending() {
-    return __classPrivateFieldGet2(this, _PQueue_pending, "f");
-  }
-  /**
-  Whether the queue is currently paused.
-  */
-  get isPaused() {
-    return __classPrivateFieldGet2(this, _PQueue_isPaused, "f");
-  }
-};
-_PQueue_carryoverConcurrencyCount = /* @__PURE__ */ new WeakMap(), _PQueue_isIntervalIgnored = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCount = /* @__PURE__ */ new WeakMap(), _PQueue_intervalCap = /* @__PURE__ */ new WeakMap(), _PQueue_interval = /* @__PURE__ */ new WeakMap(), _PQueue_intervalEnd = /* @__PURE__ */ new WeakMap(), _PQueue_intervalId = /* @__PURE__ */ new WeakMap(), _PQueue_timeoutId = /* @__PURE__ */ new WeakMap(), _PQueue_queue = /* @__PURE__ */ new WeakMap(), _PQueue_queueClass = /* @__PURE__ */ new WeakMap(), _PQueue_pending = /* @__PURE__ */ new WeakMap(), _PQueue_concurrency = /* @__PURE__ */ new WeakMap(), _PQueue_isPaused = /* @__PURE__ */ new WeakMap(), _PQueue_throwOnTimeout = /* @__PURE__ */ new WeakMap(), _PQueue_instances = /* @__PURE__ */ new WeakSet(), _PQueue_doesIntervalAllowAnother_get = function _PQueue_doesIntervalAllowAnother_get2() {
-  return __classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalCount, "f") < __classPrivateFieldGet2(this, _PQueue_intervalCap, "f");
-}, _PQueue_doesConcurrentAllowAnother_get = function _PQueue_doesConcurrentAllowAnother_get2() {
-  return __classPrivateFieldGet2(this, _PQueue_pending, "f") < __classPrivateFieldGet2(this, _PQueue_concurrency, "f");
-}, _PQueue_next = function _PQueue_next2() {
-  var _a;
-  __classPrivateFieldSet(this, _PQueue_pending, (_a = __classPrivateFieldGet2(this, _PQueue_pending, "f"), _a--, _a), "f");
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this);
-  this.emit("next");
-}, _PQueue_onResumeInterval = function _PQueue_onResumeInterval2() {
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this);
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this);
-  __classPrivateFieldSet(this, _PQueue_timeoutId, void 0, "f");
-}, _PQueue_isIntervalPaused_get = function _PQueue_isIntervalPaused_get2() {
-  const now = Date.now();
-  if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f") === void 0) {
-    const delay = __classPrivateFieldGet2(this, _PQueue_intervalEnd, "f") - now;
-    if (delay < 0) {
-      __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? __classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f");
-    } else {
-      if (__classPrivateFieldGet2(this, _PQueue_timeoutId, "f") === void 0) {
-        __classPrivateFieldSet(this, _PQueue_timeoutId, setTimeout(() => {
-          __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onResumeInterval).call(this);
-        }, delay), "f");
-      }
-      return true;
-    }
-  }
-  return false;
-}, _PQueue_tryToStartAnother = function _PQueue_tryToStartAnother2() {
-  if (__classPrivateFieldGet2(this, _PQueue_queue, "f").size === 0) {
-    if (__classPrivateFieldGet2(this, _PQueue_intervalId, "f")) {
-      clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f"));
-    }
-    __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f");
-    this.emit("empty");
-    if (__classPrivateFieldGet2(this, _PQueue_pending, "f") === 0) {
-      this.emit("idle");
-    }
-    return false;
-  }
-  if (!__classPrivateFieldGet2(this, _PQueue_isPaused, "f")) {
-    const canInitializeInterval = !__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_isIntervalPaused_get);
-    if (__classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesIntervalAllowAnother_get) && __classPrivateFieldGet2(this, _PQueue_instances, "a", _PQueue_doesConcurrentAllowAnother_get)) {
-      const job = __classPrivateFieldGet2(this, _PQueue_queue, "f").dequeue();
-      if (!job) {
-        return false;
-      }
-      this.emit("active");
-      job();
-      if (canInitializeInterval) {
-        __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_initializeIntervalIfNeeded).call(this);
-      }
-      return true;
-    }
-  }
-  return false;
-}, _PQueue_initializeIntervalIfNeeded = function _PQueue_initializeIntervalIfNeeded2() {
-  if (__classPrivateFieldGet2(this, _PQueue_isIntervalIgnored, "f") || __classPrivateFieldGet2(this, _PQueue_intervalId, "f") !== void 0) {
-    return;
-  }
-  __classPrivateFieldSet(this, _PQueue_intervalId, setInterval(() => {
-    __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_onInterval).call(this);
-  }, __classPrivateFieldGet2(this, _PQueue_interval, "f")), "f");
-  __classPrivateFieldSet(this, _PQueue_intervalEnd, Date.now() + __classPrivateFieldGet2(this, _PQueue_interval, "f"), "f");
-}, _PQueue_onInterval = function _PQueue_onInterval2() {
-  if (__classPrivateFieldGet2(this, _PQueue_intervalCount, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_pending, "f") === 0 && __classPrivateFieldGet2(this, _PQueue_intervalId, "f")) {
-    clearInterval(__classPrivateFieldGet2(this, _PQueue_intervalId, "f"));
-    __classPrivateFieldSet(this, _PQueue_intervalId, void 0, "f");
-  }
-  __classPrivateFieldSet(this, _PQueue_intervalCount, __classPrivateFieldGet2(this, _PQueue_carryoverConcurrencyCount, "f") ? __classPrivateFieldGet2(this, _PQueue_pending, "f") : 0, "f");
-  __classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_processQueue).call(this);
-}, _PQueue_processQueue = function _PQueue_processQueue2() {
-  while (__classPrivateFieldGet2(this, _PQueue_instances, "m", _PQueue_tryToStartAnother).call(this)) {
-  }
-}, _PQueue_throwOnAbort = async function _PQueue_throwOnAbort2(signal) {
-  return new Promise((_resolve, reject) => {
-    signal.addEventListener("abort", () => {
-      reject(new AbortError2("The task was aborted."));
-    }, { once: true });
-  });
-}, _PQueue_onEvent = async function _PQueue_onEvent2(event, filter3) {
-  return new Promise((resolve6) => {
-    const listener = () => {
-      if (filter3 && !filter3()) {
-        return;
-      }
-      this.off(event, listener);
-      resolve6();
-    };
-    this.on(event, listener);
-  });
-};
-var dist_default = PQueue;
-
-// node_modules/@ipld/dag-pb/src/index.js
-var src_exports2 = {};
-__export(src_exports2, {
-  code: () => code2,
-  createLink: () => createLink,
-  createNode: () => createNode,
-  decode: () => decode11,
-  encode: () => encode7,
-  name: () => name,
-  prepare: () => prepare,
-  validate: () => validate
-});
-
-// node_modules/@ipld/dag-pb/src/pb-decode.js
-var textDecoder2 = new TextDecoder();
-function decodeVarint2(bytes, offset) {
-  let v = 0;
-  for (let shift = 0; ; shift += 7) {
-    if (shift >= 64) {
-      throw new Error("protobuf: varint overflow");
-    }
-    if (offset >= bytes.length) {
-      throw new Error("protobuf: unexpected end of data");
-    }
-    const b = bytes[offset++];
-    v += shift < 28 ? (b & 127) << shift : (b & 127) * 2 ** shift;
-    if (b < 128) {
-      break;
-    }
-  }
-  return [v, offset];
-}
-function decodeBytes(bytes, offset) {
-  let byteLen;
-  [byteLen, offset] = decodeVarint2(bytes, offset);
-  const postOffset = offset + byteLen;
-  if (byteLen < 0 || postOffset < 0) {
-    throw new Error("protobuf: invalid length");
-  }
-  if (postOffset > bytes.length) {
-    throw new Error("protobuf: unexpected end of data");
-  }
-  return [bytes.subarray(offset, postOffset), postOffset];
-}
-function decodeKey(bytes, index) {
-  let wire;
-  [wire, index] = decodeVarint2(bytes, index);
-  return [wire & 7, wire >> 3, index];
-}
-function decodeLink(bytes) {
-  const link = {};
-  const l = bytes.length;
-  let index = 0;
-  while (index < l) {
-    let wireType, fieldNum;
-    [wireType, fieldNum, index] = decodeKey(bytes, index);
-    if (fieldNum === 1) {
-      if (link.Hash) {
-        throw new Error("protobuf: (PBLink) duplicate Hash section");
-      }
-      if (wireType !== 2) {
-        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Hash`);
-      }
-      if (link.Name !== void 0) {
-        throw new Error("protobuf: (PBLink) invalid order, found Name before Hash");
-      }
-      if (link.Tsize !== void 0) {
-        throw new Error("protobuf: (PBLink) invalid order, found Tsize before Hash");
-      }
-      [link.Hash, index] = decodeBytes(bytes, index);
-    } else if (fieldNum === 2) {
-      if (link.Name !== void 0) {
-        throw new Error("protobuf: (PBLink) duplicate Name section");
-      }
-      if (wireType !== 2) {
-        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Name`);
-      }
-      if (link.Tsize !== void 0) {
-        throw new Error("protobuf: (PBLink) invalid order, found Tsize before Name");
-      }
-      let byts;
-      [byts, index] = decodeBytes(bytes, index);
-      link.Name = textDecoder2.decode(byts);
-    } else if (fieldNum === 3) {
-      if (link.Tsize !== void 0) {
-        throw new Error("protobuf: (PBLink) duplicate Tsize section");
-      }
-      if (wireType !== 0) {
-        throw new Error(`protobuf: (PBLink) wrong wireType (${wireType}) for Tsize`);
-      }
-      [link.Tsize, index] = decodeVarint2(bytes, index);
-    } else {
-      throw new Error(`protobuf: (PBLink) invalid fieldNumber, expected 1, 2 or 3, got ${fieldNum}`);
-    }
-  }
-  if (index > l) {
-    throw new Error("protobuf: (PBLink) unexpected end of data");
-  }
-  return link;
-}
-function decodeNode(bytes) {
-  const l = bytes.length;
-  let index = 0;
-  let links = void 0;
-  let linksBeforeData = false;
-  let data = void 0;
-  while (index < l) {
-    let wireType, fieldNum;
-    [wireType, fieldNum, index] = decodeKey(bytes, index);
-    if (wireType !== 2) {
-      throw new Error(`protobuf: (PBNode) invalid wireType, expected 2, got ${wireType}`);
-    }
-    if (fieldNum === 1) {
-      if (data) {
-        throw new Error("protobuf: (PBNode) duplicate Data section");
-      }
-      [data, index] = decodeBytes(bytes, index);
-      if (links) {
-        linksBeforeData = true;
-      }
-    } else if (fieldNum === 2) {
-      if (linksBeforeData) {
-        throw new Error("protobuf: (PBNode) duplicate Links section");
-      } else if (!links) {
-        links = [];
-      }
-      let byts;
-      [byts, index] = decodeBytes(bytes, index);
-      links.push(decodeLink(byts));
-    } else {
-      throw new Error(`protobuf: (PBNode) invalid fieldNumber, expected 1 or 2, got ${fieldNum}`);
-    }
-  }
-  if (index > l) {
-    throw new Error("protobuf: (PBNode) unexpected end of data");
-  }
-  const node = {};
-  if (data) {
-    node.Data = data;
-  }
-  node.Links = links || [];
-  return node;
-}
-
-// node_modules/@ipld/dag-pb/src/pb-encode.js
-var textEncoder2 = new TextEncoder();
-var maxInt32 = 2 ** 32;
-var maxUInt32 = 2 ** 31;
-function encodeLink(link, bytes) {
-  let i = bytes.length;
-  if (typeof link.Tsize === "number") {
-    if (link.Tsize < 0) {
-      throw new Error("Tsize cannot be negative");
-    }
-    if (!Number.isSafeInteger(link.Tsize)) {
-      throw new Error("Tsize too large for encoding");
-    }
-    i = encodeVarint(bytes, i, link.Tsize) - 1;
-    bytes[i] = 24;
-  }
-  if (typeof link.Name === "string") {
-    const nameBytes = textEncoder2.encode(link.Name);
-    i -= nameBytes.length;
-    bytes.set(nameBytes, i);
-    i = encodeVarint(bytes, i, nameBytes.length) - 1;
-    bytes[i] = 18;
-  }
-  if (link.Hash) {
-    i -= link.Hash.length;
-    bytes.set(link.Hash, i);
-    i = encodeVarint(bytes, i, link.Hash.length) - 1;
-    bytes[i] = 10;
-  }
-  return bytes.length - i;
-}
-function encodeNode(node) {
-  const size = sizeNode(node);
-  const bytes = new Uint8Array(size);
-  let i = size;
-  if (node.Data) {
-    i -= node.Data.length;
-    bytes.set(node.Data, i);
-    i = encodeVarint(bytes, i, node.Data.length) - 1;
-    bytes[i] = 10;
-  }
-  if (node.Links) {
-    for (let index = node.Links.length - 1; index >= 0; index--) {
-      const size2 = encodeLink(node.Links[index], bytes.subarray(0, i));
-      i -= size2;
-      i = encodeVarint(bytes, i, size2) - 1;
-      bytes[i] = 18;
-    }
-  }
-  return bytes;
-}
-function sizeLink(link) {
-  let n = 0;
-  if (link.Hash) {
-    const l = link.Hash.length;
-    n += 1 + l + sov(l);
-  }
-  if (typeof link.Name === "string") {
-    const l = textEncoder2.encode(link.Name).length;
-    n += 1 + l + sov(l);
-  }
-  if (typeof link.Tsize === "number") {
-    n += 1 + sov(link.Tsize);
-  }
-  return n;
-}
-function sizeNode(node) {
-  let n = 0;
-  if (node.Data) {
-    const l = node.Data.length;
-    n += 1 + l + sov(l);
-  }
-  if (node.Links) {
-    for (const link of node.Links) {
-      const l = sizeLink(link);
-      n += 1 + l + sov(l);
-    }
-  }
-  return n;
-}
-function encodeVarint(bytes, offset, v) {
-  offset -= sov(v);
-  const base3 = offset;
-  while (v >= maxUInt32) {
-    bytes[offset++] = v & 127 | 128;
-    v /= 128;
-  }
-  while (v >= 128) {
-    bytes[offset++] = v & 127 | 128;
-    v >>>= 7;
-  }
-  bytes[offset] = v;
-  return base3;
-}
-function sov(x) {
-  if (x % 2 === 0) {
-    x++;
-  }
-  return Math.floor((len64(x) + 6) / 7);
-}
-function len64(x) {
-  let n = 0;
-  if (x >= maxInt32) {
-    x = Math.floor(x / maxInt32);
-    n = 32;
-  }
-  if (x >= 1 << 16) {
-    x >>>= 16;
-    n += 16;
-  }
-  if (x >= 1 << 8) {
-    x >>>= 8;
-    n += 8;
-  }
-  return n + len8tab[x];
-}
-var len8tab = [
-  0,
-  1,
-  2,
-  2,
-  3,
-  3,
-  3,
-  3,
-  4,
-  4,
-  4,
-  4,
-  4,
-  4,
-  4,
-  4,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  5,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  6,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  7,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8,
-  8
-];
-
-// node_modules/@ipld/dag-pb/src/util.js
-var pbNodeProperties = ["Data", "Links"];
-var pbLinkProperties = ["Hash", "Name", "Tsize"];
-var textEncoder3 = new TextEncoder();
-function linkComparator(a, b) {
-  if (a === b) {
-    return 0;
-  }
-  const abuf = a.Name ? textEncoder3.encode(a.Name) : [];
-  const bbuf = b.Name ? textEncoder3.encode(b.Name) : [];
-  let x = abuf.length;
-  let y = bbuf.length;
-  for (let i = 0, len = Math.min(x, y); i < len; ++i) {
-    if (abuf[i] !== bbuf[i]) {
-      x = abuf[i];
-      y = bbuf[i];
-      break;
-    }
-  }
-  return x < y ? -1 : y < x ? 1 : 0;
-}
-function hasOnlyProperties(node, properties) {
-  return !Object.keys(node).some((p) => !properties.includes(p));
-}
-function asLink(link) {
-  if (typeof link.asCID === "object") {
-    const Hash = CID2.asCID(link);
-    if (!Hash) {
-      throw new TypeError("Invalid DAG-PB form");
-    }
-    return { Hash };
-  }
-  if (typeof link !== "object" || Array.isArray(link)) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  const pbl = {};
-  if (link.Hash) {
-    let cid = CID2.asCID(link.Hash);
-    try {
-      if (!cid) {
-        if (typeof link.Hash === "string") {
-          cid = CID2.parse(link.Hash);
-        } else if (link.Hash instanceof Uint8Array) {
-          cid = CID2.decode(link.Hash);
-        }
-      }
-    } catch (e) {
-      throw new TypeError(`Invalid DAG-PB form: ${e.message}`);
-    }
-    if (cid) {
-      pbl.Hash = cid;
-    }
-  }
-  if (!pbl.Hash) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  if (typeof link.Name === "string") {
-    pbl.Name = link.Name;
-  }
-  if (typeof link.Tsize === "number") {
-    pbl.Tsize = link.Tsize;
-  }
-  return pbl;
-}
-function prepare(node) {
-  if (node instanceof Uint8Array || typeof node === "string") {
-    node = { Data: node };
-  }
-  if (typeof node !== "object" || Array.isArray(node)) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  const pbn = {};
-  if (node.Data !== void 0) {
-    if (typeof node.Data === "string") {
-      pbn.Data = textEncoder3.encode(node.Data);
-    } else if (node.Data instanceof Uint8Array) {
-      pbn.Data = node.Data;
-    } else {
-      throw new TypeError("Invalid DAG-PB form");
-    }
-  }
-  if (node.Links !== void 0) {
-    if (Array.isArray(node.Links)) {
-      pbn.Links = node.Links.map(asLink);
-      pbn.Links.sort(linkComparator);
-    } else {
-      throw new TypeError("Invalid DAG-PB form");
-    }
-  } else {
-    pbn.Links = [];
-  }
-  return pbn;
-}
-function validate(node) {
-  if (!node || typeof node !== "object" || Array.isArray(node) || node instanceof Uint8Array || node["/"] && node["/"] === node.bytes) {
-    throw new TypeError("Invalid DAG-PB form");
-  }
-  if (!hasOnlyProperties(node, pbNodeProperties)) {
-    throw new TypeError("Invalid DAG-PB form (extraneous properties)");
-  }
-  if (node.Data !== void 0 && !(node.Data instanceof Uint8Array)) {
-    throw new TypeError("Invalid DAG-PB form (Data must be bytes)");
-  }
-  if (!Array.isArray(node.Links)) {
-    throw new TypeError("Invalid DAG-PB form (Links must be a list)");
-  }
-  for (let i = 0; i < node.Links.length; i++) {
-    const link = node.Links[i];
-    if (!link || typeof link !== "object" || Array.isArray(link) || link instanceof Uint8Array || link["/"] && link["/"] === link.bytes) {
-      throw new TypeError("Invalid DAG-PB form (bad link)");
-    }
-    if (!hasOnlyProperties(link, pbLinkProperties)) {
-      throw new TypeError("Invalid DAG-PB form (extraneous properties on link)");
-    }
-    if (link.Hash === void 0) {
-      throw new TypeError("Invalid DAG-PB form (link must have a Hash)");
-    }
-    if (link.Hash == null || !link.Hash["/"] || link.Hash["/"] !== link.Hash.bytes) {
-      throw new TypeError("Invalid DAG-PB form (link Hash must be a CID)");
-    }
-    if (link.Name !== void 0 && typeof link.Name !== "string") {
-      throw new TypeError("Invalid DAG-PB form (link Name must be a string)");
-    }
-    if (link.Tsize !== void 0) {
-      if (typeof link.Tsize !== "number" || link.Tsize % 1 !== 0) {
-        throw new TypeError("Invalid DAG-PB form (link Tsize must be an integer)");
-      }
-      if (link.Tsize < 0) {
-        throw new TypeError("Invalid DAG-PB form (link Tsize cannot be negative)");
-      }
-    }
-    if (i > 0 && linkComparator(link, node.Links[i - 1]) === -1) {
-      throw new TypeError("Invalid DAG-PB form (links must be sorted by Name bytes)");
-    }
-  }
-}
-function createNode(data, links = []) {
-  return prepare({ Data: data, Links: links });
-}
-function createLink(name4, size, cid) {
-  return asLink({ Hash: cid, Name: name4, Tsize: size });
-}
-
-// node_modules/@ipld/dag-pb/src/index.js
-var name = "dag-pb";
-var code2 = 112;
-function encode7(node) {
-  validate(node);
-  const pbn = {};
-  if (node.Links) {
-    pbn.Links = node.Links.map((l) => {
-      const link = {};
-      if (l.Hash) {
-        link.Hash = l.Hash.bytes;
-      }
-      if (l.Name !== void 0) {
-        link.Name = l.Name;
-      }
-      if (l.Tsize !== void 0) {
-        link.Tsize = l.Tsize;
-      }
-      return link;
-    });
-  }
-  if (node.Data) {
-    pbn.Data = node.Data;
-  }
-  return encodeNode(pbn);
-}
-function decode11(bytes) {
-  const pbn = decodeNode(bytes);
-  const node = {};
-  if (pbn.Data) {
-    node.Data = pbn.Data;
-  }
-  if (pbn.Links) {
-    node.Links = pbn.Links.map((l) => {
-      const link = {};
-      try {
-        link.Hash = CID2.decode(l.Hash);
-      } catch (e) {
-      }
-      if (!link.Hash) {
-        throw new Error("Invalid Hash field found in link, expected CID");
-      }
-      if (l.Name !== void 0) {
-        link.Name = l.Name;
-      }
-      if (l.Tsize !== void 0) {
-        link.Tsize = l.Tsize;
-      }
-      return link;
-    });
-  }
-  return node;
-}
-
-// node_modules/cborg/lib/json/encode.js
-var JSONEncoder = class extends Array {
-  constructor() {
-    super();
-    this.inRecursive = [];
-  }
-  /**
-   * @param {Bl} buf
-   */
-  prefix(buf2) {
-    const recurs = this.inRecursive[this.inRecursive.length - 1];
-    if (recurs) {
-      if (recurs.type === Type.array) {
-        recurs.elements++;
-        if (recurs.elements !== 1) {
-          buf2.push([44]);
-        }
-      }
-      if (recurs.type === Type.map) {
-        recurs.elements++;
-        if (recurs.elements !== 1) {
-          if (recurs.elements % 2 === 1) {
-            buf2.push([44]);
-          } else {
-            buf2.push([58]);
-          }
-        }
-      }
-    }
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.uint.major](buf2, token) {
-    this.prefix(buf2);
-    const is2 = String(token.value);
-    const isa = [];
-    for (let i = 0; i < is2.length; i++) {
-      isa[i] = is2.charCodeAt(i);
-    }
-    buf2.push(isa);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.negint.major](buf2, token) {
-    this[Type.uint.major](buf2, token);
-  }
-  /**
-   * @param {Bl} _buf
-   * @param {Token} _token
-   */
-  [Type.bytes.major](_buf, _token) {
-    throw new Error(`${encodeErrPrefix} unsupported type: Uint8Array`);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.string.major](buf2, token) {
-    this.prefix(buf2);
-    const byts = fromString(JSON.stringify(token.value));
-    buf2.push(byts.length > 32 ? asU8A(byts) : byts);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} _token
-   */
-  [Type.array.major](buf2, _token) {
-    this.prefix(buf2);
-    this.inRecursive.push({ type: Type.array, elements: 0 });
-    buf2.push([91]);
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} _token
-   */
-  [Type.map.major](buf2, _token) {
-    this.prefix(buf2);
-    this.inRecursive.push({ type: Type.map, elements: 0 });
-    buf2.push([123]);
-  }
-  /**
-   * @param {Bl} _buf
-   * @param {Token} _token
-   */
-  [Type.tag.major](_buf, _token) {
-  }
-  /**
-   * @param {Bl} buf
-   * @param {Token} token
-   */
-  [Type.float.major](buf2, token) {
-    if (token.type.name === "break") {
-      const recurs = this.inRecursive.pop();
-      if (recurs) {
-        if (recurs.type === Type.array) {
-          buf2.push([93]);
-        } else if (recurs.type === Type.map) {
-          buf2.push([125]);
-        } else {
-          throw new Error("Unexpected recursive type; this should not happen!");
-        }
-        return;
-      }
-      throw new Error("Unexpected break; this should not happen!");
-    }
-    if (token.value === void 0) {
-      throw new Error(`${encodeErrPrefix} unsupported type: undefined`);
-    }
-    this.prefix(buf2);
-    if (token.type.name === "true") {
-      buf2.push([116, 114, 117, 101]);
-      return;
-    } else if (token.type.name === "false") {
-      buf2.push([102, 97, 108, 115, 101]);
-      return;
-    } else if (token.type.name === "null") {
-      buf2.push([110, 117, 108, 108]);
-      return;
-    }
-    const is2 = String(token.value);
-    const isa = [];
-    let dp = false;
-    for (let i = 0; i < is2.length; i++) {
-      isa[i] = is2.charCodeAt(i);
-      if (!dp && (isa[i] === 46 || isa[i] === 101 || isa[i] === 69)) {
-        dp = true;
-      }
-    }
-    if (!dp) {
-      isa.push(46);
-      isa.push(48);
-    }
-    buf2.push(isa);
-  }
-};
-
-// node_modules/cborg/lib/json/decode.js
-var Tokenizer = class {
-  /**
-   * @param {Uint8Array} data
-   * @param {DecodeOptions} options
-   */
-  constructor(data, options = {}) {
-    this._pos = 0;
-    this.data = data;
-    this.options = options;
-    this.modeStack = ["value"];
-    this.lastToken = "";
-  }
-  pos() {
-    return this._pos;
-  }
-  /**
-   * @returns {boolean}
-   */
-  done() {
-    return this._pos >= this.data.length;
-  }
-  /**
-   * @returns {number}
-   */
-  ch() {
-    return this.data[this._pos];
-  }
-  /**
-   * @returns {string}
-   */
-  currentMode() {
-    return this.modeStack[this.modeStack.length - 1];
-  }
-  skipWhitespace() {
-    let c = this.ch();
-    while (c === 32 || c === 9 || c === 13 || c === 10) {
-      c = this.data[++this._pos];
-    }
-  }
-  /**
-   * @param {number[]} str
-   */
-  expect(str) {
-    if (this.data.length - this._pos < str.length) {
-      throw new Error(`${decodeErrPrefix} unexpected end of input at position ${this._pos}`);
-    }
-    for (let i = 0; i < str.length; i++) {
-      if (this.data[this._pos++] !== str[i]) {
-        throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}, expected to find '${String.fromCharCode(...str)}'`);
-      }
-    }
-  }
-  parseNumber() {
-    const startPos = this._pos;
-    let negative = false;
-    let float = false;
-    const swallow = (chars) => {
-      while (!this.done()) {
-        const ch = this.ch();
-        if (chars.includes(ch)) {
-          this._pos++;
-        } else {
-          break;
-        }
-      }
-    };
-    if (this.ch() === 45) {
-      negative = true;
-      this._pos++;
-    }
-    if (this.ch() === 48) {
-      this._pos++;
-      if (this.ch() === 46) {
-        this._pos++;
-        float = true;
-      } else {
-        return new Token(Type.uint, 0, this._pos - startPos);
-      }
-    }
-    swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
-    if (negative && this._pos === startPos + 1) {
-      throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`);
-    }
-    if (!this.done() && this.ch() === 46) {
-      if (float) {
-        throw new Error(`${decodeErrPrefix} unexpected token at position ${this._pos}`);
-      }
-      float = true;
-      this._pos++;
-      swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
-    }
-    if (!this.done() && (this.ch() === 101 || this.ch() === 69)) {
-      float = true;
-      this._pos++;
-      if (!this.done() && (this.ch() === 43 || this.ch() === 45)) {
-        this._pos++;
-      }
-      swallow([48, 49, 50, 51, 52, 53, 54, 55, 56, 57]);
-    }
-    const numStr = String.fromCharCode.apply(null, this.data.subarray(startPos, this._pos));
-    const num = parseFloat(numStr);
-    if (float) {
-      return new Token(Type.float, num, this._pos - startPos);
-    }
-    if (this.options.allowBigInt !== true || Number.isSafeInteger(num)) {
-      return new Token(num >= 0 ? Type.uint : Type.negint, num, this._pos - startPos);
-    }
-    return new Token(num >= 0 ? Type.uint : Type.negint, BigInt(numStr), this._pos - startPos);
-  }
-  /**
-   * @returns {Token}
-   */
-  parseString() {
-    if (this.ch() !== 34) {
-      throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}; this shouldn't happen`);
-    }
-    this._pos++;
-    for (let i = this._pos, l = 0; i < this.data.length && l < 65536; i++, l++) {
-      const ch = this.data[i];
-      if (ch === 92 || ch < 32 || ch >= 128) {
-        break;
-      }
-      if (ch === 34) {
-        const str = String.fromCharCode.apply(null, this.data.subarray(this._pos, i));
-        this._pos = i + 1;
-        return new Token(Type.string, str, l);
-      }
-    }
-    const startPos = this._pos;
-    const chars = [];
-    const readu4 = () => {
-      if (this._pos + 4 >= this.data.length) {
-        throw new Error(`${decodeErrPrefix} unexpected end of unicode escape sequence at position ${this._pos}`);
-      }
-      let u4 = 0;
-      for (let i = 0; i < 4; i++) {
-        let ch = this.ch();
-        if (ch >= 48 && ch <= 57) {
-          ch -= 48;
-        } else if (ch >= 97 && ch <= 102) {
-          ch = ch - 97 + 10;
-        } else if (ch >= 65 && ch <= 70) {
-          ch = ch - 65 + 10;
-        } else {
-          throw new Error(`${decodeErrPrefix} unexpected unicode escape character at position ${this._pos}`);
-        }
-        u4 = u4 * 16 + ch;
-        this._pos++;
-      }
-      return u4;
-    };
-    const readUtf8Char = () => {
-      const firstByte = this.ch();
-      let codePoint = null;
-      let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
-      if (this._pos + bytesPerSequence > this.data.length) {
-        throw new Error(`${decodeErrPrefix} unexpected unicode sequence at position ${this._pos}`);
-      }
-      let secondByte, thirdByte, fourthByte, tempCodePoint;
-      switch (bytesPerSequence) {
-        case 1:
-          if (firstByte < 128) {
-            codePoint = firstByte;
-          }
-          break;
-        case 2:
-          secondByte = this.data[this._pos + 1];
-          if ((secondByte & 192) === 128) {
-            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
-            if (tempCodePoint > 127) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 3:
-          secondByte = this.data[this._pos + 1];
-          thirdByte = this.data[this._pos + 2];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
-            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
-              codePoint = tempCodePoint;
-            }
-          }
-          break;
-        case 4:
-          secondByte = this.data[this._pos + 1];
-          thirdByte = this.data[this._pos + 2];
-          fourthByte = this.data[this._pos + 3];
-          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
-            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
-            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
-              codePoint = tempCodePoint;
-            }
-          }
-      }
-      if (codePoint === null) {
-        codePoint = 65533;
-        bytesPerSequence = 1;
-      } else if (codePoint > 65535) {
-        codePoint -= 65536;
-        chars.push(codePoint >>> 10 & 1023 | 55296);
-        codePoint = 56320 | codePoint & 1023;
-      }
-      chars.push(codePoint);
-      this._pos += bytesPerSequence;
-    };
-    while (!this.done()) {
-      const ch = this.ch();
-      let ch1;
-      switch (ch) {
-        case 92:
-          this._pos++;
-          if (this.done()) {
-            throw new Error(`${decodeErrPrefix} unexpected string termination at position ${this._pos}`);
-          }
-          ch1 = this.ch();
-          this._pos++;
-          switch (ch1) {
-            case 34:
-            case 39:
-            case 92:
-            case 47:
-              chars.push(ch1);
-              break;
-            case 98:
-              chars.push(8);
-              break;
-            case 116:
-              chars.push(9);
-              break;
-            case 110:
-              chars.push(10);
-              break;
-            case 102:
-              chars.push(12);
-              break;
-            case 114:
-              chars.push(13);
-              break;
-            case 117:
-              chars.push(readu4());
-              break;
-            default:
-              throw new Error(`${decodeErrPrefix} unexpected string escape character at position ${this._pos}`);
-          }
-          break;
-        case 34:
-          this._pos++;
-          return new Token(Type.string, decodeCodePointsArray(chars), this._pos - startPos);
-        default:
-          if (ch < 32) {
-            throw new Error(`${decodeErrPrefix} invalid control character at position ${this._pos}`);
-          } else if (ch < 128) {
-            chars.push(ch);
-            this._pos++;
-          } else {
-            readUtf8Char();
-          }
-      }
-    }
-    throw new Error(`${decodeErrPrefix} unexpected end of string at position ${this._pos}`);
-  }
-  /**
-   * @returns {Token}
-   */
-  parseValue() {
-    switch (this.ch()) {
-      case 123:
-        this.modeStack.push("obj-start");
-        this._pos++;
-        return new Token(Type.map, Infinity, 1);
-      case 91:
-        this.modeStack.push("array-start");
-        this._pos++;
-        return new Token(Type.array, Infinity, 1);
-      case 34: {
-        return this.parseString();
-      }
-      case 110:
-        this.expect([110, 117, 108, 108]);
-        return new Token(Type.null, null, 4);
-      case 102:
-        this.expect([102, 97, 108, 115, 101]);
-        return new Token(Type.false, false, 5);
-      case 116:
-        this.expect([116, 114, 117, 101]);
-        return new Token(Type.true, true, 4);
-      case 45:
-      case 48:
-      case 49:
-      case 50:
-      case 51:
-      case 52:
-      case 53:
-      case 54:
-      case 55:
-      case 56:
-      case 57:
-        return this.parseNumber();
-      default:
-        throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}`);
-    }
-  }
-  /**
-   * @returns {Token}
-   */
-  next() {
-    this.skipWhitespace();
-    switch (this.currentMode()) {
-      case "value":
-        this.modeStack.pop();
-        return this.parseValue();
-      case "array-value": {
-        this.modeStack.pop();
-        if (this.ch() === 93) {
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        if (this.ch() !== 44) {
-          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting array delimiter but found '${String.fromCharCode(this.ch())}'`);
-        }
-        this._pos++;
-        this.modeStack.push("array-value");
-        this.skipWhitespace();
-        return this.parseValue();
-      }
-      case "array-start": {
-        this.modeStack.pop();
-        if (this.ch() === 93) {
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        this.modeStack.push("array-value");
-        this.skipWhitespace();
-        return this.parseValue();
-      }
-      case "obj-key":
-        if (this.ch() === 125) {
-          this.modeStack.pop();
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        if (this.ch() !== 44) {
-          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting object delimiter but found '${String.fromCharCode(this.ch())}'`);
-        }
-        this._pos++;
-        this.skipWhitespace();
-      case "obj-start": {
-        this.modeStack.pop();
-        if (this.ch() === 125) {
-          this._pos++;
-          this.skipWhitespace();
-          return new Token(Type.break, void 0, 1);
-        }
-        const token = this.parseString();
-        this.skipWhitespace();
-        if (this.ch() !== 58) {
-          throw new Error(`${decodeErrPrefix} unexpected character at position ${this._pos}, was expecting key/value delimiter ':' but found '${String.fromCharCode(this.ch())}'`);
-        }
-        this._pos++;
-        this.modeStack.push("obj-value");
-        return token;
-      }
-      case "obj-value": {
-        this.modeStack.pop();
-        this.modeStack.push("obj-key");
-        this.skipWhitespace();
-        return this.parseValue();
-      }
-      default:
-        throw new Error(`${decodeErrPrefix} unexpected parse state at position ${this._pos}; this shouldn't happen`);
-    }
-  }
-};
-function decode12(data, options) {
-  options = Object.assign({ tokenizer: new Tokenizer(data, options) }, options);
-  return decode(data, options);
-}
-
-// node_modules/multiformats/src/bases/base64.js
-var base64_exports = {};
-__export(base64_exports, {
-  base64: () => base64,
-  base64pad: () => base64pad,
-  base64url: () => base64url,
-  base64urlpad: () => base64urlpad
-});
-var base64 = rfc46482({
-  prefix: "m",
-  name: "base64",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
-  bitsPerChar: 6
-});
-var base64pad = rfc46482({
-  prefix: "M",
-  name: "base64pad",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
-  bitsPerChar: 6
-});
-var base64url = rfc46482({
-  prefix: "u",
-  name: "base64url",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_",
-  bitsPerChar: 6
-});
-var base64urlpad = rfc46482({
-  prefix: "U",
-  name: "base64urlpad",
-  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=",
-  bitsPerChar: 6
-});
-
-// node_modules/multiformats/src/codecs/raw.js
-var raw_exports = {};
-__export(raw_exports, {
-  code: () => code3,
-  decode: () => decode13,
-  encode: () => encode9,
-  name: () => name2
-});
-var name2 = "raw";
-var code3 = 85;
-var encode9 = (node) => coerce2(node);
-var decode13 = (data) => coerce2(data);
-
-// node_modules/@helia/car/dist/src/utils/dag-walkers.js
-var dagPbWalker = {
-  codec: code2,
-  async *walk(block) {
-    const node = decode11(block);
-    yield* node.Links.map((l) => l.Hash);
-  }
-};
-var rawWalker = {
-  codec: code3,
-  async *walk() {
-  }
-};
-var CID_TAG2 = 42;
-var cborWalker = {
-  codec: 113,
-  async *walk(block) {
-    const cids = [];
-    const tags = [];
-    tags[CID_TAG2] = (bytes) => {
-      if (bytes[0] !== 0) {
-        throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
-      }
-      const cid = CID2.decode(bytes.subarray(1));
-      cids.push(cid);
-      return cid;
-    };
-    decode(block, {
-      tags
-    });
-    yield* cids;
-  }
-};
-var DagJsonTokenizer = class extends Tokenizer {
-  tokenBuffer;
-  constructor(data, options) {
-    super(data, options);
-    this.tokenBuffer = [];
-  }
-  done() {
-    return this.tokenBuffer.length === 0 && super.done();
-  }
-  _next() {
-    if (this.tokenBuffer.length > 0) {
-      return this.tokenBuffer.pop();
-    }
-    return super.next();
-  }
-  /**
-   * Implements rules outlined in https://github.com/ipld/specs/pull/356
-   */
-  next() {
-    const token = this._next();
-    if (token.type === Type.map) {
-      const keyToken = this._next();
-      if (keyToken.type === Type.string && keyToken.value === "/") {
-        const valueToken = this._next();
-        if (valueToken.type === Type.string) {
-          const breakToken = this._next();
-          if (breakToken.type !== Type.break) {
-            throw new Error("Invalid encoded CID form");
-          }
-          this.tokenBuffer.push(valueToken);
-          return new Token(Type.tag, 42, 0);
-        }
-        if (valueToken.type === Type.map) {
-          const innerKeyToken = this._next();
-          if (innerKeyToken.type === Type.string && innerKeyToken.value === "bytes") {
-            const innerValueToken = this._next();
-            if (innerValueToken.type === Type.string) {
-              for (let i = 0; i < 2; i++) {
-                const breakToken = this._next();
-                if (breakToken.type !== Type.break) {
-                  throw new Error("Invalid encoded Bytes form");
-                }
-              }
-              const bytes = base64.decode(`m${innerValueToken.value}`);
-              return new Token(Type.bytes, bytes, innerValueToken.value.length);
-            }
-            this.tokenBuffer.push(innerValueToken);
-          }
-          this.tokenBuffer.push(innerKeyToken);
-        }
-        this.tokenBuffer.push(valueToken);
-      }
-      this.tokenBuffer.push(keyToken);
-    }
-    return token;
-  }
-};
-var jsonWalker = {
-  codec: 297,
-  async *walk(block) {
-    const cids = [];
-    const tags = [];
-    tags[CID_TAG2] = (string2) => {
-      const cid = CID2.parse(string2);
-      cids.push(cid);
-      return cid;
-    };
-    decode12(block, {
-      tags,
-      tokenizer: new DagJsonTokenizer(block, {
-        tags,
-        allowIndefinite: true,
-        allowUndefined: true,
-        allowNaN: true,
-        allowInfinity: true,
-        allowBigInt: true,
-        strict: false,
-        rejectDuplicateMapKeys: false
-      })
-    });
-    yield* cids;
-  }
-};
-
-// node_modules/@helia/car/dist/src/index.js
-var DEFAULT_DAG_WALKERS = [
-  rawWalker,
-  dagPbWalker,
-  cborWalker,
-  jsonWalker
-];
-var DAG_WALK_QUEUE_CONCURRENCY = 1;
-var DefaultCar = class {
-  components;
-  dagWalkers;
-  constructor(components, init) {
-    this.components = components;
-    this.dagWalkers = {};
-    [...DEFAULT_DAG_WALKERS, ...init.dagWalkers ?? []].forEach((dagWalker) => {
-      this.dagWalkers[dagWalker.codec] = dagWalker;
-    });
-  }
-  async import(reader, options) {
-    await src_default(this.components.blockstore.putMany(src_default3(reader.blocks(), ({ cid, bytes }) => ({ cid, block: bytes })), options));
-  }
-  async export(root, writer, options) {
-    const deferred = pDefer();
-    const roots = Array.isArray(root) ? root : [root];
-    const queue = new dist_default({
-      concurrency: DAG_WALK_QUEUE_CONCURRENCY
-    });
-    queue.on("idle", () => {
-      deferred.resolve();
-    });
-    queue.on("error", (err) => {
-      deferred.resolve(err);
-    });
-    for (const root2 of roots) {
-      void queue.add(async () => {
-        await this.#walkDag(root2, queue, async (cid, bytes) => {
-          await writer.put({ cid, bytes });
-        }, options);
-      });
-    }
-    try {
-      await deferred.promise;
-    } finally {
-      await writer.close();
-    }
-  }
-  /**
-   * Walk the DAG behind the passed CID, ensure all blocks are present in the blockstore
-   * and update the pin count for them
-   */
-  async #walkDag(cid, queue, withBlock, options) {
-    const dagWalker = this.dagWalkers[cid.code];
-    if (dagWalker == null) {
-      throw new Error(`No dag walker found for cid codec ${cid.code}`);
-    }
-    const block = await this.components.blockstore.get(cid, options);
-    await withBlock(cid, block);
-    for await (const cid2 of dagWalker.walk(block)) {
-      void queue.add(async () => {
-        await this.#walkDag(cid2, queue, withBlock, options);
-      });
-    }
-  }
-};
-function car(helia, init = {}) {
-  return new DefaultCar(helia, init);
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/index.js
-var import_err_code4 = __toESM(require_err_code(), 1);
-
-// node_modules/it-first/dist/src/index.js
-function isAsyncIterable3(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function first(source) {
-  if (isAsyncIterable3(source)) {
-    return (async () => {
-      for await (const entry of source) {
-        return entry;
-      }
-      return void 0;
-    })();
-  }
-  for (const entry of source) {
-    return entry;
-  }
-  return void 0;
-}
-var src_default4 = first;
-
-// node_modules/it-batch/dist/src/index.js
-function isAsyncIterable4(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function batch(source, size = 1) {
-  size = Number(size);
-  if (isAsyncIterable4(source)) {
-    return async function* () {
-      let things = [];
-      if (size < 1) {
-        size = 1;
-      }
-      if (size !== Math.round(size)) {
-        throw new Error("Batch size must be an integer");
-      }
-      for await (const thing of source) {
-        things.push(thing);
-        while (things.length >= size) {
-          yield things.slice(0, size);
-          things = things.slice(size);
-        }
-      }
-      while (things.length > 0) {
-        yield things.slice(0, size);
-        things = things.slice(size);
-      }
-    }();
-  }
-  return function* () {
-    let things = [];
-    if (size < 1) {
-      size = 1;
-    }
-    if (size !== Math.round(size)) {
-      throw new Error("Batch size must be an integer");
-    }
-    for (const thing of source) {
-      things.push(thing);
-      while (things.length >= size) {
-        yield things.slice(0, size);
-        things = things.slice(size);
-      }
-    }
-    while (things.length > 0) {
-      yield things.slice(0, size);
-      things = things.slice(size);
-    }
-  }();
-}
-var src_default5 = batch;
-
-// node_modules/it-parallel-batch/dist/src/index.js
-async function* parallelBatch(source, size = 1) {
-  for await (const tasks of src_default5(source, size)) {
-    const things = tasks.map(async (p) => {
-      return p().then((value) => ({ ok: true, value }), (err) => ({ ok: false, err }));
-    });
-    for (let i = 0; i < things.length; i++) {
-      const result = await things[i];
-      if (result.ok) {
-        yield result.value;
-      } else {
-        throw result.err;
-      }
-    }
-  }
-}
-
-// node_modules/uint8arrays/dist/src/util/as-uint8array.js
-function asUint8Array(buf2) {
-  if (globalThis.Buffer != null) {
-    return new Uint8Array(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-  }
-  return buf2;
-}
-
-// node_modules/uint8arrays/dist/src/alloc.js
-function alloc2(size = 0) {
-  var _a;
-  if (((_a = globalThis.Buffer) == null ? void 0 : _a.alloc) != null) {
-    return asUint8Array(globalThis.Buffer.alloc(size));
-  }
-  return new Uint8Array(size);
-}
-function allocUnsafe(size = 0) {
-  var _a;
-  if (((_a = globalThis.Buffer) == null ? void 0 : _a.allocUnsafe) != null) {
-    return asUint8Array(globalThis.Buffer.allocUnsafe(size));
-  }
-  return new Uint8Array(size);
-}
-
-// node_modules/uint8arrays/dist/src/concat.js
-function concat2(arrays, length4) {
-  if (length4 == null) {
-    length4 = arrays.reduce((acc, curr) => acc + curr.length, 0);
-  }
-  const output = allocUnsafe(length4);
-  let offset = 0;
-  for (const arr of arrays) {
-    output.set(arr, offset);
-    offset += arr.length;
-  }
-  return asUint8Array(output);
-}
-
-// node_modules/uint8arrays/dist/src/equals.js
-function equals5(a, b) {
-  if (a === b) {
-    return true;
-  }
-  if (a.byteLength !== b.byteLength) {
-    return false;
-  }
-  for (let i = 0; i < a.byteLength; i++) {
-    if (a[i] !== b[i]) {
-      return false;
-    }
-  }
-  return true;
-}
-
-// node_modules/uint8arraylist/dist/src/index.js
-var symbol = Symbol.for("@achingbrain/uint8arraylist");
-function findBufAndOffset(bufs, index) {
-  if (index == null || index < 0) {
-    throw new RangeError("index is out of bounds");
-  }
-  let offset = 0;
-  for (const buf2 of bufs) {
-    const bufEnd = offset + buf2.byteLength;
-    if (index < bufEnd) {
-      return {
-        buf: buf2,
-        index: index - offset
-      };
-    }
-    offset = bufEnd;
-  }
-  throw new RangeError("index is out of bounds");
-}
-function isUint8ArrayList(value) {
-  return Boolean(value == null ? void 0 : value[symbol]);
-}
-var Uint8ArrayList = class _Uint8ArrayList {
-  bufs;
-  length;
-  [symbol] = true;
-  constructor(...data) {
-    this.bufs = [];
-    this.length = 0;
-    if (data.length > 0) {
-      this.appendAll(data);
-    }
-  }
-  *[Symbol.iterator]() {
-    yield* this.bufs;
-  }
-  get byteLength() {
-    return this.length;
-  }
-  /**
-   * Add one or more `bufs` to the end of this Uint8ArrayList
-   */
-  append(...bufs) {
-    this.appendAll(bufs);
-  }
-  /**
-   * Add all `bufs` to the end of this Uint8ArrayList
-   */
-  appendAll(bufs) {
-    let length4 = 0;
-    for (const buf2 of bufs) {
-      if (buf2 instanceof Uint8Array) {
-        length4 += buf2.byteLength;
-        this.bufs.push(buf2);
-      } else if (isUint8ArrayList(buf2)) {
-        length4 += buf2.byteLength;
-        this.bufs.push(...buf2.bufs);
-      } else {
-        throw new Error("Could not append value, must be an Uint8Array or a Uint8ArrayList");
-      }
-    }
-    this.length += length4;
-  }
-  /**
-   * Add one or more `bufs` to the start of this Uint8ArrayList
-   */
-  prepend(...bufs) {
-    this.prependAll(bufs);
-  }
-  /**
-   * Add all `bufs` to the start of this Uint8ArrayList
-   */
-  prependAll(bufs) {
-    let length4 = 0;
-    for (const buf2 of bufs.reverse()) {
-      if (buf2 instanceof Uint8Array) {
-        length4 += buf2.byteLength;
-        this.bufs.unshift(buf2);
-      } else if (isUint8ArrayList(buf2)) {
-        length4 += buf2.byteLength;
-        this.bufs.unshift(...buf2.bufs);
-      } else {
-        throw new Error("Could not prepend value, must be an Uint8Array or a Uint8ArrayList");
-      }
-    }
-    this.length += length4;
-  }
-  /**
-   * Read the value at `index`
-   */
-  get(index) {
-    const res = findBufAndOffset(this.bufs, index);
-    return res.buf[res.index];
-  }
-  /**
-   * Set the value at `index` to `value`
-   */
-  set(index, value) {
-    const res = findBufAndOffset(this.bufs, index);
-    res.buf[res.index] = value;
-  }
-  /**
-   * Copy bytes from `buf` to the index specified by `offset`
-   */
-  write(buf2, offset = 0) {
-    if (buf2 instanceof Uint8Array) {
-      for (let i = 0; i < buf2.length; i++) {
-        this.set(offset + i, buf2[i]);
-      }
-    } else if (isUint8ArrayList(buf2)) {
-      for (let i = 0; i < buf2.length; i++) {
-        this.set(offset + i, buf2.get(i));
-      }
-    } else {
-      throw new Error("Could not write value, must be an Uint8Array or a Uint8ArrayList");
-    }
-  }
-  /**
-   * Remove bytes from the front of the pool
-   */
-  consume(bytes) {
-    bytes = Math.trunc(bytes);
-    if (Number.isNaN(bytes) || bytes <= 0) {
-      return;
-    }
-    if (bytes === this.byteLength) {
-      this.bufs = [];
-      this.length = 0;
-      return;
-    }
-    while (this.bufs.length > 0) {
-      if (bytes >= this.bufs[0].byteLength) {
-        bytes -= this.bufs[0].byteLength;
-        this.length -= this.bufs[0].byteLength;
-        this.bufs.shift();
-      } else {
-        this.bufs[0] = this.bufs[0].subarray(bytes);
-        this.length -= bytes;
-        break;
-      }
-    }
-  }
-  /**
-   * Extracts a section of an array and returns a new array.
-   *
-   * This is a copy operation as it is with Uint8Arrays and Arrays
-   * - note this is different to the behaviour of Node Buffers.
-   */
-  slice(beginInclusive, endExclusive) {
-    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
-    return concat2(bufs, length4);
-  }
-  /**
-   * Returns a alloc from the given start and end element index.
-   *
-   * In the best case where the data extracted comes from a single Uint8Array
-   * internally this is a no-copy operation otherwise it is a copy operation.
-   */
-  subarray(beginInclusive, endExclusive) {
-    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
-    if (bufs.length === 1) {
-      return bufs[0];
-    }
-    return concat2(bufs, length4);
-  }
-  /**
-   * Returns a allocList from the given start and end element index.
-   *
-   * This is a no-copy operation.
-   */
-  sublist(beginInclusive, endExclusive) {
-    const { bufs, length: length4 } = this._subList(beginInclusive, endExclusive);
-    const list = new _Uint8ArrayList();
-    list.length = length4;
-    list.bufs = [...bufs];
-    return list;
-  }
-  _subList(beginInclusive, endExclusive) {
-    beginInclusive = beginInclusive ?? 0;
-    endExclusive = endExclusive ?? this.length;
-    if (beginInclusive < 0) {
-      beginInclusive = this.length + beginInclusive;
-    }
-    if (endExclusive < 0) {
-      endExclusive = this.length + endExclusive;
-    }
-    if (beginInclusive < 0 || endExclusive > this.length) {
-      throw new RangeError("index is out of bounds");
-    }
-    if (beginInclusive === endExclusive) {
-      return { bufs: [], length: 0 };
-    }
-    if (beginInclusive === 0 && endExclusive === this.length) {
-      return { bufs: this.bufs, length: this.length };
-    }
-    const bufs = [];
-    let offset = 0;
-    for (let i = 0; i < this.bufs.length; i++) {
-      const buf2 = this.bufs[i];
-      const bufStart = offset;
-      const bufEnd = bufStart + buf2.byteLength;
-      offset = bufEnd;
-      if (beginInclusive >= bufEnd) {
-        continue;
-      }
-      const sliceStartInBuf = beginInclusive >= bufStart && beginInclusive < bufEnd;
-      const sliceEndsInBuf = endExclusive > bufStart && endExclusive <= bufEnd;
-      if (sliceStartInBuf && sliceEndsInBuf) {
-        if (beginInclusive === bufStart && endExclusive === bufEnd) {
-          bufs.push(buf2);
-          break;
-        }
-        const start = beginInclusive - bufStart;
-        bufs.push(buf2.subarray(start, start + (endExclusive - beginInclusive)));
-        break;
-      }
-      if (sliceStartInBuf) {
-        if (beginInclusive === 0) {
-          bufs.push(buf2);
-          continue;
-        }
-        bufs.push(buf2.subarray(beginInclusive - bufStart));
-        continue;
-      }
-      if (sliceEndsInBuf) {
-        if (endExclusive === bufEnd) {
-          bufs.push(buf2);
-          break;
-        }
-        bufs.push(buf2.subarray(0, endExclusive - bufStart));
-        break;
-      }
-      bufs.push(buf2);
-    }
-    return { bufs, length: endExclusive - beginInclusive };
-  }
-  indexOf(search, offset = 0) {
-    if (!isUint8ArrayList(search) && !(search instanceof Uint8Array)) {
-      throw new TypeError('The "value" argument must be a Uint8ArrayList or Uint8Array');
-    }
-    const needle = search instanceof Uint8Array ? search : search.subarray();
-    offset = Number(offset ?? 0);
-    if (isNaN(offset)) {
-      offset = 0;
-    }
-    if (offset < 0) {
-      offset = this.length + offset;
-    }
-    if (offset < 0) {
-      offset = 0;
-    }
-    if (search.length === 0) {
-      return offset > this.length ? this.length : offset;
-    }
-    const M = needle.byteLength;
-    if (M === 0) {
-      throw new TypeError("search must be at least 1 byte long");
-    }
-    const radix = 256;
-    const rightmostPositions = new Int32Array(radix);
-    for (let c = 0; c < radix; c++) {
-      rightmostPositions[c] = -1;
-    }
-    for (let j = 0; j < M; j++) {
-      rightmostPositions[needle[j]] = j;
-    }
-    const right = rightmostPositions;
-    const lastIndex = this.byteLength - needle.byteLength;
-    const lastPatIndex = needle.byteLength - 1;
-    let skip;
-    for (let i = offset; i <= lastIndex; i += skip) {
-      skip = 0;
-      for (let j = lastPatIndex; j >= 0; j--) {
-        const char = this.get(i + j);
-        if (needle[j] !== char) {
-          skip = Math.max(1, j - right[char]);
-          break;
-        }
-      }
-      if (skip === 0) {
-        return i;
-      }
-    }
-    return -1;
-  }
-  getInt8(byteOffset) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getInt8(0);
-  }
-  setInt8(byteOffset, value) {
-    const buf2 = allocUnsafe(1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setInt8(0, value);
-    this.write(buf2, byteOffset);
-  }
-  getInt16(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getInt16(0, littleEndian);
-  }
-  setInt16(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setInt16(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getInt32(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getInt32(0, littleEndian);
-  }
-  setInt32(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setInt32(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getBigInt64(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getBigInt64(0, littleEndian);
-  }
-  setBigInt64(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setBigInt64(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getUint8(byteOffset) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getUint8(0);
-  }
-  setUint8(byteOffset, value) {
-    const buf2 = allocUnsafe(1);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setUint8(0, value);
-    this.write(buf2, byteOffset);
-  }
-  getUint16(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getUint16(0, littleEndian);
-  }
-  setUint16(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(2);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setUint16(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getUint32(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getUint32(0, littleEndian);
-  }
-  setUint32(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setUint32(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getBigUint64(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getBigUint64(0, littleEndian);
-  }
-  setBigUint64(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setBigUint64(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getFloat32(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getFloat32(0, littleEndian);
-  }
-  setFloat32(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(4);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setFloat32(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  getFloat64(byteOffset, littleEndian) {
-    const buf2 = this.subarray(byteOffset, byteOffset + 8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    return view.getFloat64(0, littleEndian);
-  }
-  setFloat64(byteOffset, value, littleEndian) {
-    const buf2 = alloc2(8);
-    const view = new DataView(buf2.buffer, buf2.byteOffset, buf2.byteLength);
-    view.setFloat64(0, value, littleEndian);
-    this.write(buf2, byteOffset);
-  }
-  equals(other) {
-    if (other == null) {
-      return false;
-    }
-    if (!(other instanceof _Uint8ArrayList)) {
-      return false;
-    }
-    if (other.bufs.length !== this.bufs.length) {
-      return false;
-    }
-    for (let i = 0; i < this.bufs.length; i++) {
-      if (!equals5(this.bufs[i], other.bufs[i])) {
-        return false;
-      }
-    }
-    return true;
-  }
-  /**
-   * Create a Uint8ArrayList from a pre-existing list of Uint8Arrays.  Use this
-   * method if you know the total size of all the Uint8Arrays ahead of time.
-   */
-  static fromUint8Arrays(bufs, length4) {
-    const list = new _Uint8ArrayList();
-    list.bufs = bufs;
-    if (length4 == null) {
-      length4 = bufs.reduce((acc, curr) => acc + curr.byteLength, 0);
-    }
-    list.length = length4;
-    return list;
-  }
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/chunker/fixed-size.js
-var DEFAULT_CHUNK_SIZE = 262144;
-var fixedSize = (options = {}) => {
-  const chunkSize = options.chunkSize ?? DEFAULT_CHUNK_SIZE;
-  return async function* fixedSizeChunker(source) {
-    let list = new Uint8ArrayList();
-    let currentLength = 0;
-    let emitted = false;
-    for await (const buffer2 of source) {
-      list.append(buffer2);
-      currentLength += buffer2.length;
-      while (currentLength >= chunkSize) {
-        yield list.slice(0, chunkSize);
-        emitted = true;
-        if (chunkSize === list.length) {
-          list = new Uint8ArrayList();
-          currentLength = 0;
-        } else {
-          const newBl = new Uint8ArrayList();
-          newBl.append(list.sublist(chunkSize));
-          list = newBl;
-          currentLength -= chunkSize;
-        }
-      }
-    }
-    if (!emitted || currentLength > 0) {
-      yield list.subarray(0, currentLength);
-    }
-  };
-};
-
-// node_modules/ipfs-unixfs/dist/src/index.js
-var import_err_code = __toESM(require_err_code(), 1);
-
-// node_modules/protons-runtime/dist/src/utils/float.js
-var f32 = new Float32Array([-0]);
-var f8b = new Uint8Array(f32.buffer);
-function writeFloatLE(val, buf2, pos) {
-  f32[0] = val;
-  buf2[pos] = f8b[0];
-  buf2[pos + 1] = f8b[1];
-  buf2[pos + 2] = f8b[2];
-  buf2[pos + 3] = f8b[3];
-}
-function readFloatLE(buf2, pos) {
-  f8b[0] = buf2[pos];
-  f8b[1] = buf2[pos + 1];
-  f8b[2] = buf2[pos + 2];
-  f8b[3] = buf2[pos + 3];
-  return f32[0];
-}
-var f64 = new Float64Array([-0]);
-var d8b = new Uint8Array(f64.buffer);
-function writeDoubleLE(val, buf2, pos) {
-  f64[0] = val;
-  buf2[pos] = d8b[0];
-  buf2[pos + 1] = d8b[1];
-  buf2[pos + 2] = d8b[2];
-  buf2[pos + 3] = d8b[3];
-  buf2[pos + 4] = d8b[4];
-  buf2[pos + 5] = d8b[5];
-  buf2[pos + 6] = d8b[6];
-  buf2[pos + 7] = d8b[7];
-}
-function readDoubleLE(buf2, pos) {
-  d8b[0] = buf2[pos];
-  d8b[1] = buf2[pos + 1];
-  d8b[2] = buf2[pos + 2];
-  d8b[3] = buf2[pos + 3];
-  d8b[4] = buf2[pos + 4];
-  d8b[5] = buf2[pos + 5];
-  d8b[6] = buf2[pos + 6];
-  d8b[7] = buf2[pos + 7];
-  return f64[0];
-}
-
-// node_modules/protons-runtime/dist/src/utils/longbits.js
-var MAX_SAFE_NUMBER_INTEGER = BigInt(Number.MAX_SAFE_INTEGER);
-var MIN_SAFE_NUMBER_INTEGER = BigInt(Number.MIN_SAFE_INTEGER);
-var LongBits = class _LongBits {
-  lo;
-  hi;
-  constructor(lo, hi) {
-    this.lo = lo | 0;
-    this.hi = hi | 0;
-  }
-  /**
-   * Converts this long bits to a possibly unsafe JavaScript number
-   */
-  toNumber(unsigned = false) {
-    if (!unsigned && this.hi >>> 31 > 0) {
-      const lo = ~this.lo + 1 >>> 0;
-      let hi = ~this.hi >>> 0;
-      if (lo === 0) {
-        hi = hi + 1 >>> 0;
-      }
-      return -(lo + hi * 4294967296);
-    }
-    return this.lo + this.hi * 4294967296;
-  }
-  /**
-   * Converts this long bits to a bigint
-   */
-  toBigInt(unsigned = false) {
-    if (unsigned) {
-      return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
-    }
-    if (this.hi >>> 31 !== 0) {
-      const lo = ~this.lo + 1 >>> 0;
-      let hi = ~this.hi >>> 0;
-      if (lo === 0) {
-        hi = hi + 1 >>> 0;
-      }
-      return -(BigInt(lo) + (BigInt(hi) << 32n));
-    }
-    return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
-  }
-  /**
-   * Converts this long bits to a string
-   */
-  toString(unsigned = false) {
-    return this.toBigInt(unsigned).toString();
-  }
-  /**
-   * Zig-zag encodes this long bits
-   */
-  zzEncode() {
-    const mask = this.hi >> 31;
-    this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;
-    this.lo = (this.lo << 1 ^ mask) >>> 0;
-    return this;
-  }
-  /**
-   * Zig-zag decodes this long bits
-   */
-  zzDecode() {
-    const mask = -(this.lo & 1);
-    this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;
-    this.hi = (this.hi >>> 1 ^ mask) >>> 0;
-    return this;
-  }
-  /**
-   * Calculates the length of this longbits when encoded as a varint.
-   */
-  length() {
-    const part0 = this.lo;
-    const part1 = (this.lo >>> 28 | this.hi << 4) >>> 0;
-    const part2 = this.hi >>> 24;
-    return part2 === 0 ? part1 === 0 ? part0 < 16384 ? part0 < 128 ? 1 : 2 : part0 < 2097152 ? 3 : 4 : part1 < 16384 ? part1 < 128 ? 5 : 6 : part1 < 2097152 ? 7 : 8 : part2 < 128 ? 9 : 10;
-  }
-  /**
-   * Constructs new long bits from the specified number
-   */
-  static fromBigInt(value) {
-    if (value === 0n) {
-      return zero;
-    }
-    if (value < MAX_SAFE_NUMBER_INTEGER && value > MIN_SAFE_NUMBER_INTEGER) {
-      return this.fromNumber(Number(value));
-    }
-    const negative = value < 0n;
-    if (negative) {
-      value = -value;
-    }
-    let hi = value >> 32n;
-    let lo = value - (hi << 32n);
-    if (negative) {
-      hi = ~hi | 0n;
-      lo = ~lo | 0n;
-      if (++lo > TWO_32) {
-        lo = 0n;
-        if (++hi > TWO_32) {
-          hi = 0n;
-        }
-      }
-    }
-    return new _LongBits(Number(lo), Number(hi));
-  }
-  /**
-   * Constructs new long bits from the specified number
-   */
-  static fromNumber(value) {
-    if (value === 0) {
-      return zero;
-    }
-    const sign = value < 0;
-    if (sign) {
-      value = -value;
-    }
-    let lo = value >>> 0;
-    let hi = (value - lo) / 4294967296 >>> 0;
-    if (sign) {
-      hi = ~hi >>> 0;
-      lo = ~lo >>> 0;
-      if (++lo > 4294967295) {
-        lo = 0;
-        if (++hi > 4294967295) {
-          hi = 0;
-        }
-      }
-    }
-    return new _LongBits(lo, hi);
-  }
-  /**
-   * Constructs new long bits from a number, long or string
-   */
-  static from(value) {
-    if (typeof value === "number") {
-      return _LongBits.fromNumber(value);
-    }
-    if (typeof value === "bigint") {
-      return _LongBits.fromBigInt(value);
-    }
-    if (typeof value === "string") {
-      return _LongBits.fromBigInt(BigInt(value));
-    }
-    return value.low != null || value.high != null ? new _LongBits(value.low >>> 0, value.high >>> 0) : zero;
-  }
-};
-var zero = new LongBits(0, 0);
-zero.toBigInt = function() {
-  return 0n;
-};
-zero.zzEncode = zero.zzDecode = function() {
-  return this;
-};
-zero.length = function() {
-  return 1;
-};
-var TWO_32 = 4294967296n;
-
-// node_modules/protons-runtime/dist/src/utils/utf8.js
-function length3(string2) {
-  let len = 0;
-  let c = 0;
-  for (let i = 0; i < string2.length; ++i) {
-    c = string2.charCodeAt(i);
-    if (c < 128) {
-      len += 1;
-    } else if (c < 2048) {
-      len += 2;
-    } else if ((c & 64512) === 55296 && (string2.charCodeAt(i + 1) & 64512) === 56320) {
-      ++i;
-      len += 4;
-    } else {
-      len += 3;
-    }
-  }
-  return len;
-}
-function read3(buffer2, start, end) {
-  const len = end - start;
-  if (len < 1) {
-    return "";
-  }
-  let parts;
-  const chunk = [];
-  let i = 0;
-  let t;
-  while (start < end) {
-    t = buffer2[start++];
-    if (t < 128) {
-      chunk[i++] = t;
-    } else if (t > 191 && t < 224) {
-      chunk[i++] = (t & 31) << 6 | buffer2[start++] & 63;
-    } else if (t > 239 && t < 365) {
-      t = ((t & 7) << 18 | (buffer2[start++] & 63) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63) - 65536;
-      chunk[i++] = 55296 + (t >> 10);
-      chunk[i++] = 56320 + (t & 1023);
-    } else {
-      chunk[i++] = (t & 15) << 12 | (buffer2[start++] & 63) << 6 | buffer2[start++] & 63;
-    }
-    if (i > 8191) {
-      (parts ?? (parts = [])).push(String.fromCharCode.apply(String, chunk));
-      i = 0;
-    }
-  }
-  if (parts != null) {
-    if (i > 0) {
-      parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));
-    }
-    return parts.join("");
-  }
-  return String.fromCharCode.apply(String, chunk.slice(0, i));
-}
-function write(string2, buffer2, offset) {
-  const start = offset;
-  let c1;
-  let c2;
-  for (let i = 0; i < string2.length; ++i) {
-    c1 = string2.charCodeAt(i);
-    if (c1 < 128) {
-      buffer2[offset++] = c1;
-    } else if (c1 < 2048) {
-      buffer2[offset++] = c1 >> 6 | 192;
-      buffer2[offset++] = c1 & 63 | 128;
-    } else if ((c1 & 64512) === 55296 && ((c2 = string2.charCodeAt(i + 1)) & 64512) === 56320) {
-      c1 = 65536 + ((c1 & 1023) << 10) + (c2 & 1023);
-      ++i;
-      buffer2[offset++] = c1 >> 18 | 240;
-      buffer2[offset++] = c1 >> 12 & 63 | 128;
-      buffer2[offset++] = c1 >> 6 & 63 | 128;
-      buffer2[offset++] = c1 & 63 | 128;
-    } else {
-      buffer2[offset++] = c1 >> 12 | 224;
-      buffer2[offset++] = c1 >> 6 & 63 | 128;
-      buffer2[offset++] = c1 & 63 | 128;
-    }
-  }
-  return offset - start;
-}
-
-// node_modules/protons-runtime/dist/src/utils/reader.js
-function indexOutOfRange(reader, writeLength) {
-  return RangeError(`index out of range: ${reader.pos} + ${writeLength ?? 1} > ${reader.len}`);
-}
-function readFixed32End(buf2, end) {
-  return (buf2[end - 4] | buf2[end - 3] << 8 | buf2[end - 2] << 16 | buf2[end - 1] << 24) >>> 0;
-}
-var Uint8ArrayReader = class {
-  buf;
-  pos;
-  len;
-  _slice = Uint8Array.prototype.subarray;
-  constructor(buffer2) {
-    this.buf = buffer2;
-    this.pos = 0;
-    this.len = buffer2.length;
-  }
-  /**
-   * Reads a varint as an unsigned 32 bit value
-   */
-  uint32() {
-    let value = 4294967295;
-    value = (this.buf[this.pos] & 127) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 127) << 7) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 127) << 14) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 127) << 21) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    value = (value | (this.buf[this.pos] & 15) << 28) >>> 0;
-    if (this.buf[this.pos++] < 128)
-      return value;
-    if ((this.pos += 5) > this.len) {
-      this.pos = this.len;
-      throw indexOutOfRange(this, 10);
-    }
-    return value;
-  }
-  /**
-   * Reads a varint as a signed 32 bit value
-   */
-  int32() {
-    return this.uint32() | 0;
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 32 bit value
-   */
-  sint32() {
-    const value = this.uint32();
-    return value >>> 1 ^ -(value & 1) | 0;
-  }
-  /**
-   * Reads a varint as a boolean
-   */
-  bool() {
-    return this.uint32() !== 0;
-  }
-  /**
-   * Reads fixed 32 bits as an unsigned 32 bit integer
-   */
-  fixed32() {
-    if (this.pos + 4 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const res = readFixed32End(this.buf, this.pos += 4);
-    return res;
-  }
-  /**
-   * Reads fixed 32 bits as a signed 32 bit integer
-   */
-  sfixed32() {
-    if (this.pos + 4 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const res = readFixed32End(this.buf, this.pos += 4) | 0;
-    return res;
-  }
-  /**
-   * Reads a float (32 bit) as a number
-   */
-  float() {
-    if (this.pos + 4 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const value = readFloatLE(this.buf, this.pos);
-    this.pos += 4;
-    return value;
-  }
-  /**
-   * Reads a double (64 bit float) as a number
-   */
-  double() {
-    if (this.pos + 8 > this.len) {
-      throw indexOutOfRange(this, 4);
-    }
-    const value = readDoubleLE(this.buf, this.pos);
-    this.pos += 8;
-    return value;
-  }
-  /**
-   * Reads a sequence of bytes preceded by its length as a varint
-   */
-  bytes() {
-    const length4 = this.uint32();
-    const start = this.pos;
-    const end = this.pos + length4;
-    if (end > this.len) {
-      throw indexOutOfRange(this, length4);
-    }
-    this.pos += length4;
-    return start === end ? new Uint8Array(0) : this.buf.subarray(start, end);
-  }
-  /**
-   * Reads a string preceded by its byte length as a varint
-   */
-  string() {
-    const bytes = this.bytes();
-    return read3(bytes, 0, bytes.length);
-  }
-  /**
-   * Skips the specified number of bytes if specified, otherwise skips a varint
-   */
-  skip(length4) {
-    if (typeof length4 === "number") {
-      if (this.pos + length4 > this.len) {
-        throw indexOutOfRange(this, length4);
-      }
-      this.pos += length4;
-    } else {
-      do {
-        if (this.pos >= this.len) {
-          throw indexOutOfRange(this);
-        }
-      } while ((this.buf[this.pos++] & 128) !== 0);
-    }
-    return this;
-  }
-  /**
-   * Skips the next element of the specified wire type
-   */
-  skipType(wireType) {
-    switch (wireType) {
-      case 0:
-        this.skip();
-        break;
-      case 1:
-        this.skip(8);
-        break;
-      case 2:
-        this.skip(this.uint32());
-        break;
-      case 3:
-        while ((wireType = this.uint32() & 7) !== 4) {
-          this.skipType(wireType);
-        }
-        break;
-      case 5:
-        this.skip(4);
-        break;
-      default:
-        throw Error(`invalid wire type ${wireType} at offset ${this.pos}`);
-    }
-    return this;
-  }
-  readLongVarint() {
-    const bits = new LongBits(0, 0);
-    let i = 0;
-    if (this.len - this.pos > 4) {
-      for (; i < 4; ++i) {
-        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-      bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;
-      bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;
-      if (this.buf[this.pos++] < 128) {
-        return bits;
-      }
-      i = 0;
-    } else {
-      for (; i < 3; ++i) {
-        if (this.pos >= this.len) {
-          throw indexOutOfRange(this);
-        }
-        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-      bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;
-      return bits;
-    }
-    if (this.len - this.pos > 4) {
-      for (; i < 5; ++i) {
-        bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-    } else {
-      for (; i < 5; ++i) {
-        if (this.pos >= this.len) {
-          throw indexOutOfRange(this);
-        }
-        bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
-        if (this.buf[this.pos++] < 128) {
-          return bits;
-        }
-      }
-    }
-    throw Error("invalid varint encoding");
-  }
-  readFixed64() {
-    if (this.pos + 8 > this.len) {
-      throw indexOutOfRange(this, 8);
-    }
-    const lo = readFixed32End(this.buf, this.pos += 4);
-    const hi = readFixed32End(this.buf, this.pos += 4);
-    return new LongBits(lo, hi);
-  }
-  /**
-   * Reads a varint as a signed 64 bit value
-   */
-  int64() {
-    return this.readLongVarint().toBigInt();
-  }
-  /**
-   * Reads a varint as a signed 64 bit value returned as a possibly unsafe
-   * JavaScript number
-   */
-  int64Number() {
-    return this.readLongVarint().toNumber();
-  }
-  /**
-   * Reads a varint as a signed 64 bit value returned as a string
-   */
-  int64String() {
-    return this.readLongVarint().toString();
-  }
-  /**
-   * Reads a varint as an unsigned 64 bit value
-   */
-  uint64() {
-    return this.readLongVarint().toBigInt(true);
-  }
-  /**
-   * Reads a varint as an unsigned 64 bit value returned as a possibly unsafe
-   * JavaScript number
-   */
-  uint64Number() {
-    return this.readLongVarint().toNumber(true);
-  }
-  /**
-   * Reads a varint as an unsigned 64 bit value returned as a string
-   */
-  uint64String() {
-    return this.readLongVarint().toString(true);
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 64 bit value
-   */
-  sint64() {
-    return this.readLongVarint().zzDecode().toBigInt();
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 64 bit value returned as a
-   * possibly unsafe JavaScript number
-   */
-  sint64Number() {
-    return this.readLongVarint().zzDecode().toNumber();
-  }
-  /**
-   * Reads a zig-zag encoded varint as a signed 64 bit value returned as a
-   * string
-   */
-  sint64String() {
-    return this.readLongVarint().zzDecode().toString();
-  }
-  /**
-   * Reads fixed 64 bits
-   */
-  fixed64() {
-    return this.readFixed64().toBigInt();
-  }
-  /**
-   * Reads fixed 64 bits returned as a possibly unsafe JavaScript number
-   */
-  fixed64Number() {
-    return this.readFixed64().toNumber();
-  }
-  /**
-   * Reads fixed 64 bits returned as a string
-   */
-  fixed64String() {
-    return this.readFixed64().toString();
-  }
-  /**
-   * Reads zig-zag encoded fixed 64 bits
-   */
-  sfixed64() {
-    return this.readFixed64().toBigInt();
-  }
-  /**
-   * Reads zig-zag encoded fixed 64 bits returned as a possibly unsafe
-   * JavaScript number
-   */
-  sfixed64Number() {
-    return this.readFixed64().toNumber();
-  }
-  /**
-   * Reads zig-zag encoded fixed 64 bits returned as a string
-   */
-  sfixed64String() {
-    return this.readFixed64().toString();
-  }
-};
-function createReader(buf2) {
-  return new Uint8ArrayReader(buf2 instanceof Uint8Array ? buf2 : buf2.subarray());
-}
-
-// node_modules/protons-runtime/dist/src/decode.js
-function decodeMessage(buf2, codec) {
-  const reader = createReader(buf2);
-  return codec.decode(reader);
-}
-
-// node_modules/multiformats/src/bases/base10.js
-var base10_exports = {};
-__export(base10_exports, {
-  base10: () => base10
-});
-var base10 = baseX2({
-  prefix: "9",
-  name: "base10",
-  alphabet: "0123456789"
-});
-
-// node_modules/multiformats/src/bases/base16.js
-var base16_exports = {};
-__export(base16_exports, {
-  base16: () => base16,
-  base16upper: () => base16upper
-});
-var base16 = rfc46482({
-  prefix: "f",
-  name: "base16",
-  alphabet: "0123456789abcdef",
-  bitsPerChar: 4
-});
-var base16upper = rfc46482({
-  prefix: "F",
-  name: "base16upper",
-  alphabet: "0123456789ABCDEF",
-  bitsPerChar: 4
-});
-
-// node_modules/multiformats/src/bases/base2.js
-var base2_exports = {};
-__export(base2_exports, {
-  base2: () => base22
-});
-var base22 = rfc46482({
-  prefix: "0",
-  name: "base2",
-  alphabet: "01",
-  bitsPerChar: 1
-});
-
-// node_modules/multiformats/src/bases/base256emoji.js
-var base256emoji_exports = {};
-__export(base256emoji_exports, {
-  base256emoji: () => base256emoji
-});
-var alphabet = Array.from("\u{1F680}\u{1FA90}\u2604\u{1F6F0}\u{1F30C}\u{1F311}\u{1F312}\u{1F313}\u{1F314}\u{1F315}\u{1F316}\u{1F317}\u{1F318}\u{1F30D}\u{1F30F}\u{1F30E}\u{1F409}\u2600\u{1F4BB}\u{1F5A5}\u{1F4BE}\u{1F4BF}\u{1F602}\u2764\u{1F60D}\u{1F923}\u{1F60A}\u{1F64F}\u{1F495}\u{1F62D}\u{1F618}\u{1F44D}\u{1F605}\u{1F44F}\u{1F601}\u{1F525}\u{1F970}\u{1F494}\u{1F496}\u{1F499}\u{1F622}\u{1F914}\u{1F606}\u{1F644}\u{1F4AA}\u{1F609}\u263A\u{1F44C}\u{1F917}\u{1F49C}\u{1F614}\u{1F60E}\u{1F607}\u{1F339}\u{1F926}\u{1F389}\u{1F49E}\u270C\u2728\u{1F937}\u{1F631}\u{1F60C}\u{1F338}\u{1F64C}\u{1F60B}\u{1F497}\u{1F49A}\u{1F60F}\u{1F49B}\u{1F642}\u{1F493}\u{1F929}\u{1F604}\u{1F600}\u{1F5A4}\u{1F603}\u{1F4AF}\u{1F648}\u{1F447}\u{1F3B6}\u{1F612}\u{1F92D}\u2763\u{1F61C}\u{1F48B}\u{1F440}\u{1F62A}\u{1F611}\u{1F4A5}\u{1F64B}\u{1F61E}\u{1F629}\u{1F621}\u{1F92A}\u{1F44A}\u{1F973}\u{1F625}\u{1F924}\u{1F449}\u{1F483}\u{1F633}\u270B\u{1F61A}\u{1F61D}\u{1F634}\u{1F31F}\u{1F62C}\u{1F643}\u{1F340}\u{1F337}\u{1F63B}\u{1F613}\u2B50\u2705\u{1F97A}\u{1F308}\u{1F608}\u{1F918}\u{1F4A6}\u2714\u{1F623}\u{1F3C3}\u{1F490}\u2639\u{1F38A}\u{1F498}\u{1F620}\u261D\u{1F615}\u{1F33A}\u{1F382}\u{1F33B}\u{1F610}\u{1F595}\u{1F49D}\u{1F64A}\u{1F639}\u{1F5E3}\u{1F4AB}\u{1F480}\u{1F451}\u{1F3B5}\u{1F91E}\u{1F61B}\u{1F534}\u{1F624}\u{1F33C}\u{1F62B}\u26BD\u{1F919}\u2615\u{1F3C6}\u{1F92B}\u{1F448}\u{1F62E}\u{1F646}\u{1F37B}\u{1F343}\u{1F436}\u{1F481}\u{1F632}\u{1F33F}\u{1F9E1}\u{1F381}\u26A1\u{1F31E}\u{1F388}\u274C\u270A\u{1F44B}\u{1F630}\u{1F928}\u{1F636}\u{1F91D}\u{1F6B6}\u{1F4B0}\u{1F353}\u{1F4A2}\u{1F91F}\u{1F641}\u{1F6A8}\u{1F4A8}\u{1F92C}\u2708\u{1F380}\u{1F37A}\u{1F913}\u{1F619}\u{1F49F}\u{1F331}\u{1F616}\u{1F476}\u{1F974}\u25B6\u27A1\u2753\u{1F48E}\u{1F4B8}\u2B07\u{1F628}\u{1F31A}\u{1F98B}\u{1F637}\u{1F57A}\u26A0\u{1F645}\u{1F61F}\u{1F635}\u{1F44E}\u{1F932}\u{1F920}\u{1F927}\u{1F4CC}\u{1F535}\u{1F485}\u{1F9D0}\u{1F43E}\u{1F352}\u{1F617}\u{1F911}\u{1F30A}\u{1F92F}\u{1F437}\u260E\u{1F4A7}\u{1F62F}\u{1F486}\u{1
F446}\u{1F3A4}\u{1F647}\u{1F351}\u2744\u{1F334}\u{1F4A3}\u{1F438}\u{1F48C}\u{1F4CD}\u{1F940}\u{1F922}\u{1F445}\u{1F4A1}\u{1F4A9}\u{1F450}\u{1F4F8}\u{1F47B}\u{1F910}\u{1F92E}\u{1F3BC}\u{1F975}\u{1F6A9}\u{1F34E}\u{1F34A}\u{1F47C}\u{1F48D}\u{1F4E3}\u{1F942}");
-var alphabetBytesToChars = (
-  /** @type {string[]} */
-  alphabet.reduce(
-    (p, c, i) => {
-      p[i] = c;
-      return p;
-    },
-    /** @type {string[]} */
-    []
-  )
-);
-var alphabetCharsToBytes = (
-  /** @type {number[]} */
-  alphabet.reduce(
-    (p, c, i) => {
-      p[
-        /** @type {number} */
-        c.codePointAt(0)
-      ] = i;
-      return p;
-    },
-    /** @type {number[]} */
-    []
-  )
-);
-function encode10(data) {
-  return data.reduce((p, c) => {
-    p += alphabetBytesToChars[c];
-    return p;
-  }, "");
-}
-function decode14(str) {
-  const byts = [];
-  for (const char of str) {
-    const byt = alphabetCharsToBytes[
-      /** @type {number} */
-      char.codePointAt(0)
-    ];
-    if (byt === void 0) {
-      throw new Error(`Non-base256emoji character: ${char}`);
-    }
-    byts.push(byt);
-  }
-  return new Uint8Array(byts);
-}
-var base256emoji = from2({
-  prefix: "\u{1F680}",
-  name: "base256emoji",
-  encode: encode10,
-  decode: decode14
-});
-
-// node_modules/multiformats/src/bases/base36.js
-var base36_exports = {};
-__export(base36_exports, {
-  base36: () => base36,
-  base36upper: () => base36upper
-});
-var base36 = baseX2({
-  prefix: "k",
-  name: "base36",
-  alphabet: "0123456789abcdefghijklmnopqrstuvwxyz"
-});
-var base36upper = baseX2({
-  prefix: "K",
-  name: "base36upper",
-  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-});
-
-// node_modules/multiformats/src/bases/base8.js
-var base8_exports = {};
-__export(base8_exports, {
-  base8: () => base8
-});
-var base8 = rfc46482({
-  prefix: "7",
-  name: "base8",
-  alphabet: "01234567",
-  bitsPerChar: 3
-});
-
-// node_modules/multiformats/src/bases/identity.js
-var identity_exports = {};
-__export(identity_exports, {
-  identity: () => identity
-});
-var identity = from2({
-  prefix: "\0",
-  name: "identity",
-  encode: (buf2) => toString2(buf2),
-  decode: (str) => fromString2(str)
-});
-
-// node_modules/multiformats/src/codecs/json.js
-var textEncoder4 = new TextEncoder();
-var textDecoder3 = new TextDecoder();
-
-// node_modules/multiformats/src/hashes/identity.js
-var identity_exports2 = {};
-__export(identity_exports2, {
-  identity: () => identity2
-});
-var code4 = 0;
-var name3 = "identity";
-var encode11 = coerce2;
-var digest = (input) => create2(code4, encode11(input));
-var identity2 = { code: code4, name: name3, encode: encode11, digest };
-
-// node_modules/multiformats/src/hashes/sha2.js
-var sha2_exports = {};
-__export(sha2_exports, {
-  sha256: () => sha256,
-  sha512: () => sha512
-});
-import crypto from "crypto";
-
-// node_modules/multiformats/src/hashes/hasher.js
-var from3 = ({ name: name4, code: code5, encode: encode12 }) => new Hasher(name4, code5, encode12);
-var Hasher = class {
-  /**
-   *
-   * @param {Name} name
-   * @param {Code} code
-   * @param {(input: Uint8Array) => Await} encode
-   */
-  constructor(name4, code5, encode12) {
-    this.name = name4;
-    this.code = code5;
-    this.encode = encode12;
-  }
-  /**
-   * @param {Uint8Array} input
-   * @returns {Await>}
-   */
-  digest(input) {
-    if (input instanceof Uint8Array) {
-      const result = this.encode(input);
-      return result instanceof Uint8Array ? create2(this.code, result) : result.then((digest2) => create2(this.code, digest2));
-    } else {
-      throw Error("Unknown type, must be binary type");
-    }
-  }
-};
-
-// node_modules/multiformats/src/hashes/sha2.js
-var sha256 = from3({
-  name: "sha2-256",
-  code: 18,
-  encode: (input) => coerce2(crypto.createHash("sha256").update(input).digest())
-});
-var sha512 = from3({
-  name: "sha2-512",
-  code: 19,
-  encode: (input) => coerce2(crypto.createHash("sha512").update(input).digest())
-});
-
-// node_modules/multiformats/src/basics.js
-var bases = { ...identity_exports, ...base2_exports, ...base8_exports, ...base10_exports, ...base16_exports, ...base32_exports, ...base36_exports, ...base58_exports, ...base64_exports, ...base256emoji_exports };
-var hashes = { ...sha2_exports, ...identity_exports2 };
-
-// node_modules/uint8arrays/dist/src/util/bases.js
-function createCodec(name4, prefix, encode12, decode15) {
-  return {
-    name: name4,
-    prefix,
-    encoder: {
-      name: name4,
-      prefix,
-      encode: encode12
-    },
-    decoder: {
-      decode: decode15
-    }
-  };
-}
-var string = createCodec("utf8", "u", (buf2) => {
-  const decoder = new TextDecoder("utf8");
-  return "u" + decoder.decode(buf2);
-}, (str) => {
-  const encoder = new TextEncoder();
-  return encoder.encode(str.substring(1));
-});
-var ascii = createCodec("ascii", "a", (buf2) => {
-  let string2 = "a";
-  for (let i = 0; i < buf2.length; i++) {
-    string2 += String.fromCharCode(buf2[i]);
-  }
-  return string2;
-}, (str) => {
-  str = str.substring(1);
-  const buf2 = allocUnsafe(str.length);
-  for (let i = 0; i < str.length; i++) {
-    buf2[i] = str.charCodeAt(i);
-  }
-  return buf2;
-});
-var BASES = {
-  utf8: string,
-  "utf-8": string,
-  hex: bases.base16,
-  latin1: ascii,
-  ascii,
-  binary: ascii,
-  ...bases
-};
-var bases_default = BASES;
-
-// node_modules/uint8arrays/dist/src/from-string.js
-function fromString3(string2, encoding = "utf8") {
-  const base3 = bases_default[encoding];
-  if (base3 == null) {
-    throw new Error(`Unsupported encoding "${encoding}"`);
-  }
-  if ((encoding === "utf8" || encoding === "utf-8") && globalThis.Buffer != null && globalThis.Buffer.from != null) {
-    return asUint8Array(globalThis.Buffer.from(string2, "utf-8"));
-  }
-  return base3.decoder.decode(`${base3.prefix}${string2}`);
-}
-
-// node_modules/protons-runtime/dist/src/utils/pool.js
-function pool(size) {
-  const SIZE = size ?? 8192;
-  const MAX = SIZE >>> 1;
-  let slab;
-  let offset = SIZE;
-  return function poolAlloc(size2) {
-    if (size2 < 1 || size2 > MAX) {
-      return allocUnsafe(size2);
-    }
-    if (offset + size2 > SIZE) {
-      slab = allocUnsafe(SIZE);
-      offset = 0;
-    }
-    const buf2 = slab.subarray(offset, offset += size2);
-    if ((offset & 7) !== 0) {
-      offset = (offset | 7) + 1;
-    }
-    return buf2;
-  };
-}
-
-// node_modules/protons-runtime/dist/src/utils/writer.js
-var Op = class {
-  /**
-   * Function to call
-   */
-  fn;
-  /**
-   * Value byte length
-   */
-  len;
-  /**
-   * Next operation
-   */
-  next;
-  /**
-   * Value to write
-   */
-  val;
-  constructor(fn, len, val) {
-    this.fn = fn;
-    this.len = len;
-    this.next = void 0;
-    this.val = val;
-  }
-};
-function noop2() {
-}
-var State = class {
-  /**
-   * Current head
-   */
-  head;
-  /**
-   * Current tail
-   */
-  tail;
-  /**
-   * Current buffer length
-   */
-  len;
-  /**
-   * Next state
-   */
-  next;
-  constructor(writer) {
-    this.head = writer.head;
-    this.tail = writer.tail;
-    this.len = writer.len;
-    this.next = writer.states;
-  }
-};
-var bufferPool = pool();
-function alloc3(size) {
-  if (globalThis.Buffer != null) {
-    return allocUnsafe(size);
-  }
-  return bufferPool(size);
-}
-var Uint8ArrayWriter = class {
-  /**
-   * Current length
-   */
-  len;
-  /**
-   * Operations head
-   */
-  head;
-  /**
-   * Operations tail
-   */
-  tail;
-  /**
-   * Linked forked states
-   */
-  states;
-  constructor() {
-    this.len = 0;
-    this.head = new Op(noop2, 0, 0);
-    this.tail = this.head;
-    this.states = null;
-  }
-  /**
-   * Pushes a new operation to the queue
-   */
-  _push(fn, len, val) {
-    this.tail = this.tail.next = new Op(fn, len, val);
-    this.len += len;
-    return this;
-  }
-  /**
-   * Writes an unsigned 32 bit value as a varint
-   */
-  uint32(value) {
-    this.len += (this.tail = this.tail.next = new VarintOp((value = value >>> 0) < 128 ? 1 : value < 16384 ? 2 : value < 2097152 ? 3 : value < 268435456 ? 4 : 5, value)).len;
-    return this;
-  }
-  /**
-   * Writes a signed 32 bit value as a varint`
-   */
-  int32(value) {
-    return value < 0 ? this._push(writeVarint64, 10, LongBits.fromNumber(value)) : this.uint32(value);
-  }
-  /**
-   * Writes a 32 bit value as a varint, zig-zag encoded
-   */
-  sint32(value) {
-    return this.uint32((value << 1 ^ value >> 31) >>> 0);
-  }
-  /**
-   * Writes an unsigned 64 bit value as a varint
-   */
-  uint64(value) {
-    const bits = LongBits.fromBigInt(value);
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes an unsigned 64 bit value as a varint
-   */
-  uint64Number(value) {
-    const bits = LongBits.fromNumber(value);
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes an unsigned 64 bit value as a varint
-   */
-  uint64String(value) {
-    return this.uint64(BigInt(value));
-  }
-  /**
-   * Writes a signed 64 bit value as a varint
-   */
-  int64(value) {
-    return this.uint64(value);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint
-   */
-  int64Number(value) {
-    return this.uint64Number(value);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint
-   */
-  int64String(value) {
-    return this.uint64String(value);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint, zig-zag encoded
-   */
-  sint64(value) {
-    const bits = LongBits.fromBigInt(value).zzEncode();
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint, zig-zag encoded
-   */
-  sint64Number(value) {
-    const bits = LongBits.fromNumber(value).zzEncode();
-    return this._push(writeVarint64, bits.length(), bits);
-  }
-  /**
-   * Writes a signed 64 bit value as a varint, zig-zag encoded
-   */
-  sint64String(value) {
-    return this.sint64(BigInt(value));
-  }
-  /**
-   * Writes a boolish value as a varint
-   */
-  bool(value) {
-    return this._push(writeByte, 1, value ? 1 : 0);
-  }
-  /**
-   * Writes an unsigned 32 bit value as fixed 32 bits
-   */
-  fixed32(value) {
-    return this._push(writeFixed32, 4, value >>> 0);
-  }
-  /**
-   * Writes a signed 32 bit value as fixed 32 bits
-   */
-  sfixed32(value) {
-    return this.fixed32(value);
-  }
-  /**
-   * Writes an unsigned 64 bit value as fixed 64 bits
-   */
-  fixed64(value) {
-    const bits = LongBits.fromBigInt(value);
-    return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
-  }
-  /**
-   * Writes an unsigned 64 bit value as fixed 64 bits
-   */
-  fixed64Number(value) {
-    const bits = LongBits.fromNumber(value);
-    return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
-  }
-  /**
-   * Writes an unsigned 64 bit value as fixed 64 bits
-   */
-  fixed64String(value) {
-    return this.fixed64(BigInt(value));
-  }
-  /**
-   * Writes a signed 64 bit value as fixed 64 bits
-   */
-  sfixed64(value) {
-    return this.fixed64(value);
-  }
-  /**
-   * Writes a signed 64 bit value as fixed 64 bits
-   */
-  sfixed64Number(value) {
-    return this.fixed64Number(value);
-  }
-  /**
-   * Writes a signed 64 bit value as fixed 64 bits
-   */
-  sfixed64String(value) {
-    return this.fixed64String(value);
-  }
-  /**
-   * Writes a float (32 bit)
-   */
-  float(value) {
-    return this._push(writeFloatLE, 4, value);
-  }
-  /**
-   * Writes a double (64 bit float).
-   *
-   * @function
-   * @param {number} value - Value to write
-   * @returns {Writer} `this`
-   */
-  double(value) {
-    return this._push(writeDoubleLE, 8, value);
-  }
-  /**
-   * Writes a sequence of bytes
-   */
-  bytes(value) {
-    const len = value.length >>> 0;
-    if (len === 0) {
-      return this._push(writeByte, 1, 0);
-    }
-    return this.uint32(len)._push(writeBytes, len, value);
-  }
-  /**
-   * Writes a string
-   */
-  string(value) {
-    const len = length3(value);
-    return len !== 0 ? this.uint32(len)._push(write, len, value) : this._push(writeByte, 1, 0);
-  }
-  /**
-   * Forks this writer's state by pushing it to a stack.
-   * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.
-   */
-  fork() {
-    this.states = new State(this);
-    this.head = this.tail = new Op(noop2, 0, 0);
-    this.len = 0;
-    return this;
-  }
-  /**
-   * Resets this instance to the last state
-   */
-  reset() {
-    if (this.states != null) {
-      this.head = this.states.head;
-      this.tail = this.states.tail;
-      this.len = this.states.len;
-      this.states = this.states.next;
-    } else {
-      this.head = this.tail = new Op(noop2, 0, 0);
-      this.len = 0;
-    }
-    return this;
-  }
-  /**
-   * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.
-   */
-  ldelim() {
-    const head = this.head;
-    const tail = this.tail;
-    const len = this.len;
-    this.reset().uint32(len);
-    if (len !== 0) {
-      this.tail.next = head.next;
-      this.tail = tail;
-      this.len += len;
-    }
-    return this;
-  }
-  /**
-   * Finishes the write operation
-   */
-  finish() {
-    let head = this.head.next;
-    const buf2 = alloc3(this.len);
-    let pos = 0;
-    while (head != null) {
-      head.fn(head.val, buf2, pos);
-      pos += head.len;
-      head = head.next;
-    }
-    return buf2;
-  }
-};
-function writeByte(val, buf2, pos) {
-  buf2[pos] = val & 255;
-}
-function writeVarint32(val, buf2, pos) {
-  while (val > 127) {
-    buf2[pos++] = val & 127 | 128;
-    val >>>= 7;
-  }
-  buf2[pos] = val;
-}
-var VarintOp = class extends Op {
-  next;
-  constructor(len, val) {
-    super(writeVarint32, len, val);
-    this.next = void 0;
-  }
-};
-function writeVarint64(val, buf2, pos) {
-  while (val.hi !== 0) {
-    buf2[pos++] = val.lo & 127 | 128;
-    val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;
-    val.hi >>>= 7;
-  }
-  while (val.lo > 127) {
-    buf2[pos++] = val.lo & 127 | 128;
-    val.lo = val.lo >>> 7;
-  }
-  buf2[pos++] = val.lo;
-}
-function writeFixed32(val, buf2, pos) {
-  buf2[pos] = val & 255;
-  buf2[pos + 1] = val >>> 8 & 255;
-  buf2[pos + 2] = val >>> 16 & 255;
-  buf2[pos + 3] = val >>> 24;
-}
-function writeBytes(val, buf2, pos) {
-  buf2.set(val, pos);
-}
-if (globalThis.Buffer != null) {
-  Uint8ArrayWriter.prototype.bytes = function(value) {
-    const len = value.length >>> 0;
-    this.uint32(len);
-    if (len > 0) {
-      this._push(writeBytesBuffer, len, value);
-    }
-    return this;
-  };
-  Uint8ArrayWriter.prototype.string = function(value) {
-    const len = globalThis.Buffer.byteLength(value);
-    this.uint32(len);
-    if (len > 0) {
-      this._push(writeStringBuffer, len, value);
-    }
-    return this;
-  };
-}
-function writeBytesBuffer(val, buf2, pos) {
-  buf2.set(val, pos);
-}
-function writeStringBuffer(val, buf2, pos) {
-  if (val.length < 40) {
-    write(val, buf2, pos);
-  } else if (buf2.utf8Write != null) {
-    buf2.utf8Write(val, pos);
-  } else {
-    buf2.set(fromString3(val), pos);
-  }
-}
-function createWriter() {
-  return new Uint8ArrayWriter();
-}
-
-// node_modules/protons-runtime/dist/src/encode.js
-function encodeMessage(message2, codec) {
-  const w = createWriter();
-  codec.encode(message2, w, {
-    lengthDelimited: false
-  });
-  return w.finish();
-}
-
-// node_modules/protons-runtime/dist/src/codec.js
-var CODEC_TYPES;
-(function(CODEC_TYPES2) {
-  CODEC_TYPES2[CODEC_TYPES2["VARINT"] = 0] = "VARINT";
-  CODEC_TYPES2[CODEC_TYPES2["BIT64"] = 1] = "BIT64";
-  CODEC_TYPES2[CODEC_TYPES2["LENGTH_DELIMITED"] = 2] = "LENGTH_DELIMITED";
-  CODEC_TYPES2[CODEC_TYPES2["START_GROUP"] = 3] = "START_GROUP";
-  CODEC_TYPES2[CODEC_TYPES2["END_GROUP"] = 4] = "END_GROUP";
-  CODEC_TYPES2[CODEC_TYPES2["BIT32"] = 5] = "BIT32";
-})(CODEC_TYPES || (CODEC_TYPES = {}));
-function createCodec2(name4, type, encode12, decode15) {
-  return {
-    name: name4,
-    type,
-    encode: encode12,
-    decode: decode15
-  };
-}
-
-// node_modules/protons-runtime/dist/src/codecs/enum.js
-function enumeration(v) {
-  function findValue(val) {
-    if (v[val.toString()] == null) {
-      throw new Error("Invalid enum value");
-    }
-    return v[val];
-  }
-  const encode12 = function enumEncode(val, writer) {
-    const enumValue = findValue(val);
-    writer.int32(enumValue);
-  };
-  const decode15 = function enumDecode(reader) {
-    const val = reader.int32();
-    return findValue(val);
-  };
-  return createCodec2("enum", CODEC_TYPES.VARINT, encode12, decode15);
-}
-
-// node_modules/protons-runtime/dist/src/codecs/message.js
-function message(encode12, decode15) {
-  return createCodec2("message", CODEC_TYPES.LENGTH_DELIMITED, encode12, decode15);
-}
-
-// node_modules/ipfs-unixfs/dist/src/unixfs.js
-var Data;
-(function(Data2) {
-  let DataType;
-  (function(DataType2) {
-    DataType2["Raw"] = "Raw";
-    DataType2["Directory"] = "Directory";
-    DataType2["File"] = "File";
-    DataType2["Metadata"] = "Metadata";
-    DataType2["Symlink"] = "Symlink";
-    DataType2["HAMTShard"] = "HAMTShard";
-  })(DataType = Data2.DataType || (Data2.DataType = {}));
-  let __DataTypeValues;
-  (function(__DataTypeValues2) {
-    __DataTypeValues2[__DataTypeValues2["Raw"] = 0] = "Raw";
-    __DataTypeValues2[__DataTypeValues2["Directory"] = 1] = "Directory";
-    __DataTypeValues2[__DataTypeValues2["File"] = 2] = "File";
-    __DataTypeValues2[__DataTypeValues2["Metadata"] = 3] = "Metadata";
-    __DataTypeValues2[__DataTypeValues2["Symlink"] = 4] = "Symlink";
-    __DataTypeValues2[__DataTypeValues2["HAMTShard"] = 5] = "HAMTShard";
-  })(__DataTypeValues || (__DataTypeValues = {}));
-  (function(DataType2) {
-    DataType2.codec = () => {
-      return enumeration(__DataTypeValues);
-    };
-  })(DataType = Data2.DataType || (Data2.DataType = {}));
-  let _codec;
-  Data2.codec = () => {
-    if (_codec == null) {
-      _codec = message((obj, w, opts = {}) => {
-        if (opts.lengthDelimited !== false) {
-          w.fork();
-        }
-        if (obj.Type != null) {
-          w.uint32(8);
-          Data2.DataType.codec().encode(obj.Type, w);
-        }
-        if (obj.Data != null) {
-          w.uint32(18);
-          w.bytes(obj.Data);
-        }
-        if (obj.filesize != null) {
-          w.uint32(24);
-          w.uint64(obj.filesize);
-        }
-        if (obj.blocksizes != null) {
-          for (const value of obj.blocksizes) {
-            w.uint32(32);
-            w.uint64(value);
-          }
-        }
-        if (obj.hashType != null) {
-          w.uint32(40);
-          w.uint64(obj.hashType);
-        }
-        if (obj.fanout != null) {
-          w.uint32(48);
-          w.uint64(obj.fanout);
-        }
-        if (obj.mode != null) {
-          w.uint32(56);
-          w.uint32(obj.mode);
-        }
-        if (obj.mtime != null) {
-          w.uint32(66);
-          UnixTime.codec().encode(obj.mtime, w);
-        }
-        if (opts.lengthDelimited !== false) {
-          w.ldelim();
-        }
-      }, (reader, length4) => {
-        const obj = {
-          blocksizes: []
-        };
-        const end = length4 == null ? reader.len : reader.pos + length4;
-        while (reader.pos < end) {
-          const tag = reader.uint32();
-          switch (tag >>> 3) {
-            case 1:
-              obj.Type = Data2.DataType.codec().decode(reader);
-              break;
-            case 2:
-              obj.Data = reader.bytes();
-              break;
-            case 3:
-              obj.filesize = reader.uint64();
-              break;
-            case 4:
-              obj.blocksizes.push(reader.uint64());
-              break;
-            case 5:
-              obj.hashType = reader.uint64();
-              break;
-            case 6:
-              obj.fanout = reader.uint64();
-              break;
-            case 7:
-              obj.mode = reader.uint32();
-              break;
-            case 8:
-              obj.mtime = UnixTime.codec().decode(reader, reader.uint32());
-              break;
-            default:
-              reader.skipType(tag & 7);
-              break;
-          }
-        }
-        return obj;
-      });
-    }
-    return _codec;
-  };
-  Data2.encode = (obj) => {
-    return encodeMessage(obj, Data2.codec());
-  };
-  Data2.decode = (buf2) => {
-    return decodeMessage(buf2, Data2.codec());
-  };
-})(Data || (Data = {}));
-var UnixTime;
-(function(UnixTime2) {
-  let _codec;
-  UnixTime2.codec = () => {
-    if (_codec == null) {
-      _codec = message((obj, w, opts = {}) => {
-        if (opts.lengthDelimited !== false) {
-          w.fork();
-        }
-        if (obj.Seconds != null) {
-          w.uint32(8);
-          w.int64(obj.Seconds);
-        }
-        if (obj.FractionalNanoseconds != null) {
-          w.uint32(21);
-          w.fixed32(obj.FractionalNanoseconds);
-        }
-        if (opts.lengthDelimited !== false) {
-          w.ldelim();
-        }
-      }, (reader, length4) => {
-        const obj = {};
-        const end = length4 == null ? reader.len : reader.pos + length4;
-        while (reader.pos < end) {
-          const tag = reader.uint32();
-          switch (tag >>> 3) {
-            case 1:
-              obj.Seconds = reader.int64();
-              break;
-            case 2:
-              obj.FractionalNanoseconds = reader.fixed32();
-              break;
-            default:
-              reader.skipType(tag & 7);
-              break;
-          }
-        }
-        return obj;
-      });
-    }
-    return _codec;
-  };
-  UnixTime2.encode = (obj) => {
-    return encodeMessage(obj, UnixTime2.codec());
-  };
-  UnixTime2.decode = (buf2) => {
-    return decodeMessage(buf2, UnixTime2.codec());
-  };
-})(UnixTime || (UnixTime = {}));
-var Metadata;
-(function(Metadata2) {
-  let _codec;
-  Metadata2.codec = () => {
-    if (_codec == null) {
-      _codec = message((obj, w, opts = {}) => {
-        if (opts.lengthDelimited !== false) {
-          w.fork();
-        }
-        if (obj.MimeType != null) {
-          w.uint32(10);
-          w.string(obj.MimeType);
-        }
-        if (opts.lengthDelimited !== false) {
-          w.ldelim();
-        }
-      }, (reader, length4) => {
-        const obj = {};
-        const end = length4 == null ? reader.len : reader.pos + length4;
-        while (reader.pos < end) {
-          const tag = reader.uint32();
-          switch (tag >>> 3) {
-            case 1:
-              obj.MimeType = reader.string();
-              break;
-            default:
-              reader.skipType(tag & 7);
-              break;
-          }
-        }
-        return obj;
-      });
-    }
-    return _codec;
-  };
-  Metadata2.encode = (obj) => {
-    return encodeMessage(obj, Metadata2.codec());
-  };
-  Metadata2.decode = (buf2) => {
-    return decodeMessage(buf2, Metadata2.codec());
-  };
-})(Metadata || (Metadata = {}));
-
-// node_modules/ipfs-unixfs/dist/src/index.js
-var types = {
-  Raw: "raw",
-  Directory: "directory",
-  File: "file",
-  Metadata: "metadata",
-  Symlink: "symlink",
-  HAMTShard: "hamt-sharded-directory"
-};
-var dirTypes = [
-  "directory",
-  "hamt-sharded-directory"
-];
-var DEFAULT_FILE_MODE = parseInt("0644", 8);
-var DEFAULT_DIRECTORY_MODE = parseInt("0755", 8);
-var UnixFS = class _UnixFS {
-  /**
-   * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md
-   */
-  static unmarshal(marshaled) {
-    const message2 = Data.decode(marshaled);
-    const data = new _UnixFS({
-      type: types[message2.Type != null ? message2.Type.toString() : "File"],
-      data: message2.Data,
-      blockSizes: message2.blocksizes,
-      mode: message2.mode,
-      mtime: message2.mtime != null ? {
-        secs: message2.mtime.Seconds ?? 0n,
-        nsecs: message2.mtime.FractionalNanoseconds
-      } : void 0,
-      fanout: message2.fanout
-    });
-    data._originalMode = message2.mode ?? 0;
-    return data;
-  }
-  type;
-  data;
-  blockSizes;
-  hashType;
-  fanout;
-  mtime;
-  _mode;
-  _originalMode;
-  constructor(options = {
-    type: "file"
-  }) {
-    const { type, data, blockSizes, hashType, fanout, mtime, mode } = options;
-    if (type != null && !Object.values(types).includes(type)) {
-      throw (0, import_err_code.default)(new Error("Type: " + type + " is not valid"), "ERR_INVALID_TYPE");
-    }
-    this.type = type ?? "file";
-    this.data = data;
-    this.hashType = hashType;
-    this.fanout = fanout;
-    this.blockSizes = blockSizes ?? [];
-    this._originalMode = 0;
-    this.mode = mode;
-    this.mtime = mtime;
-  }
-  set mode(mode) {
-    if (mode == null) {
-      this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE;
-    } else {
-      this._mode = mode & 4095;
-    }
-  }
-  get mode() {
-    return this._mode;
-  }
-  isDirectory() {
-    return dirTypes.includes(this.type);
-  }
-  addBlockSize(size) {
-    this.blockSizes.push(size);
-  }
-  removeBlockSize(index) {
-    this.blockSizes.splice(index, 1);
-  }
-  /**
-   * Returns `0n` for directories or `data.length + sum(blockSizes)` for everything else
-   */
-  fileSize() {
-    if (this.isDirectory()) {
-      return 0n;
-    }
-    let sum = 0n;
-    this.blockSizes.forEach((size) => {
-      sum += size;
-    });
-    if (this.data != null) {
-      sum += BigInt(this.data.length);
-    }
-    return sum;
-  }
-  /**
-   * encode to protobuf Uint8Array
-   */
-  marshal() {
-    let type;
-    switch (this.type) {
-      case "raw":
-        type = Data.DataType.Raw;
-        break;
-      case "directory":
-        type = Data.DataType.Directory;
-        break;
-      case "file":
-        type = Data.DataType.File;
-        break;
-      case "metadata":
-        type = Data.DataType.Metadata;
-        break;
-      case "symlink":
-        type = Data.DataType.Symlink;
-        break;
-      case "hamt-sharded-directory":
-        type = Data.DataType.HAMTShard;
-        break;
-      default:
-        throw (0, import_err_code.default)(new Error(`Type: ${type} is not valid`), "ERR_INVALID_TYPE");
-    }
-    let data = this.data;
-    if (this.data == null || this.data.length === 0) {
-      data = void 0;
-    }
-    let mode;
-    if (this.mode != null) {
-      mode = this._originalMode & 4294963200 | (this.mode ?? 0);
-      if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) {
-        mode = void 0;
-      }
-      if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {
-        mode = void 0;
-      }
-    }
-    let mtime;
-    if (this.mtime != null) {
-      mtime = {
-        Seconds: this.mtime.secs,
-        FractionalNanoseconds: this.mtime.nsecs
-      };
-    }
-    return Data.encode({
-      Type: type,
-      Data: data,
-      filesize: this.isDirectory() ? void 0 : this.fileSize(),
-      blocksizes: this.blockSizes,
-      hashType: this.hashType,
-      fanout: this.fanout,
-      mode,
-      mtime
-    });
-  }
-};
-
-// node_modules/progress-events/dist/src/index.js
-var CustomProgressEvent = class extends Event {
-  constructor(type, detail) {
-    super(type);
-    this.detail = detail;
-  }
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/utils/persist.js
-var persist = async (buffer2, blockstore, options) => {
-  if (options.codec == null) {
-    options.codec = src_exports2;
-  }
-  const multihash = await sha256.digest(buffer2);
-  const cid = CID2.create(options.cidVersion, options.codec.code, multihash);
-  await blockstore.put(cid, buffer2, options);
-  return cid;
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/buffer-importer.js
-function defaultBufferImporter(options) {
-  return async function* bufferImporter(file, blockstore) {
-    let bytesWritten = 0n;
-    for await (let block of file.content) {
-      yield async () => {
-        var _a;
-        let unixfs2;
-        const opts = {
-          codec: src_exports2,
-          cidVersion: options.cidVersion,
-          onProgress: options.onProgress
-        };
-        if (options.rawLeaves) {
-          opts.codec = raw_exports;
-          opts.cidVersion = 1;
-        } else {
-          unixfs2 = new UnixFS({
-            type: options.leafType,
-            data: block
-          });
-          block = encode7({
-            Data: unixfs2.marshal(),
-            Links: []
-          });
-        }
-        const cid = await persist(block, blockstore, opts);
-        bytesWritten += BigInt(block.byteLength);
-        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:write", {
-          bytesWritten,
-          cid,
-          path: file.path
-        }));
-        return {
-          cid,
-          unixfs: unixfs2,
-          size: BigInt(block.length),
-          block
-        };
-      };
-    }
-  };
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js
-var import_err_code2 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/dir.js
-var dirBuilder = async (dir, blockstore, options) => {
-  const unixfs2 = new UnixFS({
-    type: "directory",
-    mtime: dir.mtime,
-    mode: dir.mode
-  });
-  const block = encode7(prepare({ Data: unixfs2.marshal() }));
-  const cid = await persist(block, blockstore, options);
-  const path6 = dir.path;
-  return {
-    cid,
-    path: path6,
-    unixfs: unixfs2,
-    size: BigInt(block.length),
-    originalPath: dir.originalPath,
-    block
-  };
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/file.js
-async function* buildFileBatch(file, blockstore, options) {
-  let count = -1;
-  let previous;
-  for await (const entry of parallelBatch(options.bufferImporter(file, blockstore), options.blockWriteConcurrency)) {
-    count++;
-    if (count === 0) {
-      previous = {
-        ...entry,
-        single: true
-      };
-      continue;
-    } else if (count === 1 && previous != null) {
-      yield {
-        ...previous,
-        block: void 0,
-        single: void 0
-      };
-      previous = void 0;
-    }
-    yield {
-      ...entry,
-      block: void 0
-    };
-  }
-  if (previous != null) {
-    yield previous;
-  }
-}
-function isSingleBlockImport(result) {
-  return result.single === true;
-}
-var reduce = (file, blockstore, options) => {
-  const reducer = async function(leaves) {
-    var _a, _b;
-    if (leaves.length === 1 && isSingleBlockImport(leaves[0]) && options.reduceSingleLeafToSelf) {
-      const leaf = leaves[0];
-      let node2 = leaf.block;
-      if (isSingleBlockImport(leaf) && (file.mtime !== void 0 || file.mode !== void 0)) {
-        leaf.unixfs = new UnixFS({
-          type: "file",
-          mtime: file.mtime,
-          mode: file.mode,
-          data: leaf.block
-        });
-        node2 = { Data: leaf.unixfs.marshal(), Links: [] };
-        leaf.block = encode7(prepare(node2));
-        leaf.cid = await persist(leaf.block, blockstore, {
-          ...options,
-          cidVersion: options.cidVersion
-        });
-        leaf.size = BigInt(leaf.block.length);
-      }
-      (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", {
-        cid: leaf.cid,
-        path: leaf.originalPath
-      }));
-      return {
-        cid: leaf.cid,
-        path: file.path,
-        unixfs: leaf.unixfs,
-        size: leaf.size,
-        originalPath: leaf.originalPath
-      };
-    }
-    const f = new UnixFS({
-      type: "file",
-      mtime: file.mtime,
-      mode: file.mode
-    });
-    const links = leaves.filter((leaf) => {
-      var _a2, _b2;
-      if (leaf.cid.code === code3 && leaf.size > 0) {
-        return true;
-      }
-      if (leaf.unixfs != null && leaf.unixfs.data == null && leaf.unixfs.fileSize() > 0n) {
-        return true;
-      }
-      return Boolean((_b2 = (_a2 = leaf.unixfs) == null ? void 0 : _a2.data) == null ? void 0 : _b2.length);
-    }).map((leaf) => {
-      var _a2;
-      if (leaf.cid.code === code3) {
-        f.addBlockSize(leaf.size);
-        return {
-          Name: "",
-          Tsize: Number(leaf.size),
-          Hash: leaf.cid
-        };
-      }
-      if (leaf.unixfs == null || leaf.unixfs.data == null) {
-        f.addBlockSize(((_a2 = leaf.unixfs) == null ? void 0 : _a2.fileSize()) ?? 0n);
-      } else {
-        f.addBlockSize(BigInt(leaf.unixfs.data.length));
-      }
-      return {
-        Name: "",
-        Tsize: Number(leaf.size),
-        Hash: leaf.cid
-      };
-    });
-    const node = {
-      Data: f.marshal(),
-      Links: links
-    };
-    const block = encode7(prepare(node));
-    const cid = await persist(block, blockstore, options);
-    (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:importer:progress:file:layout", {
-      cid,
-      path: file.originalPath
-    }));
-    return {
-      cid,
-      path: file.path,
-      unixfs: f,
-      size: BigInt(block.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)),
-      originalPath: file.originalPath,
-      block
-    };
-  };
-  return reducer;
-};
-var fileBuilder = async (file, block, options) => {
-  return options.layout(buildFileBatch(file, block, options), reduce(file, block, options));
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/index.js
-function isIterable(thing) {
-  return Symbol.iterator in thing;
-}
-function isAsyncIterable5(thing) {
-  return Symbol.asyncIterator in thing;
-}
-function contentAsAsyncIterable(content) {
-  try {
-    if (content instanceof Uint8Array) {
-      return async function* () {
-        yield content;
-      }();
-    } else if (isIterable(content)) {
-      return async function* () {
-        yield* content;
-      }();
-    } else if (isAsyncIterable5(content)) {
-      return content;
-    }
-  } catch {
-    throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-  }
-  throw (0, import_err_code2.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-}
-function defaultDagBuilder(options) {
-  return async function* dagBuilder(source, blockstore) {
-    for await (const entry of source) {
-      let originalPath;
-      if (entry.path != null) {
-        originalPath = entry.path;
-        entry.path = entry.path.split("/").filter((path6) => path6 != null && path6 !== ".").join("/");
-      }
-      if (isFileCandidate(entry)) {
-        const file = {
-          path: entry.path,
-          mtime: entry.mtime,
-          mode: entry.mode,
-          content: async function* () {
-            var _a;
-            let bytesRead = 0n;
-            for await (const chunk of options.chunker(options.chunkValidator(contentAsAsyncIterable(entry.content)))) {
-              const currentChunkSize = BigInt(chunk.byteLength);
-              bytesRead += currentChunkSize;
-              (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:importer:progress:file:read", {
-                bytesRead,
-                chunkSize: currentChunkSize,
-                path: entry.path
-              }));
-              yield chunk;
-            }
-          }(),
-          originalPath
-        };
-        yield async () => fileBuilder(file, blockstore, options);
-      } else if (entry.path != null) {
-        const dir = {
-          path: entry.path,
-          mtime: entry.mtime,
-          mode: entry.mode,
-          originalPath
-        };
-        yield async () => dirBuilder(dir, blockstore, options);
-      } else {
-        throw new Error("Import candidate must have content or path or both");
-      }
-    }
-  };
-}
-function isFileCandidate(entry) {
-  return entry.content != null;
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dag-builder/validate-chunks.js
-var import_err_code3 = __toESM(require_err_code(), 1);
-var defaultChunkValidator = () => {
-  return async function* validateChunks(source) {
-    for await (const content of source) {
-      if (content.length === void 0) {
-        throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-      }
-      if (typeof content === "string" || content instanceof String) {
-        yield fromString3(content.toString());
-      } else if (Array.isArray(content)) {
-        yield Uint8Array.from(content);
-      } else if (content instanceof Uint8Array) {
-        yield content;
-      } else {
-        throw (0, import_err_code3.default)(new Error("Content was invalid"), "ERR_INVALID_CONTENT");
-      }
-    }
-  };
-};
-
-// node_modules/ipfs-unixfs-importer/dist/src/layout/balanced.js
-var DEFAULT_MAX_CHILDREN_PER_NODE = 174;
-function balanced(options) {
-  const maxChildrenPerNode = (options == null ? void 0 : options.maxChildrenPerNode) ?? DEFAULT_MAX_CHILDREN_PER_NODE;
-  return async function balancedLayout(source, reduce2) {
-    const roots = [];
-    for await (const chunked of src_default5(source, maxChildrenPerNode)) {
-      roots.push(await reduce2(chunked));
-    }
-    if (roots.length > 1) {
-      return balancedLayout(roots, reduce2);
-    }
-    return roots[0];
-  };
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dir.js
-var Dir = class {
-  options;
-  root;
-  dir;
-  path;
-  dirty;
-  flat;
-  parent;
-  parentKey;
-  unixfs;
-  mode;
-  mtime;
-  cid;
-  size;
-  nodeSize;
-  constructor(props, options) {
-    this.options = options ?? {};
-    this.root = props.root;
-    this.dir = props.dir;
-    this.path = props.path;
-    this.dirty = props.dirty;
-    this.flat = props.flat;
-    this.parent = props.parent;
-    this.parentKey = props.parentKey;
-    this.unixfs = props.unixfs;
-    this.mode = props.mode;
-    this.mtime = props.mtime;
-  }
-};
-var CID_V0 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn");
-var CID_V1 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi");
-
-// node_modules/ipfs-unixfs-importer/dist/src/dir-flat.js
-var DirFlat = class extends Dir {
-  _children;
-  constructor(props, options) {
-    super(props, options);
-    this._children = /* @__PURE__ */ new Map();
-  }
-  async put(name4, value) {
-    this.cid = void 0;
-    this.size = void 0;
-    this.nodeSize = void 0;
-    this._children.set(name4, value);
-  }
-  async get(name4) {
-    return Promise.resolve(this._children.get(name4));
-  }
-  childCount() {
-    return this._children.size;
-  }
-  directChildrenCount() {
-    return this.childCount();
-  }
-  onlyChild() {
-    return this._children.values().next().value;
-  }
-  async *eachChildSeries() {
-    for (const [key, child] of this._children.entries()) {
-      yield {
-        key,
-        child
-      };
-    }
-  }
-  estimateNodeSize() {
-    if (this.nodeSize !== void 0) {
-      return this.nodeSize;
-    }
-    this.nodeSize = 0;
-    for (const [name4, child] of this._children.entries()) {
-      if (child.size != null && child.cid != null) {
-        this.nodeSize += name4.length + (this.options.cidVersion === 1 ? CID_V1.bytes.byteLength : CID_V0.bytes.byteLength);
-      }
-    }
-    return this.nodeSize;
-  }
-  async *flush(block) {
-    const links = [];
-    for (const [name4, child] of this._children.entries()) {
-      let result = child;
-      if (child instanceof Dir) {
-        for await (const entry of child.flush(block)) {
-          result = entry;
-          yield entry;
-        }
-      }
-      if (result.size != null && result.cid != null) {
-        links.push({
-          Name: name4,
-          Tsize: Number(result.size),
-          Hash: result.cid
-        });
-      }
-    }
-    const unixfs2 = new UnixFS({
-      type: "directory",
-      mtime: this.mtime,
-      mode: this.mode
-    });
-    const node = { Data: unixfs2.marshal(), Links: links };
-    const buffer2 = encode7(prepare(node));
-    const cid = await persist(buffer2, block, this.options);
-    const size = buffer2.length + node.Links.reduce(
-      /**
-       * @param {number} acc
-       * @param {PBLink} curr
-       */
-      (acc, curr) => acc + (curr.Tsize == null ? 0 : curr.Tsize),
-      0
-    );
-    this.cid = cid;
-    this.size = size;
-    yield {
-      cid,
-      unixfs: unixfs2,
-      path: this.path,
-      size: BigInt(size)
-    };
-  }
-};
-
-// node_modules/@multiformats/murmur3/src/index.js
-var import_murmurhash3js_revisited = __toESM(require_murmurhash3js_revisited(), 1);
-function fromNumberTo32BitBuf(number) {
-  const bytes = new Array(4);
-  for (let i = 0; i < 4; i++) {
-    bytes[i] = number & 255;
-    number = number >> 8;
-  }
-  return new Uint8Array(bytes);
-}
-var murmur332 = from3({
-  name: "murmur3-32",
-  code: 35,
-  encode: (input) => fromNumberTo32BitBuf(import_murmurhash3js_revisited.default.x86.hash32(input))
-});
-var murmur3128 = from3({
-  name: "murmur3-128",
-  code: 34,
-  encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input))
-});
-var murmur364 = from3({
-  name: "murmur3-x64-64",
-  code: 34,
-  encode: (input) => bytes_exports2.fromHex(import_murmurhash3js_revisited.default.x64.hash128(input)).subarray(0, 8)
-});
-
-// node_modules/hamt-sharding/dist/src/bucket.js
-var import_sparse_array = __toESM(require_sparse_array(), 1);
-var Bucket = class _Bucket {
-  constructor(options, parent, posAtParent = 0) {
-    this._options = options;
-    this._popCount = 0;
-    this._parent = parent;
-    this._posAtParent = posAtParent;
-    this._children = new import_sparse_array.default();
-    this.key = null;
-  }
-  async put(key, value) {
-    const place = await this._findNewBucketAndPos(key);
-    await place.bucket._putAt(place, key, value);
-  }
-  async get(key) {
-    const child = await this._findChild(key);
-    if (child != null) {
-      return child.value;
-    }
-  }
-  async del(key) {
-    const place = await this._findPlace(key);
-    const child = place.bucket._at(place.pos);
-    if (child != null && child.key === key) {
-      place.bucket._delAt(place.pos);
-    }
-  }
-  leafCount() {
-    const children = this._children.compactArray();
-    return children.reduce((acc, child) => {
-      if (child instanceof _Bucket) {
-        return acc + child.leafCount();
-      }
-      return acc + 1;
-    }, 0);
-  }
-  childrenCount() {
-    return this._children.length;
-  }
-  onlyChild() {
-    return this._children.get(0);
-  }
-  *eachLeafSeries() {
-    const children = this._children.compactArray();
-    for (const child of children) {
-      if (child instanceof _Bucket) {
-        yield* child.eachLeafSeries();
-      } else {
-        yield child;
-      }
-    }
-  }
-  serialize(map2, reduce2) {
-    const acc = [];
-    return reduce2(this._children.reduce((acc2, child, index) => {
-      if (child != null) {
-        if (child instanceof _Bucket) {
-          acc2.push(child.serialize(map2, reduce2));
-        } else {
-          acc2.push(map2(child, index));
-        }
-      }
-      return acc2;
-    }, acc));
-  }
-  async asyncTransform(asyncMap, asyncReduce) {
-    return await asyncTransformBucket(this, asyncMap, asyncReduce);
-  }
-  toJSON() {
-    return this.serialize(mapNode, reduceNodes);
-  }
-  prettyPrint() {
-    return JSON.stringify(this.toJSON(), null, "  ");
-  }
-  tableSize() {
-    return Math.pow(2, this._options.bits);
-  }
-  async _findChild(key) {
-    const result = await this._findPlace(key);
-    const child = result.bucket._at(result.pos);
-    if (child instanceof _Bucket) {
-      return void 0;
-    }
-    if (child != null && child.key === key) {
-      return child;
-    }
-  }
-  async _findPlace(key) {
-    const hashValue = this._options.hash(typeof key === "string" ? fromString3(key) : key);
-    const index = await hashValue.take(this._options.bits);
-    const child = this._children.get(index);
-    if (child instanceof _Bucket) {
-      return await child._findPlace(hashValue);
-    }
-    return {
-      bucket: this,
-      pos: index,
-      hash: hashValue,
-      existingChild: child
-    };
-  }
-  async _findNewBucketAndPos(key) {
-    const place = await this._findPlace(key);
-    if (place.existingChild != null && place.existingChild.key !== key) {
-      const bucket = new _Bucket(this._options, place.bucket, place.pos);
-      place.bucket._putObjectAt(place.pos, bucket);
-      const newPlace = await bucket._findPlace(place.existingChild.hash);
-      newPlace.bucket._putAt(newPlace, place.existingChild.key, place.existingChild.value);
-      return await bucket._findNewBucketAndPos(place.hash);
-    }
-    return place;
-  }
-  _putAt(place, key, value) {
-    this._putObjectAt(place.pos, {
-      key,
-      value,
-      hash: place.hash
-    });
-  }
-  _putObjectAt(pos, object) {
-    if (this._children.get(pos) == null) {
-      this._popCount++;
-    }
-    this._children.set(pos, object);
-  }
-  _delAt(pos) {
-    if (pos === -1) {
-      throw new Error("Invalid position");
-    }
-    if (this._children.get(pos) != null) {
-      this._popCount--;
-    }
-    this._children.unset(pos);
-    this._level();
-  }
-  _level() {
-    if (this._parent != null && this._popCount <= 1) {
-      if (this._popCount === 1) {
-        const onlyChild = this._children.find(exists);
-        if (onlyChild != null && !(onlyChild instanceof _Bucket)) {
-          const hash = onlyChild.hash;
-          hash.untake(this._options.bits);
-          const place = {
-            pos: this._posAtParent,
-            hash,
-            bucket: this._parent
-          };
-          this._parent._putAt(place, onlyChild.key, onlyChild.value);
-        }
-      } else {
-        this._parent._delAt(this._posAtParent);
-      }
-    }
-  }
-  _at(index) {
-    return this._children.get(index);
-  }
-};
-function exists(o) {
-  return Boolean(o);
-}
-function mapNode(node, _) {
-  return node.key;
-}
-function reduceNodes(nodes) {
-  return nodes;
-}
-async function asyncTransformBucket(bucket, asyncMap, asyncReduce) {
-  const output = [];
-  for (const child of bucket._children.compactArray()) {
-    if (child instanceof Bucket) {
-      await asyncTransformBucket(child, asyncMap, asyncReduce);
-    } else {
-      const mappedChildren = await asyncMap(child);
-      output.push({
-        bitField: bucket._children.bitField(),
-        children: mappedChildren
-      });
-    }
-  }
-  return await asyncReduce(output);
-}
-
-// node_modules/hamt-sharding/dist/src/consumable-buffer.js
-var START_MASKS = [
-  255,
-  254,
-  252,
-  248,
-  240,
-  224,
-  192,
-  128
-];
-var STOP_MASKS = [
-  1,
-  3,
-  7,
-  15,
-  31,
-  63,
-  127,
-  255
-];
-var ConsumableBuffer = class {
-  constructor(value) {
-    this._value = value;
-    this._currentBytePos = value.length - 1;
-    this._currentBitPos = 7;
-  }
-  availableBits() {
-    return this._currentBitPos + 1 + this._currentBytePos * 8;
-  }
-  totalBits() {
-    return this._value.length * 8;
-  }
-  take(bits) {
-    let pendingBits = bits;
-    let result = 0;
-    while (pendingBits > 0 && this._haveBits()) {
-      const byte = this._value[this._currentBytePos];
-      const availableBits = this._currentBitPos + 1;
-      const taking = Math.min(availableBits, pendingBits);
-      const value = byteBitsToInt(byte, availableBits - taking, taking);
-      result = (result << taking) + value;
-      pendingBits -= taking;
-      this._currentBitPos -= taking;
-      if (this._currentBitPos < 0) {
-        this._currentBitPos = 7;
-        this._currentBytePos--;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    this._currentBitPos += bits;
-    while (this._currentBitPos > 7) {
-      this._currentBitPos -= 8;
-      this._currentBytePos += 1;
-    }
-  }
-  _haveBits() {
-    return this._currentBytePos >= 0;
-  }
-};
-function byteBitsToInt(byte, start, length4) {
-  const mask = maskFor(start, length4);
-  return (byte & mask) >>> start;
-}
-function maskFor(start, length4) {
-  return START_MASKS[start] & STOP_MASKS[Math.min(length4 + start - 1, 7)];
-}
-
-// node_modules/hamt-sharding/dist/src/consumable-hash.js
-function wrapHash(hashFn2) {
-  function hashing(value) {
-    if (value instanceof InfiniteHash) {
-      return value;
-    } else {
-      return new InfiniteHash(value, hashFn2);
-    }
-  }
-  return hashing;
-}
-var InfiniteHash = class {
-  constructor(value, hashFn2) {
-    if (!(value instanceof Uint8Array)) {
-      throw new Error("can only hash Uint8Arrays");
-    }
-    this._value = value;
-    this._hashFn = hashFn2;
-    this._depth = -1;
-    this._availableBits = 0;
-    this._currentBufferIndex = 0;
-    this._buffers = [];
-  }
-  async take(bits) {
-    let pendingBits = bits;
-    while (this._availableBits < pendingBits) {
-      await this._produceMoreBits();
-    }
-    let result = 0;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const available = Math.min(hash.availableBits(), pendingBits);
-      const took = hash.take(available);
-      result = (result << available) + took;
-      pendingBits -= available;
-      this._availableBits -= available;
-      if (hash.availableBits() === 0) {
-        this._currentBufferIndex++;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    let pendingBits = bits;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits);
-      hash.untake(availableForUntake);
-      pendingBits -= availableForUntake;
-      this._availableBits += availableForUntake;
-      if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {
-        this._depth--;
-        this._currentBufferIndex--;
-      }
-    }
-  }
-  async _produceMoreBits() {
-    this._depth++;
-    const value = this._depth > 0 ? concat2([this._value, Uint8Array.from([this._depth])]) : this._value;
-    const hashValue = await this._hashFn(value);
-    const buffer2 = new ConsumableBuffer(hashValue);
-    this._buffers.push(buffer2);
-    this._availableBits += buffer2.availableBits();
-  }
-};
-
-// node_modules/hamt-sharding/dist/src/index.js
-function createHAMT(options) {
-  if (options == null || options.hashFn == null) {
-    throw new Error("please define an options.hashFn");
-  }
-  const bucketOptions = {
-    bits: options.bits ?? 8,
-    hash: wrapHash(options.hashFn)
-  };
-  return new Bucket(bucketOptions);
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/dir-sharded.js
-async function hamtHashFn(buf2) {
-  return (await murmur3128.encode(buf2)).slice(0, 8).reverse();
-}
-var HAMT_HASH_CODE = BigInt(34);
-var DEFAULT_FANOUT_BITS = 8;
-var DirSharded = class extends Dir {
-  _bucket;
-  constructor(props, options) {
-    super(props, options);
-    this._bucket = createHAMT({
-      hashFn: hamtHashFn,
-      bits: options.shardFanoutBits ?? DEFAULT_FANOUT_BITS
-    });
-  }
-  async put(name4, value) {
-    this.cid = void 0;
-    this.size = void 0;
-    this.nodeSize = void 0;
-    await this._bucket.put(name4, value);
-  }
-  async get(name4) {
-    return this._bucket.get(name4);
-  }
-  childCount() {
-    return this._bucket.leafCount();
-  }
-  directChildrenCount() {
-    return this._bucket.childrenCount();
-  }
-  onlyChild() {
-    return this._bucket.onlyChild();
-  }
-  async *eachChildSeries() {
-    for await (const { key, value } of this._bucket.eachLeafSeries()) {
-      yield {
-        key,
-        child: value
-      };
-    }
-  }
-  estimateNodeSize() {
-    if (this.nodeSize !== void 0) {
-      return this.nodeSize;
-    }
-    this.nodeSize = calculateSize(this._bucket, this, this.options);
-    return this.nodeSize;
-  }
-  async *flush(blockstore) {
-    for await (const entry of flush(this._bucket, blockstore, this, this.options)) {
-      yield {
-        ...entry,
-        path: this.path
-      };
-    }
-  }
-};
-var dir_sharded_default = DirSharded;
-async function* flush(bucket, blockstore, shardRoot, options) {
-  const children = bucket._children;
-  const padLength = (bucket.tableSize() - 1).toString(16).length;
-  const links = [];
-  let childrenSize = 0n;
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0");
-    if (child instanceof Bucket) {
-      let shard;
-      for await (const subShard of flush(child, blockstore, null, options)) {
-        shard = subShard;
-      }
-      if (shard == null) {
-        throw new Error("Could not flush sharded directory, no subshard found");
-      }
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(shard.size),
-        Hash: shard.cid
-      });
-      childrenSize += shard.size;
-    } else if (isDir(child.value)) {
-      const dir2 = child.value;
-      let flushedDir;
-      for await (const entry of dir2.flush(blockstore)) {
-        flushedDir = entry;
-        yield flushedDir;
-      }
-      if (flushedDir == null) {
-        throw new Error("Did not flush dir");
-      }
-      const label = labelPrefix + child.key;
-      links.push({
-        Name: label,
-        Tsize: Number(flushedDir.size),
-        Hash: flushedDir.cid
-      });
-      childrenSize += flushedDir.size;
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size2 = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size2),
-        Hash: value.cid
-      });
-      childrenSize += BigInt(size2 ?? 0);
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: HAMT_HASH_CODE,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const node = {
-    Data: dir.marshal(),
-    Links: links
-  };
-  const buffer2 = encode7(prepare(node));
-  const cid = await persist(buffer2, blockstore, options);
-  const size = BigInt(buffer2.byteLength) + childrenSize;
-  yield {
-    cid,
-    unixfs: dir,
-    size
-  };
-}
-function isDir(obj) {
-  return typeof obj.flush === "function";
-}
-function calculateSize(bucket, shardRoot, options) {
-  const children = bucket._children;
-  const padLength = (bucket.tableSize() - 1).toString(16).length;
-  const links = [];
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(padLength, "0");
-    if (child instanceof Bucket) {
-      const size = calculateSize(child, null, options);
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V0 : CID_V1
-      });
-    } else if (typeof child.value.flush === "function") {
-      const dir2 = child.value;
-      const size = dir2.nodeSize();
-      links.push({
-        Name: labelPrefix + child.key,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V0 : CID_V1
-      });
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size),
-        Hash: value.cid
-      });
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: HAMT_HASH_CODE,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const buffer2 = encode7(prepare({
-    Data: dir.marshal(),
-    Links: links
-  }));
-  return buffer2.length;
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/flat-to-shard.js
-async function flatToShard(child, dir, threshold, options) {
-  let newDir = dir;
-  if (dir instanceof DirFlat && dir.estimateNodeSize() > threshold) {
-    newDir = await convertToShard(dir, options);
-  }
-  const parent = newDir.parent;
-  if (parent != null) {
-    if (newDir !== dir) {
-      if (child != null) {
-        child.parent = newDir;
-      }
-      if (newDir.parentKey == null) {
-        throw new Error("No parent key found");
-      }
-      await parent.put(newDir.parentKey, newDir);
-    }
-    return flatToShard(newDir, parent, threshold, options);
-  }
-  return newDir;
-}
-async function convertToShard(oldDir, options) {
-  const newDir = new dir_sharded_default({
-    root: oldDir.root,
-    dir: true,
-    parent: oldDir.parent,
-    parentKey: oldDir.parentKey,
-    path: oldDir.path,
-    dirty: oldDir.dirty,
-    flat: false,
-    mtime: oldDir.mtime,
-    mode: oldDir.mode
-  }, options);
-  for await (const { key, child } of oldDir.eachChildSeries()) {
-    await newDir.put(key, child);
-  }
-  return newDir;
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/utils/to-path-components.js
-var toPathComponents = (path6 = "") => {
-  return path6.split(/(? 1) {
-      yield* flushAndYield(tree, block);
-    } else {
-      for await (const unwrapped of tree.eachChildSeries()) {
-        if (unwrapped == null) {
-          continue;
-        }
-        yield* flushAndYield(unwrapped.child, block);
-      }
-    }
-  };
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/index.js
-async function* importer(source, blockstore, options = {}) {
-  let candidates;
-  if (Symbol.asyncIterator in source || Symbol.iterator in source) {
-    candidates = source;
-  } else {
-    candidates = [source];
-  }
-  const wrapWithDirectory = options.wrapWithDirectory ?? false;
-  const shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144;
-  const shardFanoutBits = options.shardFanoutBits ?? 8;
-  const cidVersion = options.cidVersion ?? 1;
-  const rawLeaves = options.rawLeaves ?? true;
-  const leafType = options.leafType ?? "file";
-  const fileImportConcurrency = options.fileImportConcurrency ?? 50;
-  const blockWriteConcurrency = options.blockWriteConcurrency ?? 10;
-  const reduceSingleLeafToSelf = options.reduceSingleLeafToSelf ?? true;
-  const chunker = options.chunker ?? fixedSize();
-  const chunkValidator = options.chunkValidator ?? defaultChunkValidator();
-  const buildDag = options.dagBuilder ?? defaultDagBuilder({
-    chunker,
-    chunkValidator,
-    wrapWithDirectory,
-    layout: options.layout ?? balanced(),
-    bufferImporter: options.bufferImporter ?? defaultBufferImporter({
-      cidVersion,
-      rawLeaves,
-      leafType,
-      onProgress: options.onProgress
-    }),
-    blockWriteConcurrency,
-    reduceSingleLeafToSelf,
-    cidVersion,
-    onProgress: options.onProgress
-  });
-  const buildTree = options.treeBuilder ?? defaultTreeBuilder({
-    wrapWithDirectory,
-    shardSplitThresholdBytes,
-    shardFanoutBits,
-    cidVersion,
-    onProgress: options.onProgress
-  });
-  for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore)) {
-    yield {
-      cid: entry.cid,
-      path: entry.path,
-      unixfs: entry.unixfs,
-      size: entry.size
-    };
-  }
-}
-async function importFile(content, blockstore, options = {}) {
-  const result = await src_default4(importer([content], blockstore, options));
-  if (result == null) {
-    throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS");
-  }
-  return result;
-}
-async function importDirectory(content, blockstore, options = {}) {
-  const result = await src_default4(importer([content], blockstore, options));
-  if (result == null) {
-    throw (0, import_err_code4.default)(new Error("Nothing imported"), "ERR_INVALID_PARAMS");
-  }
-  return result;
-}
-async function importBytes(buf2, blockstore, options = {}) {
-  return importFile({
-    content: buf2
-  }, blockstore, options);
-}
-async function importByteStream(bufs, blockstore, options = {}) {
-  return importFile({
-    content: bufs
-  }, blockstore, options);
-}
-
-// node_modules/ipfs-unixfs-importer/dist/src/chunker/rabin.js
-var import_err_code5 = __toESM(require_err_code(), 1);
-var import_rabin_wasm = __toESM(require_src(), 1);
-
-// node_modules/@helia/unixfs/dist/src/commands/add.js
-var defaultImporterSettings = {
-  cidVersion: 1,
-  rawLeaves: true,
-  layout: balanced({
-    maxChildrenPerNode: 1024
-  }),
-  chunker: fixedSize({
-    chunkSize: 1048576
-  })
-};
-async function* addAll(source, blockstore, options = {}) {
-  yield* importer(source, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-}
-async function addBytes(bytes, blockstore, options = {}) {
-  const { cid } = await importBytes(bytes, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-async function addByteStream(bytes, blockstore, options = {}) {
-  const { cid } = await importByteStream(bytes, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-async function addFile(file, blockstore, options = {}) {
-  const { cid } = await importFile(file, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-async function addDirectory(dir, blockstore, options = {}) {
-  const { cid } = await importDirectory({
-    ...dir,
-    path: dir.path ?? "-"
-  }, blockstore, {
-    ...defaultImporterSettings,
-    ...options
-  });
-  return cid;
-}
-
-// node_modules/ipfs-unixfs-exporter/dist/src/index.js
-var import_err_code15 = __toESM(require_err_code(), 1);
-
-// node_modules/it-last/dist/src/index.js
-function isAsyncIterable6(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function last(source) {
-  if (isAsyncIterable6(source)) {
-    return (async () => {
-      let res2;
-      for await (const entry of source) {
-        res2 = entry;
-      }
-      return res2;
-    })();
-  }
-  let res;
-  for (const entry of source) {
-    res = entry;
-  }
-  return res;
-}
-var src_default7 = last;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js
-var import_err_code14 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/dag-cbor.js
-var import_err_code6 = __toESM(require_err_code(), 1);
-var resolve = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  const block = await blockstore.get(cid, options);
-  const object = decode6(block);
-  let subObject = object;
-  let subPath = path6;
-  while (toResolve.length > 0) {
-    const prop = toResolve[0];
-    if (prop in subObject) {
-      toResolve.shift();
-      subPath = `${subPath}/${prop}`;
-      const subObjectCid = CID2.asCID(subObject[prop]);
-      if (subObjectCid != null) {
-        return {
-          entry: {
-            type: "object",
-            name: name4,
-            path: path6,
-            cid,
-            node: block,
-            depth,
-            size: BigInt(block.length),
-            content: async function* () {
-              yield object;
-            }
-          },
-          next: {
-            cid: subObjectCid,
-            name: prop,
-            path: subPath,
-            toResolve
-          }
-        };
-      }
-      subObject = subObject[prop];
-    } else {
-      throw (0, import_err_code6.default)(new Error(`No property named ${prop} found in cbor node ${cid}`), "ERR_NO_PROP");
-    }
-  }
-  return {
-    entry: {
-      type: "object",
-      name: name4,
-      path: path6,
-      cid,
-      node: block,
-      depth,
-      size: BigInt(block.length),
-      content: async function* () {
-        yield object;
-      }
-    }
-  };
-};
-var dag_cbor_default = resolve;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js
-var import_err_code8 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-exporter/dist/src/utils/extract-data-from-block.js
-function extractDataFromBlock(block, blockStart, requestedStart, requestedEnd) {
-  const blockLength = BigInt(block.length);
-  const blockEnd = BigInt(blockStart + blockLength);
-  if (requestedStart >= blockEnd || requestedEnd < blockStart) {
-    return new Uint8Array(0);
-  }
-  if (requestedEnd >= blockStart && requestedEnd < blockEnd) {
-    block = block.subarray(0, Number(requestedEnd - blockStart));
-  }
-  if (requestedStart >= blockStart && requestedStart < blockEnd) {
-    block = block.subarray(Number(requestedStart - blockStart));
-  }
-  return block;
-}
-var extract_data_from_block_default = extractDataFromBlock;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/utils/validate-offset-and-length.js
-var import_err_code7 = __toESM(require_err_code(), 1);
-var validateOffsetAndLength = (size, offset = 0, length4 = size) => {
-  const fileSize = BigInt(size);
-  const start = BigInt(offset ?? 0);
-  let end = BigInt(length4);
-  if (end !== fileSize) {
-    end = start + end;
-  }
-  if (end > fileSize) {
-    end = fileSize;
-  }
-  if (start < 0n) {
-    throw (0, import_err_code7.default)(new Error("Offset must be greater than or equal to 0"), "ERR_INVALID_PARAMS");
-  }
-  if (start > fileSize) {
-    throw (0, import_err_code7.default)(new Error("Offset must be less than the file size"), "ERR_INVALID_PARAMS");
-  }
-  if (end < 0n) {
-    throw (0, import_err_code7.default)(new Error("Length must be greater than or equal to 0"), "ERR_INVALID_PARAMS");
-  }
-  if (end > fileSize) {
-    throw (0, import_err_code7.default)(new Error("Length must be less than the file size"), "ERR_INVALID_PARAMS");
-  }
-  return {
-    start,
-    end
-  };
-};
-var validate_offset_and_length_default = validateOffsetAndLength;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/identity.js
-var rawContent = (node) => {
-  async function* contentGenerator(options = {}) {
-    var _a;
-    const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length);
-    const buf2 = extract_data_from_block_default(node, 0n, start, end);
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:identity", {
-      bytesRead: BigInt(buf2.byteLength),
-      totalBytes: end - start,
-      fileSize: BigInt(node.byteLength)
-    }));
-    yield buf2;
-  }
-  return contentGenerator;
-};
-var resolve2 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  if (toResolve.length > 0) {
-    throw (0, import_err_code8.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND");
-  }
-  const buf2 = decode10(cid.multihash.bytes);
-  return {
-    entry: {
-      type: "identity",
-      name: name4,
-      path: path6,
-      cid,
-      content: rawContent(buf2.digest),
-      depth,
-      size: BigInt(buf2.digest.length),
-      node: buf2.digest
-    }
-  };
-};
-var identity_default = resolve2;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/raw.js
-var import_err_code9 = __toESM(require_err_code(), 1);
-var rawContent2 = (node) => {
-  async function* contentGenerator(options = {}) {
-    var _a;
-    const { start, end } = validate_offset_and_length_default(node.length, options.offset, options.length);
-    const buf2 = extract_data_from_block_default(node, 0n, start, end);
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:progress:raw", {
-      bytesRead: BigInt(buf2.byteLength),
-      totalBytes: end - start,
-      fileSize: BigInt(node.byteLength)
-    }));
-    yield buf2;
-  }
-  return contentGenerator;
-};
-var resolve3 = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  if (toResolve.length > 0) {
-    throw (0, import_err_code9.default)(new Error(`No link named ${path6} found in raw node ${cid}`), "ERR_NOT_FOUND");
-  }
-  const block = await blockstore.get(cid, options);
-  return {
-    entry: {
-      type: "raw",
-      name: name4,
-      path: path6,
-      cid,
-      content: rawContent2(block),
-      depth,
-      size: BigInt(block.length),
-      node: block
-    }
-  };
-};
-var raw_default = resolve3;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js
-var import_err_code13 = __toESM(require_err_code(), 1);
-
-// node_modules/ipfs-unixfs-exporter/dist/src/utils/find-cid-in-shard.js
-var import_err_code10 = __toESM(require_err_code(), 1);
-var hashFn = async function(buf2) {
-  return (await murmur3128.encode(buf2)).slice(0, 8).reverse();
-};
-var addLinksToHamtBucket = async (links, bucket, rootBucket) => {
-  const padLength = (bucket.tableSize() - 1).toString(16).length;
-  await Promise.all(links.map(async (link) => {
-    if (link.Name == null) {
-      throw new Error("Unexpected Link without a Name");
-    }
-    if (link.Name.length === padLength) {
-      const pos = parseInt(link.Name, 16);
-      bucket._putObjectAt(pos, new Bucket({
-        hash: rootBucket._options.hash,
-        bits: rootBucket._options.bits
-      }, bucket, pos));
-      return;
-    }
-    await rootBucket.put(link.Name.substring(2), true);
-  }));
-};
-var toPrefix = (position, padLength) => {
-  return position.toString(16).toUpperCase().padStart(padLength, "0").substring(0, padLength);
-};
-var toBucketPath = (position) => {
-  let bucket = position.bucket;
-  const path6 = [];
-  while (bucket._parent != null) {
-    path6.push(bucket);
-    bucket = bucket._parent;
-  }
-  path6.push(bucket);
-  return path6.reverse();
-};
-var findShardCid = async (node, name4, blockstore, context, options) => {
-  if (context == null) {
-    if (node.Data == null) {
-      throw (0, import_err_code10.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-    }
-    let dir;
-    try {
-      dir = UnixFS.unmarshal(node.Data);
-    } catch (err) {
-      throw (0, import_err_code10.default)(err, "ERR_NOT_UNIXFS");
-    }
-    if (dir.type !== "hamt-sharded-directory") {
-      throw (0, import_err_code10.default)(new Error("not a HAMT"), "ERR_NOT_UNIXFS");
-    }
-    if (dir.fanout == null) {
-      throw (0, import_err_code10.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS");
-    }
-    const rootBucket = createHAMT({
-      hashFn,
-      bits: Math.log2(Number(dir.fanout))
-    });
-    context = {
-      rootBucket,
-      hamtDepth: 1,
-      lastBucket: rootBucket
-    };
-  }
-  const padLength = (context.lastBucket.tableSize() - 1).toString(16).length;
-  await addLinksToHamtBucket(node.Links, context.lastBucket, context.rootBucket);
-  const position = await context.rootBucket._findNewBucketAndPos(name4);
-  let prefix = toPrefix(position.pos, padLength);
-  const bucketPath = toBucketPath(position);
-  if (bucketPath.length > context.hamtDepth) {
-    context.lastBucket = bucketPath[context.hamtDepth];
-    prefix = toPrefix(context.lastBucket._posAtParent, padLength);
-  }
-  const link = node.Links.find((link2) => {
-    if (link2.Name == null) {
-      return false;
-    }
-    const entryPrefix = link2.Name.substring(0, padLength);
-    const entryName = link2.Name.substring(padLength);
-    if (entryPrefix !== prefix) {
-      return false;
-    }
-    if (entryName !== "" && entryName !== name4) {
-      return false;
-    }
-    return true;
-  });
-  if (link == null) {
-    return;
-  }
-  if (link.Name != null && link.Name.substring(padLength) === name4) {
-    return link.Hash;
-  }
-  context.hamtDepth++;
-  const block = await blockstore.get(link.Hash, options);
-  node = decode11(block);
-  return findShardCid(node, name4, blockstore, context, options);
-};
-var find_cid_in_shard_default = findShardCid;
-
-// node_modules/it-filter/dist/src/index.js
-function isAsyncIterable7(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function filter(source, fn) {
-  if (isAsyncIterable7(source)) {
-    return async function* () {
-      for await (const entry of source) {
-        if (await fn(entry)) {
-          yield entry;
-        }
-      }
-    }();
-  }
-  const peekable2 = src_default2(source);
-  const { value, done } = peekable2.next();
-  if (done === true) {
-    return function* () {
-    }();
-  }
-  const res = fn(value);
-  if (typeof res.then === "function") {
-    return async function* () {
-      if (await res) {
-        yield value;
-      }
-      for await (const entry of peekable2) {
-        if (await fn(entry)) {
-          yield entry;
-        }
-      }
-    }();
-  }
-  const func = fn;
-  return function* () {
-    if (res === true) {
-      yield value;
-    }
-    for (const entry of peekable2) {
-      if (func(entry)) {
-        yield entry;
-      }
-    }
-  }();
-}
-var src_default8 = filter;
-
-// node_modules/it-parallel/dist/src/index.js
-var CustomEvent = globalThis.CustomEvent ?? Event;
-async function* parallel(source, options = {}) {
-  let concurrency = options.concurrency ?? Infinity;
-  if (concurrency < 1) {
-    concurrency = Infinity;
-  }
-  const ordered = options.ordered == null ? false : options.ordered;
-  const emitter = new EventTarget();
-  const ops = [];
-  let slotAvailable = pDefer();
-  let resultAvailable = pDefer();
-  let sourceFinished = false;
-  let sourceErr;
-  let opErred = false;
-  emitter.addEventListener("task-complete", () => {
-    resultAvailable.resolve();
-  });
-  void Promise.resolve().then(async () => {
-    try {
-      for await (const task of source) {
-        if (ops.length === concurrency) {
-          slotAvailable = pDefer();
-          await slotAvailable.promise;
-        }
-        if (opErred) {
-          break;
-        }
-        const op = {
-          done: false
-        };
-        ops.push(op);
-        task().then((result) => {
-          op.done = true;
-          op.ok = true;
-          op.value = result;
-          emitter.dispatchEvent(new CustomEvent("task-complete"));
-        }, (err) => {
-          op.done = true;
-          op.err = err;
-          emitter.dispatchEvent(new CustomEvent("task-complete"));
-        });
-      }
-      sourceFinished = true;
-      emitter.dispatchEvent(new CustomEvent("task-complete"));
-    } catch (err) {
-      sourceErr = err;
-      emitter.dispatchEvent(new CustomEvent("task-complete"));
-    }
-  });
-  function valuesAvailable() {
-    var _a;
-    if (ordered) {
-      return (_a = ops[0]) == null ? void 0 : _a.done;
-    }
-    return Boolean(ops.find((op) => op.done));
-  }
-  function* yieldOrderedValues() {
-    while (ops.length > 0 && ops[0].done) {
-      const op = ops[0];
-      ops.shift();
-      if (op.ok) {
-        yield op.value;
-      } else {
-        opErred = true;
-        slotAvailable.resolve();
-        throw op.err;
-      }
-      slotAvailable.resolve();
-    }
-  }
-  function* yieldUnOrderedValues() {
-    while (valuesAvailable()) {
-      for (let i = 0; i < ops.length; i++) {
-        if (ops[i].done) {
-          const op = ops[i];
-          ops.splice(i, 1);
-          i--;
-          if (op.ok) {
-            yield op.value;
-          } else {
-            opErred = true;
-            slotAvailable.resolve();
-            throw op.err;
-          }
-          slotAvailable.resolve();
-        }
-      }
-    }
-  }
-  while (true) {
-    if (!valuesAvailable()) {
-      resultAvailable = pDefer();
-      await resultAvailable.promise;
-    }
-    if (sourceErr != null) {
-      throw sourceErr;
-    }
-    if (ordered) {
-      yield* yieldOrderedValues();
-    } else {
-      yield* yieldUnOrderedValues();
-    }
-    if (sourceFinished && ops.length === 0) {
-      break;
-    }
-  }
-}
-
-// node_modules/it-pushable/dist/src/fifo.js
-var FixedFIFO = class {
-  buffer;
-  mask;
-  top;
-  btm;
-  next;
-  constructor(hwm) {
-    if (!(hwm > 0) || (hwm - 1 & hwm) !== 0) {
-      throw new Error("Max size for a FixedFIFO should be a power of two");
-    }
-    this.buffer = new Array(hwm);
-    this.mask = hwm - 1;
-    this.top = 0;
-    this.btm = 0;
-    this.next = null;
-  }
-  push(data) {
-    if (this.buffer[this.top] !== void 0) {
-      return false;
-    }
-    this.buffer[this.top] = data;
-    this.top = this.top + 1 & this.mask;
-    return true;
-  }
-  shift() {
-    const last2 = this.buffer[this.btm];
-    if (last2 === void 0) {
-      return void 0;
-    }
-    this.buffer[this.btm] = void 0;
-    this.btm = this.btm + 1 & this.mask;
-    return last2;
-  }
-  isEmpty() {
-    return this.buffer[this.btm] === void 0;
-  }
-};
-var FIFO = class {
-  size;
-  hwm;
-  head;
-  tail;
-  constructor(options = {}) {
-    this.hwm = options.splitLimit ?? 16;
-    this.head = new FixedFIFO(this.hwm);
-    this.tail = this.head;
-    this.size = 0;
-  }
-  calculateSize(obj) {
-    if ((obj == null ? void 0 : obj.byteLength) != null) {
-      return obj.byteLength;
-    }
-    return 1;
-  }
-  push(val) {
-    if ((val == null ? void 0 : val.value) != null) {
-      this.size += this.calculateSize(val.value);
-    }
-    if (!this.head.push(val)) {
-      const prev = this.head;
-      this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length);
-      this.head.push(val);
-    }
-  }
-  shift() {
-    let val = this.tail.shift();
-    if (val === void 0 && this.tail.next != null) {
-      const next = this.tail.next;
-      this.tail.next = null;
-      this.tail = next;
-      val = this.tail.shift();
-    }
-    if ((val == null ? void 0 : val.value) != null) {
-      this.size -= this.calculateSize(val.value);
-    }
-    return val;
-  }
-  isEmpty() {
-    return this.head.isEmpty();
-  }
-};
-
-// node_modules/it-pushable/dist/src/index.js
-var AbortError3 = class extends Error {
-  type;
-  code;
-  constructor(message2, code5) {
-    super(message2 ?? "The operation was aborted");
-    this.type = "aborted";
-    this.code = code5 ?? "ABORT_ERR";
-  }
-};
-function pushable(options = {}) {
-  const getNext = (buffer2) => {
-    const next = buffer2.shift();
-    if (next == null) {
-      return { done: true };
-    }
-    if (next.error != null) {
-      throw next.error;
-    }
-    return {
-      done: next.done === true,
-      // @ts-expect-error if done is false, value will be present
-      value: next.value
-    };
-  };
-  return _pushable(getNext, options);
-}
-function _pushable(getNext, options) {
-  options = options ?? {};
-  let onEnd = options.onEnd;
-  let buffer2 = new FIFO();
-  let pushable2;
-  let onNext;
-  let ended;
-  let drain2 = pDefer();
-  const waitNext = async () => {
-    try {
-      if (!buffer2.isEmpty()) {
-        return getNext(buffer2);
-      }
-      if (ended) {
-        return { done: true };
-      }
-      return await new Promise((resolve6, reject) => {
-        onNext = (next) => {
-          onNext = null;
-          buffer2.push(next);
-          try {
-            resolve6(getNext(buffer2));
-          } catch (err) {
-            reject(err);
-          }
-          return pushable2;
-        };
-      });
-    } finally {
-      if (buffer2.isEmpty()) {
-        queueMicrotask(() => {
-          drain2.resolve();
-          drain2 = pDefer();
-        });
-      }
-    }
-  };
-  const bufferNext = (next) => {
-    if (onNext != null) {
-      return onNext(next);
-    }
-    buffer2.push(next);
-    return pushable2;
-  };
-  const bufferError = (err) => {
-    buffer2 = new FIFO();
-    if (onNext != null) {
-      return onNext({ error: err });
-    }
-    buffer2.push({ error: err });
-    return pushable2;
-  };
-  const push = (value) => {
-    if (ended) {
-      return pushable2;
-    }
-    if ((options == null ? void 0 : options.objectMode) !== true && (value == null ? void 0 : value.byteLength) == null) {
-      throw new Error("objectMode was not true but tried to push non-Uint8Array value");
-    }
-    return bufferNext({ done: false, value });
-  };
-  const end = (err) => {
-    if (ended)
-      return pushable2;
-    ended = true;
-    return err != null ? bufferError(err) : bufferNext({ done: true });
-  };
-  const _return = () => {
-    buffer2 = new FIFO();
-    end();
-    return { done: true };
-  };
-  const _throw = (err) => {
-    end(err);
-    return { done: true };
-  };
-  pushable2 = {
-    [Symbol.asyncIterator]() {
-      return this;
-    },
-    next: waitNext,
-    return: _return,
-    throw: _throw,
-    push,
-    end,
-    get readableLength() {
-      return buffer2.size;
-    },
-    onEmpty: async (options2) => {
-      const signal = options2 == null ? void 0 : options2.signal;
-      signal == null ? void 0 : signal.throwIfAborted();
-      if (buffer2.isEmpty()) {
-        return;
-      }
-      let cancel;
-      let listener;
-      if (signal != null) {
-        cancel = new Promise((resolve6, reject) => {
-          listener = () => {
-            reject(new AbortError3());
-          };
-          signal.addEventListener("abort", listener);
-        });
-      }
-      try {
-        await Promise.race([
-          drain2.promise,
-          cancel
-        ]);
-      } finally {
-        if (listener != null && signal != null) {
-          signal == null ? void 0 : signal.removeEventListener("abort", listener);
-        }
-      }
-    }
-  };
-  if (onEnd == null) {
-    return pushable2;
-  }
-  const _pushable2 = pushable2;
-  pushable2 = {
-    [Symbol.asyncIterator]() {
-      return this;
-    },
-    next() {
-      return _pushable2.next();
-    },
-    throw(err) {
-      _pushable2.throw(err);
-      if (onEnd != null) {
-        onEnd(err);
-        onEnd = void 0;
-      }
-      return { done: true };
-    },
-    return() {
-      _pushable2.return();
-      if (onEnd != null) {
-        onEnd();
-        onEnd = void 0;
-      }
-      return { done: true };
-    },
-    push,
-    end(err) {
-      _pushable2.end(err);
-      if (onEnd != null) {
-        onEnd(err);
-        onEnd = void 0;
-      }
-      return pushable2;
-    },
-    get readableLength() {
-      return _pushable2.readableLength;
-    },
-    onEmpty: (opts) => {
-      return _pushable2.onEmpty(opts);
-    }
-  };
-  return pushable2;
-}
-
-// node_modules/it-merge/dist/src/index.js
-function isAsyncIterable8(thing) {
-  return thing[Symbol.asyncIterator] != null;
-}
-function merge(...sources) {
-  const syncSources = [];
-  for (const source of sources) {
-    if (!isAsyncIterable8(source)) {
-      syncSources.push(source);
-    }
-  }
-  if (syncSources.length === sources.length) {
-    return function* () {
-      for (const source of syncSources) {
-        yield* source;
-      }
-    }();
-  }
-  return async function* () {
-    const output = pushable({
-      objectMode: true
-    });
-    void Promise.resolve().then(async () => {
-      try {
-        await Promise.all(sources.map(async (source) => {
-          for await (const item of source) {
-            output.push(item);
-          }
-        }));
-        output.end();
-      } catch (err) {
-        output.end(err);
-      }
-    });
-    yield* output;
-  }();
-}
-var src_default9 = merge;
-
-// node_modules/it-pipe/dist/src/index.js
-function pipe(first2, ...rest) {
-  if (first2 == null) {
-    throw new Error("Empty pipeline");
-  }
-  if (isDuplex(first2)) {
-    const duplex = first2;
-    first2 = () => duplex.source;
-  } else if (isIterable2(first2) || isAsyncIterable9(first2)) {
-    const source = first2;
-    first2 = () => source;
-  }
-  const fns = [first2, ...rest];
-  if (fns.length > 1) {
-    if (isDuplex(fns[fns.length - 1])) {
-      fns[fns.length - 1] = fns[fns.length - 1].sink;
-    }
-  }
-  if (fns.length > 2) {
-    for (let i = 1; i < fns.length - 1; i++) {
-      if (isDuplex(fns[i])) {
-        fns[i] = duplexPipelineFn(fns[i]);
-      }
-    }
-  }
-  return rawPipe(...fns);
-}
-var rawPipe = (...fns) => {
-  let res;
-  while (fns.length > 0) {
-    res = fns.shift()(res);
-  }
-  return res;
-};
-var isAsyncIterable9 = (obj) => {
-  return (obj == null ? void 0 : obj[Symbol.asyncIterator]) != null;
-};
-var isIterable2 = (obj) => {
-  return (obj == null ? void 0 : obj[Symbol.iterator]) != null;
-};
-var isDuplex = (obj) => {
-  if (obj == null) {
-    return false;
-  }
-  return obj.sink != null && obj.source != null;
-};
-var duplexPipelineFn = (duplex) => {
-  return (source) => {
-    const p = duplex.sink(source);
-    if ((p == null ? void 0 : p.then) != null) {
-      const stream = pushable({
-        objectMode: true
-      });
-      p.then(() => {
-        stream.end();
-      }, (err) => {
-        stream.end(err);
-      });
-      let sourceWrap;
-      const source2 = duplex.source;
-      if (isAsyncIterable9(source2)) {
-        sourceWrap = async function* () {
-          yield* source2;
-          stream.end();
-        };
-      } else if (isIterable2(source2)) {
-        sourceWrap = function* () {
-          yield* source2;
-          stream.end();
-        };
-      } else {
-        throw new Error("Unknown duplex source type - must be Iterable or AsyncIterable");
-      }
-      return src_default9(stream, sourceWrap());
-    }
-    return duplex.source;
-  };
-};
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/directory.js
-var directoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-  async function* yieldDirectoryContent(options = {}) {
-    var _a;
-    const offset = options.offset ?? 0;
-    const length4 = options.length ?? node.Links.length;
-    const links = node.Links.slice(offset, length4);
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:directory", {
-      cid
-    }));
-    yield* pipe(links, (source) => src_default3(source, (link) => {
-      return async () => {
-        const linkName = link.Name ?? "";
-        const linkPath = `${path6}/${linkName}`;
-        const result = await resolve6(link.Hash, linkName, linkPath, [], depth + 1, blockstore, options);
-        return result.entry;
-      };
-    }), (source) => parallel(source, { ordered: true }), (source) => src_default8(source, (entry) => entry != null));
-  }
-  return yieldDirectoryContent;
-};
-var directory_default = directoryContent;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/file.js
-var import_err_code11 = __toESM(require_err_code(), 1);
-async function walkDAG(blockstore, node, queue, streamPosition, start, end, options) {
-  if (node instanceof Uint8Array) {
-    const buf2 = extract_data_from_block_default(node, streamPosition, start, end);
-    queue.push(buf2);
-    return;
-  }
-  if (node.Data == null) {
-    throw (0, import_err_code11.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-  }
-  let file;
-  try {
-    file = UnixFS.unmarshal(node.Data);
-  } catch (err) {
-    throw (0, import_err_code11.default)(err, "ERR_NOT_UNIXFS");
-  }
-  if (file.data != null) {
-    const data = file.data;
-    const buf2 = extract_data_from_block_default(data, streamPosition, start, end);
-    queue.push(buf2);
-    streamPosition += BigInt(buf2.byteLength);
-  }
-  const childOps = [];
-  if (node.Links.length !== file.blockSizes.length) {
-    throw (0, import_err_code11.default)(new Error("Inconsistent block sizes and dag links"), "ERR_NOT_UNIXFS");
-  }
-  for (let i = 0; i < node.Links.length; i++) {
-    const childLink = node.Links[i];
-    const childStart = streamPosition;
-    const childEnd = childStart + file.blockSizes[i];
-    if (start >= childStart && start < childEnd || // child has offset byte
-    end >= childStart && end <= childEnd || // child has end byte
-    start < childStart && end > childEnd) {
-      childOps.push({
-        link: childLink,
-        blockStart: streamPosition
-      });
-    }
-    streamPosition = childEnd;
-    if (streamPosition > end) {
-      break;
-    }
-  }
-  await pipe(childOps, (source) => src_default3(source, (op) => {
-    return async () => {
-      const block = await blockstore.get(op.link.Hash, options);
-      return {
-        ...op,
-        block
-      };
-    };
-  }), (source) => parallel(source, {
-    ordered: true
-  }), async (source) => {
-    for await (const { link, block, blockStart } of source) {
-      let child;
-      switch (link.Hash.code) {
-        case code2:
-          child = decode11(block);
-          break;
-        case code3:
-          child = block;
-          break;
-        default:
-          queue.end((0, import_err_code11.default)(new Error(`Unsupported codec: ${link.Hash.code}`), "ERR_NOT_UNIXFS"));
-          return;
-      }
-      const childQueue = new dist_default({
-        concurrency: 1
-      });
-      childQueue.on("error", (error) => {
-        queue.end(error);
-      });
-      void childQueue.add(async () => {
-        var _a;
-        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", {
-          cid: link.Hash
-        }));
-        await walkDAG(blockstore, child, queue, blockStart, start, end, options);
-      });
-      await childQueue.onIdle();
-    }
-  });
-  if (streamPosition >= end) {
-    queue.end();
-  }
-}
-var fileContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-  async function* yieldFileContent(options = {}) {
-    var _a, _b;
-    const fileSize = unixfs2.fileSize();
-    if (fileSize === void 0) {
-      throw new Error("File was a directory");
-    }
-    const { start, end } = validate_offset_and_length_default(fileSize, options.offset, options.length);
-    if (end === 0n) {
-      return;
-    }
-    let read4 = 0n;
-    const wanted = end - start;
-    const queue = pushable();
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:file", {
-      cid
-    }));
-    void walkDAG(blockstore, node, queue, 0n, start, end, options).catch((err) => {
-      queue.end(err);
-    });
-    for await (const buf2 of queue) {
-      if (buf2 == null) {
-        continue;
-      }
-      read4 += BigInt(buf2.byteLength);
-      if (read4 > wanted) {
-        queue.end();
-        throw (0, import_err_code11.default)(new Error("Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect"), "ERR_OVER_READ");
-      }
-      if (read4 === wanted) {
-        queue.end();
-      }
-      (_b = options.onProgress) == null ? void 0 : _b.call(options, new CustomProgressEvent("unixfs:exporter:progress:unixfs:file", {
-        bytesRead: read4,
-        totalBytes: wanted,
-        fileSize
-      }));
-      yield buf2;
-    }
-    if (read4 < wanted) {
-      throw (0, import_err_code11.default)(new Error("Traversed entire DAG but did not read enough bytes"), "ERR_UNDER_READ");
-    }
-  }
-  return yieldFileContent;
-};
-var file_default = fileContent;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js
-var import_err_code12 = __toESM(require_err_code(), 1);
-var hamtShardedDirectoryContent = (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-  function yieldHamtDirectoryContent(options = {}) {
-    var _a;
-    (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", {
-      cid
-    }));
-    return listDirectory(node, path6, resolve6, depth, blockstore, options);
-  }
-  return yieldHamtDirectoryContent;
-};
-async function* listDirectory(node, path6, resolve6, depth, blockstore, options) {
-  const links = node.Links;
-  if (node.Data == null) {
-    throw (0, import_err_code12.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-  }
-  let dir;
-  try {
-    dir = UnixFS.unmarshal(node.Data);
-  } catch (err) {
-    throw (0, import_err_code12.default)(err, "ERR_NOT_UNIXFS");
-  }
-  if (dir.fanout == null) {
-    throw (0, import_err_code12.default)(new Error("missing fanout"), "ERR_NOT_UNIXFS");
-  }
-  const padLength = (dir.fanout - 1n).toString(16).length;
-  const results = pipe(links, (source) => src_default3(source, (link) => {
-    return async () => {
-      var _a;
-      const name4 = link.Name != null ? link.Name.substring(padLength) : null;
-      if (name4 != null && name4 !== "") {
-        const result = await resolve6(link.Hash, name4, `${path6}/${name4}`, [], depth + 1, blockstore, options);
-        return { entries: result.entry == null ? [] : [result.entry] };
-      } else {
-        const block = await blockstore.get(link.Hash, options);
-        node = decode11(block);
-        (_a = options.onProgress) == null ? void 0 : _a.call(options, new CustomProgressEvent("unixfs:exporter:walk:hamt-sharded-directory", {
-          cid: link.Hash
-        }));
-        return { entries: listDirectory(node, path6, resolve6, depth, blockstore, options) };
-      }
-    };
-  }), (source) => parallel(source, { ordered: true }));
-  for await (const { entries } of results) {
-    yield* entries;
-  }
-}
-var hamt_sharded_directory_default = hamtShardedDirectoryContent;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/unixfs-v1/index.js
-var findLinkCid = (node, name4) => {
-  const link = node.Links.find((link2) => link2.Name === name4);
-  return link == null ? void 0 : link.Hash;
-};
-var contentExporters = {
-  raw: file_default,
-  file: file_default,
-  directory: directory_default,
-  "hamt-sharded-directory": hamt_sharded_directory_default,
-  metadata: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-    return () => [];
-  },
-  symlink: (cid, node, unixfs2, path6, resolve6, depth, blockstore) => {
-    return () => [];
-  }
-};
-var unixFsResolver = async (cid, name4, path6, toResolve, resolve6, depth, blockstore, options) => {
-  const block = await blockstore.get(cid, options);
-  const node = decode11(block);
-  let unixfs2;
-  let next;
-  if (name4 == null) {
-    name4 = cid.toString();
-  }
-  if (node.Data == null) {
-    throw (0, import_err_code13.default)(new Error("no data in PBNode"), "ERR_NOT_UNIXFS");
-  }
-  try {
-    unixfs2 = UnixFS.unmarshal(node.Data);
-  } catch (err) {
-    throw (0, import_err_code13.default)(err, "ERR_NOT_UNIXFS");
-  }
-  if (path6 == null) {
-    path6 = name4;
-  }
-  if (toResolve.length > 0) {
-    let linkCid;
-    if ((unixfs2 == null ? void 0 : unixfs2.type) === "hamt-sharded-directory") {
-      linkCid = await find_cid_in_shard_default(node, toResolve[0], blockstore);
-    } else {
-      linkCid = findLinkCid(node, toResolve[0]);
-    }
-    if (linkCid == null) {
-      throw (0, import_err_code13.default)(new Error("file does not exist"), "ERR_NOT_FOUND");
-    }
-    const nextName = toResolve.shift();
-    const nextPath = `${path6}/${nextName}`;
-    next = {
-      cid: linkCid,
-      toResolve,
-      name: nextName ?? "",
-      path: nextPath
-    };
-  }
-  const content = contentExporters[unixfs2.type](cid, node, unixfs2, path6, resolve6, depth, blockstore);
-  if (content == null) {
-    throw (0, import_err_code13.default)(new Error("could not find content exporter"), "ERR_NOT_FOUND");
-  }
-  if (unixfs2.isDirectory()) {
-    return {
-      entry: {
-        type: "directory",
-        name: name4,
-        path: path6,
-        cid,
-        content,
-        unixfs: unixfs2,
-        depth,
-        node,
-        size: unixfs2.fileSize()
-      },
-      next
-    };
-  }
-  return {
-    entry: {
-      type: "file",
-      name: name4,
-      path: path6,
-      cid,
-      content,
-      unixfs: unixfs2,
-      depth,
-      node,
-      size: unixfs2.fileSize()
-    },
-    next
-  };
-};
-var unixfs_v1_default = unixFsResolver;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/resolvers/index.js
-var resolvers = {
-  [code2]: unixfs_v1_default,
-  [code3]: raw_default,
-  [code]: dag_cbor_default,
-  [identity2.code]: identity_default
-};
-var resolve4 = async (cid, name4, path6, toResolve, depth, blockstore, options) => {
-  const resolver = resolvers[cid.code];
-  if (resolver == null) {
-    throw (0, import_err_code14.default)(new Error(`No resolver for code ${cid.code}`), "ERR_NO_RESOLVER");
-  }
-  return resolver(cid, name4, path6, toResolve, resolve4, depth, blockstore, options);
-};
-var resolvers_default = resolve4;
-
-// node_modules/ipfs-unixfs-exporter/dist/src/index.js
-var toPathComponents2 = (path6 = "") => {
-  return (path6.trim().match(/([^\\^/]|\\\/)+/g) ?? []).filter(Boolean);
-};
-var cidAndRest = (path6) => {
-  if (path6 instanceof Uint8Array) {
-    return {
-      cid: CID2.decode(path6),
-      toResolve: []
-    };
-  }
-  const cid = CID2.asCID(path6);
-  if (cid != null) {
-    return {
-      cid,
-      toResolve: []
-    };
-  }
-  if (typeof path6 === "string") {
-    if (path6.indexOf("/ipfs/") === 0) {
-      path6 = path6.substring(6);
-    }
-    const output = toPathComponents2(path6);
-    return {
-      cid: CID2.parse(output[0]),
-      toResolve: output.slice(1)
-    };
-  }
-  throw (0, import_err_code15.default)(new Error(`Unknown path type ${path6}`), "ERR_BAD_PATH");
-};
-async function* walkPath(path6, blockstore, options = {}) {
-  let { cid, toResolve } = cidAndRest(path6);
-  let name4 = cid.toString();
-  let entryPath = name4;
-  const startingDepth = toResolve.length;
-  while (true) {
-    const result = await resolvers_default(cid, name4, entryPath, toResolve, startingDepth, blockstore, options);
-    if (result.entry == null && result.next == null) {
-      throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND");
-    }
-    if (result.entry != null) {
-      yield result.entry;
-    }
-    if (result.next == null) {
-      return;
-    }
-    toResolve = result.next.toResolve;
-    cid = result.next.cid;
-    name4 = result.next.name;
-    entryPath = result.next.path;
-  }
-}
-async function exporter(path6, blockstore, options = {}) {
-  const result = await src_default7(walkPath(path6, blockstore, options));
-  if (result == null) {
-    throw (0, import_err_code15.default)(new Error(`Could not resolve ${path6}`), "ERR_NOT_FOUND");
-  }
-  return result;
-}
-async function* recursive(path6, blockstore, options = {}) {
-  const node = await exporter(path6, blockstore, options);
-  if (node == null) {
-    return;
-  }
-  yield node;
-  if (node.type === "directory") {
-    for await (const child of recurse(node, options)) {
-      yield child;
-    }
-  }
-  async function* recurse(node2, options2) {
-    for await (const file of node2.content(options2)) {
-      yield file;
-      if (file instanceof Uint8Array) {
-        continue;
-      }
-      if (file.type === "directory") {
-        yield* recurse(file, options2);
-      }
-    }
-  }
-}
-
-// node_modules/merge-options/index.mjs
-var import_index3 = __toESM(require_merge_options(), 1);
-var merge_options_default = import_index3.default;
-
-// node_modules/@helia/unixfs/dist/src/errors.js
-var UnixFSError = class extends Error {
-  name;
-  code;
-  constructor(message2, name4, code5) {
-    super(message2);
-    this.name = name4;
-    this.code = code5;
-  }
-};
-var NotUnixFSError = class extends UnixFSError {
-  constructor(message2 = "not a Unixfs node") {
-    super(message2, "NotUnixFSError", "ERR_NOT_UNIXFS");
-  }
-};
-var InvalidPBNodeError = class extends UnixFSError {
-  constructor(message2 = "invalid PBNode") {
-    super(message2, "InvalidPBNodeError", "ERR_INVALID_PBNODE");
-  }
-};
-var UnknownError = class extends UnixFSError {
-  constructor(message2 = "unknown error") {
-    super(message2, "InvalidPBNodeError", "ERR_UNKNOWN_ERROR");
-  }
-};
-var AlreadyExistsError = class extends UnixFSError {
-  constructor(message2 = "path already exists") {
-    super(message2, "AlreadyExistsError", "ERR_ALREADY_EXISTS");
-  }
-};
-var DoesNotExistError = class extends UnixFSError {
-  constructor(message2 = "path does not exist") {
-    super(message2, "DoesNotExistError", "ERR_DOES_NOT_EXIST");
-  }
-};
-var NoContentError = class extends UnixFSError {
-  constructor(message2 = "no content") {
-    super(message2, "NoContentError", "ERR_NO_CONTENT");
-  }
-};
-var NotAFileError = class extends UnixFSError {
-  constructor(message2 = "not a file") {
-    super(message2, "NotAFileError", "ERR_NOT_A_FILE");
-  }
-};
-var NotADirectoryError = class extends UnixFSError {
-  constructor(message2 = "not a directory") {
-    super(message2, "NotADirectoryError", "ERR_NOT_A_DIRECTORY");
-  }
-};
-var InvalidParametersError = class extends UnixFSError {
-  constructor(message2 = "invalid parameters") {
-    super(message2, "InvalidParametersError", "ERR_INVALID_PARAMETERS");
-  }
-};
-
-// node_modules/@libp2p/logger/dist/src/index.js
-var import_debug = __toESM(require_src2(), 1);
-import_debug.default.formatters.b = (v) => {
-  return v == null ? "undefined" : base58btc2.baseEncode(v);
-};
-import_debug.default.formatters.t = (v) => {
-  return v == null ? "undefined" : base322.baseEncode(v);
-};
-import_debug.default.formatters.m = (v) => {
-  return v == null ? "undefined" : base64.baseEncode(v);
-};
-import_debug.default.formatters.p = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug.default.formatters.c = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug.default.formatters.k = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug.default.formatters.a = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-function createDisabledLogger(namespace) {
-  const logger3 = () => {
-  };
-  logger3.enabled = false;
-  logger3.color = "";
-  logger3.diff = 0;
-  logger3.log = () => {
-  };
-  logger3.namespace = namespace;
-  logger3.destroy = () => true;
-  logger3.extend = () => logger3;
-  return logger3;
-}
-function logger(name4) {
-  let trace = createDisabledLogger(`${name4}:trace`);
-  if (import_debug.default.enabled(`${name4}:trace`) && import_debug.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null) {
-    trace = (0, import_debug.default)(`${name4}:trace`);
-  }
-  return Object.assign((0, import_debug.default)(name4), {
-    error: (0, import_debug.default)(`${name4}:error`),
-    trace
-  });
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js
-var import_sparse_array3 = __toESM(require_sparse_array(), 1);
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/consumable-hash.js
-function wrapHash2(hashFn2) {
-  function hashing(value) {
-    if (value instanceof InfiniteHash2) {
-      return value;
-    } else {
-      return new InfiniteHash2(value, hashFn2);
-    }
-  }
-  return hashing;
-}
-var InfiniteHash2 = class {
-  _value;
-  _hashFn;
-  _depth;
-  _availableBits;
-  _currentBufferIndex;
-  _buffers;
-  constructor(value, hashFn2) {
-    if (!(value instanceof Uint8Array)) {
-      throw new Error("can only hash Uint8Arrays");
-    }
-    this._value = value;
-    this._hashFn = hashFn2;
-    this._depth = -1;
-    this._availableBits = 0;
-    this._currentBufferIndex = 0;
-    this._buffers = [];
-  }
-  async take(bits) {
-    let pendingBits = bits;
-    while (this._availableBits < pendingBits) {
-      await this._produceMoreBits();
-    }
-    let result = 0;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const available = Math.min(hash.availableBits(), pendingBits);
-      const took = hash.take(available);
-      result = (result << available) + took;
-      pendingBits -= available;
-      this._availableBits -= available;
-      if (hash.availableBits() === 0) {
-        this._currentBufferIndex++;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    let pendingBits = bits;
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex];
-      const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits);
-      hash.untake(availableForUntake);
-      pendingBits -= availableForUntake;
-      this._availableBits += availableForUntake;
-      if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {
-        this._depth--;
-        this._currentBufferIndex--;
-      }
-    }
-  }
-  async _produceMoreBits() {
-    this._depth++;
-    const value = this._depth > 0 ? concat2([this._value, Uint8Array.from([this._depth])]) : this._value;
-    const hashValue = await this._hashFn(value);
-    const buffer2 = new ConsumableBuffer2(hashValue);
-    this._buffers.push(buffer2);
-    this._availableBits += buffer2.availableBits();
-  }
-};
-var START_MASKS2 = [
-  255,
-  254,
-  252,
-  248,
-  240,
-  224,
-  192,
-  128
-];
-var STOP_MASKS2 = [
-  1,
-  3,
-  7,
-  15,
-  31,
-  63,
-  127,
-  255
-];
-var ConsumableBuffer2 = class {
-  _value;
-  _currentBytePos;
-  _currentBitPos;
-  constructor(value) {
-    this._value = value;
-    this._currentBytePos = value.length - 1;
-    this._currentBitPos = 7;
-  }
-  availableBits() {
-    return this._currentBitPos + 1 + this._currentBytePos * 8;
-  }
-  totalBits() {
-    return this._value.length * 8;
-  }
-  take(bits) {
-    let pendingBits = bits;
-    let result = 0;
-    while (pendingBits > 0 && this._haveBits()) {
-      const byte = this._value[this._currentBytePos];
-      const availableBits = this._currentBitPos + 1;
-      const taking = Math.min(availableBits, pendingBits);
-      const value = byteBitsToInt2(byte, availableBits - taking, taking);
-      result = (result << taking) + value;
-      pendingBits -= taking;
-      this._currentBitPos -= taking;
-      if (this._currentBitPos < 0) {
-        this._currentBitPos = 7;
-        this._currentBytePos--;
-      }
-    }
-    return result;
-  }
-  untake(bits) {
-    this._currentBitPos += bits;
-    while (this._currentBitPos > 7) {
-      this._currentBitPos -= 8;
-      this._currentBytePos += 1;
-    }
-  }
-  _haveBits() {
-    return this._currentBytePos >= 0;
-  }
-};
-function byteBitsToInt2(byte, start, length4) {
-  const mask = maskFor2(start, length4);
-  return (byte & mask) >>> start;
-}
-function maskFor2(start, length4) {
-  return START_MASKS2[start] & STOP_MASKS2[Math.min(length4 + start - 1, 7)];
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-constants.js
-var hamtHashCode = BigInt(murmur3128.code);
-var hamtBucketBits = 8;
-async function hamtHashFn2(buf2) {
-  return (await murmur3128.encode(buf2)).subarray(0, 8).reverse();
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js
-var import_sparse_array2 = __toESM(require_sparse_array(), 1);
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/persist.js
-var persist2 = async (buffer2, blockstore, options) => {
-  if (options.codec == null) {
-    options.codec = src_exports2;
-  }
-  const multihash = await sha256.digest(buffer2);
-  const cid = CID2.create(options.cidVersion, options.codec.code, multihash);
-  await blockstore.put(cid, buffer2, {
-    ...options,
-    signal: options.signal
-  });
-  return cid;
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/dir-sharded.js
-var Dir2 = class {
-  options;
-  root;
-  dir;
-  path;
-  dirty;
-  flat;
-  parent;
-  parentKey;
-  unixfs;
-  mode;
-  mtime;
-  cid;
-  size;
-  nodeSize;
-  constructor(props, options) {
-    this.options = options ?? {};
-    this.root = props.root;
-    this.dir = props.dir;
-    this.path = props.path;
-    this.dirty = props.dirty;
-    this.flat = props.flat;
-    this.parent = props.parent;
-    this.parentKey = props.parentKey;
-    this.unixfs = props.unixfs;
-    this.mode = props.mode;
-    this.mtime = props.mtime;
-  }
-};
-var DirSharded2 = class extends Dir2 {
-  _bucket;
-  constructor(props, options) {
-    super(props, options);
-    this._bucket = createHAMT({
-      hashFn: hamtHashFn2,
-      bits: 8
-    });
-  }
-  async put(name4, value) {
-    this.cid = void 0;
-    this.size = void 0;
-    this.nodeSize = void 0;
-    await this._bucket.put(name4, value);
-  }
-  async get(name4) {
-    return this._bucket.get(name4);
-  }
-  childCount() {
-    return this._bucket.leafCount();
-  }
-  directChildrenCount() {
-    return this._bucket.childrenCount();
-  }
-  onlyChild() {
-    return this._bucket.onlyChild();
-  }
-  async *eachChildSeries() {
-    for await (const { key, value } of this._bucket.eachLeafSeries()) {
-      yield {
-        key,
-        child: value
-      };
-    }
-  }
-  estimateNodeSize() {
-    if (this.nodeSize !== void 0) {
-      return this.nodeSize;
-    }
-    this.nodeSize = calculateSize2(this._bucket, this, this.options);
-    return this.nodeSize;
-  }
-  async *flush(blockstore) {
-    for await (const entry of flush2(this._bucket, blockstore, this, this.options)) {
-      yield {
-        ...entry,
-        path: this.path
-      };
-    }
-  }
-};
-async function* flush2(bucket, blockstore, shardRoot, options) {
-  const children = bucket._children;
-  const links = [];
-  let childrenSize = 0n;
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0");
-    if (child instanceof Bucket) {
-      let shard;
-      for await (const subShard of flush2(child, blockstore, null, options)) {
-        shard = subShard;
-      }
-      if (shard == null) {
-        throw new Error("Could not flush sharded directory, no subshard found");
-      }
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(shard.size),
-        Hash: shard.cid
-      });
-      childrenSize += shard.size;
-    } else if (isDir2(child.value)) {
-      const dir2 = child.value;
-      let flushedDir;
-      for await (const entry of dir2.flush(blockstore)) {
-        flushedDir = entry;
-        yield flushedDir;
-      }
-      if (flushedDir == null) {
-        throw new Error("Did not flush dir");
-      }
-      const label = labelPrefix + child.key;
-      links.push({
-        Name: label,
-        Tsize: Number(flushedDir.size),
-        Hash: flushedDir.cid
-      });
-      childrenSize += flushedDir.size;
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size2 = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size2),
-        Hash: value.cid
-      });
-      childrenSize += BigInt(size2 ?? 0);
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: hamtHashCode,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const node = {
-    Data: dir.marshal(),
-    Links: links
-  };
-  const buffer2 = encode7(prepare(node));
-  const cid = await persist2(buffer2, blockstore, options);
-  const size = BigInt(buffer2.byteLength) + childrenSize;
-  yield {
-    cid,
-    unixfs: dir,
-    size
-  };
-}
-function isDir2(obj) {
-  return typeof obj.flush === "function";
-}
-function calculateSize2(bucket, shardRoot, options) {
-  const children = bucket._children;
-  const links = [];
-  for (let i = 0; i < children.length; i++) {
-    const child = children.get(i);
-    if (child == null) {
-      continue;
-    }
-    const labelPrefix = i.toString(16).toUpperCase().padStart(2, "0");
-    if (child instanceof Bucket) {
-      const size = calculateSize2(child, null, options);
-      links.push({
-        Name: labelPrefix,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V02 : CID_V12
-      });
-    } else if (typeof child.value.flush === "function") {
-      const dir2 = child.value;
-      const size = dir2.nodeSize();
-      links.push({
-        Name: labelPrefix + child.key,
-        Tsize: Number(size),
-        Hash: options.cidVersion === 0 ? CID_V02 : CID_V12
-      });
-    } else {
-      const value = child.value;
-      if (value.cid == null) {
-        continue;
-      }
-      const label = labelPrefix + child.key;
-      const size = value.size;
-      links.push({
-        Name: label,
-        Tsize: Number(size),
-        Hash: value.cid
-      });
-    }
-  }
-  const data = Uint8Array.from(children.bitField().reverse());
-  const dir = new UnixFS({
-    type: "hamt-sharded-directory",
-    data,
-    fanout: BigInt(bucket.tableSize()),
-    hashType: hamtHashCode,
-    mtime: shardRoot == null ? void 0 : shardRoot.mtime,
-    mode: shardRoot == null ? void 0 : shardRoot.mode
-  });
-  const buffer2 = encode7(prepare({
-    Data: dir.marshal(),
-    Links: links
-  }));
-  return buffer2.length;
-}
-var CID_V02 = CID2.parse("QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn");
-var CID_V12 = CID2.parse("zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi");
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/hamt-utils.js
-var log = logger("helia:unixfs:commands:utils:hamt-utils");
-var toPrefix2 = (position) => {
-  return position.toString(16).toUpperCase().padStart(2, "0").substring(0, 2);
-};
-var createShard = async (blockstore, contents, options) => {
-  const shard = new DirSharded2({
-    root: true,
-    dir: true,
-    parent: void 0,
-    parentKey: void 0,
-    path: "",
-    dirty: true,
-    flat: false,
-    mtime: options.mtime,
-    mode: options.mode
-  }, options);
-  for (let i = 0; i < contents.length; i++) {
-    await shard._bucket.put(contents[i].name, {
-      size: contents[i].size,
-      cid: contents[i].cid
-    });
-  }
-  const res = await src_default7(shard.flush(blockstore));
-  if (res == null) {
-    throw new Error("Flushing shard yielded no result");
-  }
-  return res;
-};
-var updateShardedDirectory = async (path6, blockstore, options) => {
-  const shardRoot = UnixFS.unmarshal(path6[0].node.Data ?? new Uint8Array(0));
-  const fanout = BigInt(Math.pow(2, hamtBucketBits));
-  path6.reverse();
-  let cid;
-  let node;
-  for (let i = 0; i < path6.length; i++) {
-    const isRoot = i === path6.length - 1;
-    const segment = path6[i];
-    const data = Uint8Array.from(segment.children.bitField().reverse());
-    const dir = new UnixFS({
-      type: "hamt-sharded-directory",
-      data,
-      fanout,
-      hashType: hamtHashCode
-    });
-    if (isRoot) {
-      dir.mtime = shardRoot.mtime;
-      dir.mode = shardRoot.mode;
-    }
-    node = {
-      Data: dir.marshal(),
-      Links: segment.node.Links
-    };
-    const block = encode7(prepare(node));
-    cid = await persist2(block, blockstore, options);
-    if (!isRoot) {
-      const nextSegment = path6[i + 1];
-      if (nextSegment == null) {
-        throw new Error("Was not operating on shard root but also had no parent?");
-      }
-      log("updating link in parent sub-shard with prefix %s", nextSegment.prefix);
-      nextSegment.node.Links = nextSegment.node.Links.filter((l) => l.Name !== nextSegment.prefix);
-      nextSegment.node.Links.push({
-        Name: nextSegment.prefix,
-        Hash: cid,
-        Tsize: segment.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), block.byteLength)
-      });
-    }
-  }
-  if (cid == null || node == null) {
-    throw new Error("Noting persisted");
-  }
-  return { cid, node };
-};
-var recreateShardedDirectory = async (cid, fileName, blockstore, options) => {
-  const wrapped = wrapHash2(hamtHashFn2);
-  const hash = wrapped(fromString3(fileName));
-  const path6 = [];
-  while (true) {
-    const block = await blockstore.get(cid, options);
-    const node = decode11(block);
-    const children = new import_sparse_array2.default();
-    const index = await hash.take(hamtBucketBits);
-    const prefix = toPrefix2(index);
-    path6.push({
-      prefix,
-      children,
-      node
-    });
-    let childLink;
-    for (const link of node.Links) {
-      const linkName2 = link.Name ?? "";
-      if (linkName2.length < 2) {
-        throw new Error("Invalid HAMT - link name was too short");
-      }
-      const position = parseInt(linkName2.substring(0, 2), 16);
-      children.set(position, true);
-      if (linkName2.startsWith(prefix)) {
-        childLink = link;
-      }
-    }
-    if (childLink == null) {
-      log("no link found with prefix %s for %s", prefix, fileName);
-      break;
-    }
-    const linkName = childLink.Name ?? "";
-    if (linkName.length < 2) {
-      throw new Error("Invalid HAMT - link name was too short");
-    }
-    if (linkName.length === 2) {
-      cid = childLink.Hash;
-      log("descend into sub-shard with prefix %s", linkName);
-      continue;
-    }
-    break;
-  }
-  return { path: path6, hash };
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/is-over-shard-threshold.js
-async function isOverShardThreshold(node, blockstore, threshold, options) {
-  if (node.Data == null) {
-    throw new Error("DagPB node had no data");
-  }
-  const unixfs2 = UnixFS.unmarshal(node.Data);
-  let size;
-  if (unixfs2.type === "directory") {
-    size = estimateNodeSize(node);
-  } else if (unixfs2.type === "hamt-sharded-directory") {
-    size = await estimateShardSize(node, 0, threshold, blockstore, options);
-  } else {
-    throw new Error("Can only estimate the size of directories or shards");
-  }
-  return size > threshold;
-}
-function estimateNodeSize(node) {
-  let size = 0;
-  for (const link of node.Links) {
-    size += (link.Name ?? "").length;
-    size += link.Hash.version === 1 ? CID_V12.bytes.byteLength : CID_V02.bytes.byteLength;
-  }
-  return size;
-}
-async function estimateShardSize(node, current, max, blockstore, options) {
-  if (current > max) {
-    return max;
-  }
-  if (node.Data == null) {
-    return current;
-  }
-  const unixfs2 = UnixFS.unmarshal(node.Data);
-  if (!unixfs2.isDirectory()) {
-    return current;
-  }
-  for (const link of node.Links) {
-    let name4 = link.Name ?? "";
-    name4 = name4.substring(2);
-    current += name4.length;
-    current += link.Hash.bytes.byteLength;
-    if (link.Hash.code === code2) {
-      const block = await blockstore.get(link.Hash, options);
-      const node2 = decode11(block);
-      current += await estimateShardSize(node2, current, max, blockstore, options);
-    }
-  }
-  return current;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/add-link.js
-var log2 = logger("helia:unixfs:components:utils:add-link");
-async function addLink(parent, child, blockstore, options) {
-  if (parent.node.Data == null) {
-    throw new InvalidParametersError("Invalid parent passed to addLink");
-  }
-  const meta = UnixFS.unmarshal(parent.node.Data);
-  if (meta.type === "hamt-sharded-directory") {
-    log2("adding link to sharded directory");
-    return addToShardedDirectory(parent, child, blockstore, options);
-  }
-  log2(`adding ${child.Name} (${child.Hash}) to regular directory`);
-  const result = await addToDirectory(parent, child, blockstore, options);
-  if (await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) {
-    log2("converting directory to sharded directory");
-    const converted = await convertToShardedDirectory(result, blockstore);
-    result.cid = converted.cid;
-    result.node = decode11(await blockstore.get(converted.cid, options));
-  }
-  return result;
-}
-var convertToShardedDirectory = async (parent, blockstore) => {
-  if (parent.node.Data == null) {
-    throw new InvalidParametersError("Invalid parent passed to convertToShardedDirectory");
-  }
-  const unixfs2 = UnixFS.unmarshal(parent.node.Data);
-  const result = await createShard(blockstore, parent.node.Links.map((link) => ({
-    name: link.Name ?? "",
-    size: BigInt(link.Tsize ?? 0),
-    cid: link.Hash
-  })), {
-    mode: unixfs2.mode,
-    mtime: unixfs2.mtime,
-    cidVersion: parent.cid.version
-  });
-  log2(`converted directory to sharded directory ${result.cid}`);
-  return result;
-};
-var addToDirectory = async (parent, child, blockstore, options) => {
-  const parentLinks = parent.node.Links.filter((link) => {
-    const matches = link.Name === child.Name;
-    if (matches && !options.allowOverwriting) {
-      throw new AlreadyExistsError();
-    }
-    return !matches;
-  });
-  parentLinks.push(child);
-  if (parent.node.Data == null) {
-    throw new InvalidPBNodeError("Parent node with no data passed to addToDirectory");
-  }
-  const node = UnixFS.unmarshal(parent.node.Data);
-  let data;
-  if (node.mtime != null) {
-    const ms = Date.now();
-    const secs = Math.floor(ms / 1e3);
-    node.mtime = {
-      secs: BigInt(secs),
-      nsecs: (ms - secs * 1e3) * 1e3
-    };
-    data = node.marshal();
-  } else {
-    data = parent.node.Data;
-  }
-  parent.node = prepare({
-    Data: data,
-    Links: parentLinks
-  });
-  const buf2 = encode7(parent.node);
-  const hash = await sha256.digest(buf2);
-  const cid = CID2.create(parent.cid.version, code2, hash);
-  await blockstore.put(cid, buf2);
-  return {
-    node: parent.node,
-    cid
-  };
-};
-var addToShardedDirectory = async (parent, child, blockstore, options) => {
-  var _a;
-  const { path: path6, hash } = await recreateShardedDirectory(parent.cid, child.Name, blockstore, options);
-  const finalSegment = path6[path6.length - 1];
-  if (finalSegment == null) {
-    throw new Error("Invalid HAMT, could not generate path");
-  }
-  const prefix = finalSegment.prefix;
-  const index = parseInt(prefix, 16);
-  log2("next prefix for %s is %s", child.Name, prefix);
-  const linkName = `${prefix}${child.Name}`;
-  const existingLink = finalSegment.node.Links.find((l) => (l.Name ?? "").startsWith(prefix));
-  if (existingLink != null) {
-    log2("link %s was present in shard", linkName);
-    if (existingLink.Name === linkName) {
-      if (!options.allowOverwriting) {
-        throw new AlreadyExistsError();
-      }
-      log2("overwriting %s in subshard", child.Name);
-      finalSegment.node.Links = finalSegment.node.Links.filter((l) => l.Name !== linkName);
-      finalSegment.node.Links.push({
-        Name: linkName,
-        Hash: child.Hash,
-        Tsize: child.Tsize
-      });
-    } else if (((_a = existingLink.Name) == null ? void 0 : _a.length) === 2) {
-      throw new Error("Existing link was subshard?!");
-    } else {
-      log2("prefix %s already exists, creating new subshard", prefix);
-      const index2 = finalSegment.node.Links.findIndex((l) => {
-        var _a2;
-        return (_a2 = l.Name) == null ? void 0 : _a2.startsWith(prefix);
-      });
-      const sibling = finalSegment.node.Links.splice(index2, 1)[0];
-      const siblingName = (sibling.Name ?? "").substring(2);
-      const wrapped = wrapHash2(hamtHashFn2);
-      const siblingHash = wrapped(fromString3(siblingName));
-      for (let i = 0; i < path6.length; i++) {
-        await siblingHash.take(hamtBucketBits);
-      }
-      while (true) {
-        const siblingIndex = await siblingHash.take(hamtBucketBits);
-        const siblingPrefix = toPrefix2(siblingIndex);
-        sibling.Name = `${siblingPrefix}${siblingName}`;
-        const newIndex = await hash.take(hamtBucketBits);
-        const newPrefix = toPrefix2(newIndex);
-        if (siblingPrefix === newPrefix) {
-          const children2 = new import_sparse_array3.default();
-          children2.set(newIndex, true);
-          path6.push({
-            prefix: newPrefix,
-            children: children2,
-            node: {
-              Links: []
-            }
-          });
-          continue;
-        }
-        const children = new import_sparse_array3.default();
-        children.set(newIndex, true);
-        children.set(siblingIndex, true);
-        path6.push({
-          prefix,
-          children,
-          node: {
-            Links: [
-              sibling,
-              {
-                Name: `${newPrefix}${child.Name}`,
-                Hash: child.Hash,
-                Tsize: child.Tsize
-              }
-            ]
-          }
-        });
-        break;
-      }
-    }
-  } else {
-    log2("link %s was not present in sub-shard", linkName);
-    child.Name = linkName;
-    finalSegment.node.Links.push(child);
-    finalSegment.children.set(index, true);
-    log2("adding %s to existing sub-shard", linkName);
-  }
-  return updateShardedDirectory(path6, blockstore, options);
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-directory.js
-async function cidToDirectory(cid, blockstore, options = {}) {
-  const entry = await exporter(cid, blockstore, options);
-  if (entry.type !== "directory") {
-    throw new NotADirectoryError(`${cid.toString()} was not a UnixFS directory`);
-  }
-  return {
-    cid,
-    node: entry.node
-  };
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/cid-to-pblink.js
-async function cidToPBLink(cid, name4, blockstore, options) {
-  const sourceEntry = await exporter(cid, blockstore, options);
-  if (sourceEntry.type !== "directory" && sourceEntry.type !== "file" && sourceEntry.type !== "raw") {
-    throw new NotUnixFSError(`${cid.toString()} was not a UnixFS node`);
-  }
-  return {
-    Name: name4,
-    Tsize: sourceEntry.node instanceof Uint8Array ? sourceEntry.node.byteLength : dagNodeTsize(sourceEntry.node),
-    Hash: cid
-  };
-}
-function dagNodeTsize(node) {
-  const linkSizes = node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0);
-  return encode7(node).byteLength + linkSizes;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/resolve.js
-var log3 = logger("helia:unixfs:components:utils:resolve");
-async function resolve5(cid, path6, blockstore, options) {
-  if (path6 == null || path6 === "") {
-    return { cid };
-  }
-  log3('resolve "%s" under %c', path6, cid);
-  const parts = path6.split("/").filter(Boolean);
-  const segments = [{
-    name: "",
-    cid,
-    size: 0n
-  }];
-  for (let i = 0; i < parts.length; i++) {
-    const part = parts[i];
-    const result = await exporter(cid, blockstore, options);
-    log3('resolving "%s"', part, result);
-    if (result.type === "file") {
-      if (i < parts.length - 1) {
-        throw new InvalidParametersError("Path was invalid");
-      }
-      cid = result.cid;
-    } else if (result.type === "directory") {
-      let dirCid;
-      for await (const entry of result.content()) {
-        if (entry.name === part) {
-          dirCid = entry.cid;
-          break;
-        }
-      }
-      if (dirCid == null) {
-        throw new DoesNotExistError("Could not find path in directory");
-      }
-      cid = dirCid;
-      segments.push({
-        name: part,
-        cid,
-        size: result.size
-      });
-    } else {
-      throw new InvalidParametersError("Could not resolve path");
-    }
-  }
-  log3("resolved %s to %c", path6, cid);
-  return {
-    cid,
-    path: path6,
-    segments
-  };
-}
-async function updatePathCids(cid, result, blockstore, options) {
-  if (result.segments == null || result.segments.length === 0) {
-    return cid;
-  }
-  let child = result.segments.pop();
-  if (child == null) {
-    throw new Error("Insufficient segments");
-  }
-  child.cid = cid;
-  result.segments.reverse();
-  for (const parent of result.segments) {
-    const [directory, pblink] = await Promise.all([
-      cidToDirectory(parent.cid, blockstore, options),
-      cidToPBLink(child.cid, child.name, blockstore, options)
-    ]);
-    const result2 = await addLink(directory, pblink, blockstore, {
-      ...options,
-      allowOverwriting: true,
-      cidVersion: cid.version
-    });
-    cid = result2.cid;
-    parent.cid = cid;
-    child = parent;
-  }
-  return cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/cat.js
-var mergeOptions2 = merge_options_default.bind({ ignoreUndefined: true });
-var defaultOptions = {};
-async function* cat(cid, blockstore, options = {}) {
-  const opts = mergeOptions2(defaultOptions, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, opts);
-  const result = await exporter(resolved.cid, blockstore, opts);
-  if (result.type !== "file" && result.type !== "raw") {
-    throw new NotAFileError();
-  }
-  if (result.content == null) {
-    throw new NoContentError();
-  }
-  yield* result.content(opts);
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/constants.js
-var SHARD_SPLIT_THRESHOLD_BYTES = 262144;
-
-// node_modules/@helia/unixfs/dist/src/commands/chmod.js
-var mergeOptions3 = merge_options_default.bind({ ignoreUndefined: true });
-var log4 = logger("helia:unixfs:chmod");
-var defaultOptions2 = {
-  recursive: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function chmod(cid, mode, blockstore, options = {}) {
-  const opts = mergeOptions3(defaultOptions2, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, options);
-  log4("chmod %c %d", resolved.cid, mode);
-  if (opts.recursive) {
-    const root = await pipe(
-      async function* () {
-        for await (const entry of recursive(resolved.cid, blockstore, options)) {
-          let metadata2;
-          let links2 = [];
-          if (entry.type === "raw") {
-            metadata2 = new UnixFS({ type: "file", data: entry.node });
-          } else if (entry.type === "file" || entry.type === "directory") {
-            metadata2 = entry.unixfs;
-            links2 = entry.node.Links;
-          } else {
-            throw new NotUnixFSError();
-          }
-          metadata2.mode = mode;
-          const node = {
-            Data: metadata2.marshal(),
-            Links: links2
-          };
-          yield {
-            path: entry.path,
-            content: node
-          };
-        }
-      },
-      // @ts-expect-error cannot combine progress types
-      (source) => importer(source, blockstore, {
-        ...opts,
-        dagBuilder: async function* (source2, block2) {
-          for await (const entry of source2) {
-            yield async function() {
-              const node = entry.content;
-              const buf2 = encode7(node);
-              const updatedCid2 = await persist2(buf2, block2, {
-                ...opts,
-                cidVersion: cid.version
-              });
-              if (node.Data == null) {
-                throw new InvalidPBNodeError(`${updatedCid2} had no data`);
-              }
-              const unixfs2 = UnixFS.unmarshal(node.Data);
-              return {
-                cid: updatedCid2,
-                size: BigInt(buf2.length),
-                path: entry.path,
-                unixfs: unixfs2
-              };
-            };
-          }
-        }
-      }),
-      async (nodes) => src_default7(nodes)
-    );
-    if (root == null) {
-      throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`);
-    }
-    return updatePathCids(root.cid, resolved, blockstore, opts);
-  }
-  const block = await blockstore.get(resolved.cid, options);
-  let metadata;
-  let links = [];
-  if (resolved.cid.code === code3) {
-    metadata = new UnixFS({ type: "file", data: block });
-  } else {
-    const node = decode11(block);
-    if (node.Data == null) {
-      throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`);
-    }
-    links = node.Links;
-    metadata = UnixFS.unmarshal(node.Data);
-  }
-  metadata.mode = mode;
-  const updatedBlock = encode7({
-    Data: metadata.marshal(),
-    Links: links
-  });
-  const hash = await sha256.digest(updatedBlock);
-  const updatedCid = CID2.create(resolved.cid.version, code2, hash);
-  await blockstore.put(updatedCid, updatedBlock);
-  return updatePathCids(updatedCid, resolved, blockstore, opts);
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/cp.js
-var mergeOptions4 = merge_options_default.bind({ ignoreUndefined: true });
-var log5 = logger("helia:unixfs:cp");
-var defaultOptions3 = {
-  force: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function cp(source, target, name4, blockstore, options = {}) {
-  const opts = mergeOptions4(defaultOptions3, options);
-  if (name4.includes("/")) {
-    throw new InvalidParametersError("Name must not have slashes");
-  }
-  const [directory, pblink] = await Promise.all([
-    cidToDirectory(target, blockstore, opts),
-    cidToPBLink(source, name4, blockstore, opts)
-  ]);
-  log5('Adding %c as "%s" to %c', source, name4, target);
-  const result = await addLink(directory, pblink, blockstore, {
-    allowOverwriting: opts.force,
-    cidVersion: target.version,
-    ...opts
-  });
-  return result.cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/ls.js
-var mergeOptions5 = merge_options_default.bind({ ignoreUndefined: true });
-var defaultOptions4 = {};
-async function* ls(cid, blockstore, options = {}) {
-  const opts = mergeOptions5(defaultOptions4, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, opts);
-  const result = await exporter(resolved.cid, blockstore);
-  if (result.type === "file" || result.type === "raw") {
-    yield result;
-    return;
-  }
-  if (result.content == null) {
-    throw new NoContentError();
-  }
-  if (result.type !== "directory") {
-    throw new NotADirectoryError();
-  }
-  yield* result.content({
-    offset: options.offset,
-    length: options.length
-  });
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/mkdir.js
-var mergeOptions6 = merge_options_default.bind({ ignoreUndefined: true });
-var log6 = logger("helia:unixfs:mkdir");
-var defaultOptions5 = {
-  cidVersion: 1,
-  force: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function mkdir(parentCid, dirname, blockstore, options = {}) {
-  const opts = mergeOptions6(defaultOptions5, options);
-  if (dirname.includes("/")) {
-    throw new InvalidParametersError("Path must not have slashes");
-  }
-  const entry = await exporter(parentCid, blockstore, options);
-  if (entry.type !== "directory") {
-    throw new NotADirectoryError(`${parentCid.toString()} was not a UnixFS directory`);
-  }
-  log6("creating %s", dirname);
-  const metadata = new UnixFS({
-    type: "directory",
-    mode: opts.mode,
-    mtime: opts.mtime
-  });
-  const node = {
-    Data: metadata.marshal(),
-    Links: []
-  };
-  const buf2 = encode7(node);
-  const hash = await sha256.digest(buf2);
-  const emptyDirCid = CID2.create(opts.cidVersion, code2, hash);
-  await blockstore.put(emptyDirCid, buf2);
-  const [directory, pblink] = await Promise.all([
-    cidToDirectory(parentCid, blockstore, opts),
-    cidToPBLink(emptyDirCid, dirname, blockstore, opts)
-  ]);
-  log6("adding empty dir called %s to %c", dirname, parentCid);
-  const result = await addLink(directory, pblink, blockstore, {
-    ...opts,
-    allowOverwriting: opts.force
-  });
-  return result.cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/utils/remove-link.js
-var log7 = logger("helia:unixfs:utils:remove-link");
-async function removeLink(parent, name4, blockstore, options) {
-  if (parent.node.Data == null) {
-    throw new InvalidPBNodeError("Parent node had no data");
-  }
-  const meta = UnixFS.unmarshal(parent.node.Data);
-  if (meta.type === "hamt-sharded-directory") {
-    log7(`removing ${name4} from sharded directory`);
-    const result = await removeFromShardedDirectory(parent, name4, blockstore, options);
-    if (!await isOverShardThreshold(result.node, blockstore, options.shardSplitThresholdBytes, options)) {
-      log7("converting shard to flat directory %c", parent.cid);
-      return convertToFlatDirectory(result, blockstore, options);
-    }
-    return result;
-  }
-  log7(`removing link ${name4} regular directory`);
-  return removeFromDirectory(parent, name4, blockstore, options);
-}
-var removeFromDirectory = async (parent, name4, blockstore, options) => {
-  parent.node.Links = parent.node.Links.filter((link) => {
-    return link.Name !== name4;
-  });
-  const parentBlock = encode7(parent.node);
-  const parentCid = await persist2(parentBlock, blockstore, {
-    ...options,
-    cidVersion: parent.cid.version
-  });
-  log7(`Updated regular directory ${parentCid}`);
-  return {
-    node: parent.node,
-    cid: parentCid
-  };
-};
-var removeFromShardedDirectory = async (parent, name4, blockstore, options) => {
-  const { path: path6 } = await recreateShardedDirectory(parent.cid, name4, blockstore, options);
-  const finalSegment = path6[path6.length - 1];
-  if (finalSegment == null) {
-    throw new Error("Invalid HAMT, could not generate path");
-  }
-  const linkName = finalSegment.node.Links.filter((l) => (l.Name ?? "").substring(2) === name4).map((l) => l.Name).pop();
-  if (linkName == null) {
-    throw new Error("File not found");
-  }
-  const prefix = linkName.substring(0, 2);
-  const index = parseInt(prefix, 16);
-  finalSegment.node.Links = finalSegment.node.Links.filter((link) => link.Name !== linkName);
-  finalSegment.children.unset(index);
-  if (finalSegment.node.Links.length === 1) {
-    while (true) {
-      if (path6.length === 1) {
-        break;
-      }
-      const segment = path6[path6.length - 1];
-      if (segment == null || segment.node.Links.length > 1) {
-        break;
-      }
-      path6.pop();
-      const nextSegment = path6[path6.length - 1];
-      if (nextSegment == null) {
-        break;
-      }
-      const link = segment.node.Links[0];
-      nextSegment.node.Links = nextSegment.node.Links.filter((l) => !(l.Name ?? "").startsWith(nextSegment.prefix));
-      nextSegment.node.Links.push({
-        Hash: link.Hash,
-        Name: `${nextSegment.prefix}${(link.Name ?? "").substring(2)}`,
-        Tsize: link.Tsize
-      });
-    }
-  }
-  return updateShardedDirectory(path6, blockstore, options);
-};
-var convertToFlatDirectory = async (parent, blockstore, options) => {
-  if (parent.node.Data == null) {
-    throw new InvalidParametersError("Invalid parent passed to convertToFlatDirectory");
-  }
-  const rootNode = {
-    Links: []
-  };
-  const dir = await exporter(parent.cid, blockstore);
-  if (dir.type !== "directory") {
-    throw new Error("Unexpected node type");
-  }
-  for await (const entry of dir.content()) {
-    let tsize = 0;
-    if (entry.node instanceof Uint8Array) {
-      tsize = entry.node.byteLength;
-    } else {
-      tsize = encode7(entry.node).length;
-    }
-    rootNode.Links.push({
-      Hash: entry.cid,
-      Name: entry.name,
-      Tsize: tsize
-    });
-  }
-  const oldUnixfs = UnixFS.unmarshal(parent.node.Data);
-  rootNode.Data = new UnixFS({ type: "directory", mode: oldUnixfs.mode, mtime: oldUnixfs.mtime }).marshal();
-  const block = encode7(prepare(rootNode));
-  const cid = await persist2(block, blockstore, {
-    codec: src_exports2,
-    cidVersion: parent.cid.version,
-    signal: options.signal
-  });
-  return {
-    cid,
-    node: rootNode
-  };
-};
-
-// node_modules/@helia/unixfs/dist/src/commands/rm.js
-var mergeOptions7 = merge_options_default.bind({ ignoreUndefined: true });
-var log8 = logger("helia:unixfs:rm");
-var defaultOptions6 = {
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function rm(target, name4, blockstore, options = {}) {
-  const opts = mergeOptions7(defaultOptions6, options);
-  if (name4.includes("/")) {
-    throw new InvalidParametersError("Name must not have slashes");
-  }
-  const directory = await cidToDirectory(target, blockstore, opts);
-  log8("Removing %s from %c", name4, target);
-  const result = await removeLink(directory, name4, blockstore, {
-    ...opts,
-    cidVersion: target.version
-  });
-  return result.cid;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/stat.js
-var mergeOptions8 = merge_options_default.bind({ ignoreUndefined: true });
-var log9 = logger("helia:unixfs:stat");
-var defaultOptions7 = {};
-async function stat(cid, blockstore, options = {}) {
-  var _a;
-  const opts = mergeOptions8(defaultOptions7, options);
-  const resolved = await resolve5(cid, options.path, blockstore, opts);
-  log9("stat %c", resolved.cid);
-  const result = await exporter(resolved.cid, blockstore, opts);
-  if (result.type !== "file" && result.type !== "directory" && result.type !== "raw") {
-    throw new NotUnixFSError();
-  }
-  let fileSize = 0n;
-  let dagSize = 0n;
-  let localFileSize = 0n;
-  let localDagSize = 0n;
-  let blocks = 0;
-  let mode;
-  let mtime;
-  const type = result.type;
-  let unixfs2;
-  if (result.type === "raw") {
-    fileSize = BigInt(result.node.byteLength);
-    dagSize = BigInt(result.node.byteLength);
-    localFileSize = BigInt(result.node.byteLength);
-    localDagSize = BigInt(result.node.byteLength);
-    blocks = 1;
-  }
-  if (result.type === "directory") {
-    fileSize = 0n;
-    dagSize = BigInt(result.unixfs.marshal().byteLength);
-    localFileSize = 0n;
-    localDagSize = dagSize;
-    blocks = 1;
-    mode = result.unixfs.mode;
-    mtime = result.unixfs.mtime;
-    unixfs2 = result.unixfs;
-  }
-  if (result.type === "file") {
-    const results = await inspectDag(resolved.cid, blockstore, opts);
-    fileSize = result.unixfs.fileSize();
-    dagSize = BigInt((((_a = result.node.Data) == null ? void 0 : _a.byteLength) ?? 0) + result.node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0));
-    localFileSize = BigInt(results.localFileSize);
-    localDagSize = BigInt(results.localDagSize);
-    blocks = results.blocks;
-    mode = result.unixfs.mode;
-    mtime = result.unixfs.mtime;
-    unixfs2 = result.unixfs;
-  }
-  return {
-    cid: resolved.cid,
-    mode,
-    mtime,
-    fileSize,
-    dagSize,
-    localFileSize,
-    localDagSize,
-    blocks,
-    type,
-    unixfs: unixfs2
-  };
-}
-async function inspectDag(cid, blockstore, options) {
-  const results = {
-    localFileSize: 0,
-    localDagSize: 0,
-    blocks: 0
-  };
-  if (await blockstore.has(cid, options)) {
-    const block = await blockstore.get(cid, options);
-    results.blocks++;
-    results.localDagSize += block.byteLength;
-    if (cid.code === code3) {
-      results.localFileSize += block.byteLength;
-    } else if (cid.code === code2) {
-      const pbNode = decode11(block);
-      if (pbNode.Links.length > 0) {
-        for (const link of pbNode.Links) {
-          const linkResult = await inspectDag(link.Hash, blockstore, options);
-          results.localFileSize += linkResult.localFileSize;
-          results.localDagSize += linkResult.localDagSize;
-          results.blocks += linkResult.blocks;
-        }
-      } else {
-        if (pbNode.Data == null) {
-          throw new InvalidPBNodeError(`PBNode ${cid.toString()} had no data`);
-        }
-        const unixfs2 = UnixFS.unmarshal(pbNode.Data);
-        if (unixfs2.data == null) {
-          throw new InvalidPBNodeError(`UnixFS node ${cid.toString()} had no data`);
-        }
-        results.localFileSize += unixfs2.data.byteLength ?? 0;
-      }
-    } else {
-      throw new UnknownError(`${cid.toString()} was neither DAG_PB nor RAW`);
-    }
-  }
-  return results;
-}
-
-// node_modules/@helia/unixfs/dist/src/commands/touch.js
-var mergeOptions9 = merge_options_default.bind({ ignoreUndefined: true });
-var log10 = logger("helia:unixfs:touch");
-var defaultOptions8 = {
-  recursive: false,
-  shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD_BYTES
-};
-async function touch(cid, blockstore, options = {}) {
-  const opts = mergeOptions9(defaultOptions8, options);
-  const resolved = await resolve5(cid, opts.path, blockstore, opts);
-  const mtime = opts.mtime ?? {
-    secs: BigInt(Math.round(Date.now() / 1e3)),
-    nsecs: 0
-  };
-  log10("touch %c %o", resolved.cid, mtime);
-  if (opts.recursive) {
-    const root = await pipe(
-      async function* () {
-        for await (const entry of recursive(resolved.cid, blockstore)) {
-          let metadata2;
-          let links2;
-          if (entry.type === "raw") {
-            metadata2 = new UnixFS({ data: entry.node });
-            links2 = [];
-          } else if (entry.type === "file" || entry.type === "directory") {
-            metadata2 = entry.unixfs;
-            links2 = entry.node.Links;
-          } else {
-            throw new NotUnixFSError();
-          }
-          metadata2.mtime = mtime;
-          const node = {
-            Data: metadata2.marshal(),
-            Links: links2
-          };
-          yield {
-            path: entry.path,
-            content: node
-          };
-        }
-      },
-      // @ts-expect-error blockstore types are incompatible
-      (source) => importer(source, blockstore, {
-        ...opts,
-        dagBuilder: async function* (source2, block2) {
-          for await (const entry of source2) {
-            yield async function() {
-              const node = entry.content;
-              const buf2 = encode7(node);
-              const updatedCid2 = await persist2(buf2, block2, {
-                ...opts,
-                cidVersion: cid.version
-              });
-              if (node.Data == null) {
-                throw new InvalidPBNodeError(`${updatedCid2} had no data`);
-              }
-              const unixfs2 = UnixFS.unmarshal(node.Data);
-              return {
-                cid: updatedCid2,
-                size: BigInt(buf2.length),
-                path: entry.path,
-                unixfs: unixfs2
-              };
-            };
-          }
-        }
-      }),
-      async (nodes) => src_default7(nodes)
-    );
-    if (root == null) {
-      throw new UnknownError(`Could not chmod ${resolved.cid.toString()}`);
-    }
-    return updatePathCids(root.cid, resolved, blockstore, opts);
-  }
-  const block = await blockstore.get(resolved.cid, options);
-  let metadata;
-  let links = [];
-  if (resolved.cid.code === code3) {
-    metadata = new UnixFS({ data: block });
-  } else {
-    const node = decode11(block);
-    links = node.Links;
-    if (node.Data == null) {
-      throw new InvalidPBNodeError(`${resolved.cid.toString()} had no data`);
-    }
-    metadata = UnixFS.unmarshal(node.Data);
-  }
-  metadata.mtime = mtime;
-  const updatedBlock = encode7({
-    Data: metadata.marshal(),
-    Links: links
-  });
-  const hash = await sha256.digest(updatedBlock);
-  const updatedCid = CID2.create(resolved.cid.version, code2, hash);
-  await blockstore.put(updatedCid, updatedBlock);
-  return updatePathCids(updatedCid, resolved, blockstore, opts);
-}
-
-// node_modules/it-glob/dist/src/index.js
-import fs4 from "fs/promises";
-import path2 from "path";
-
-// node_modules/minimatch/dist/mjs/index.js
-var import_brace_expansion = __toESM(require_brace_expansion(), 1);
-
-// node_modules/minimatch/dist/mjs/assert-valid-pattern.js
-var MAX_PATTERN_LENGTH = 1024 * 64;
-var assertValidPattern = (pattern) => {
-  if (typeof pattern !== "string") {
-    throw new TypeError("invalid pattern");
-  }
-  if (pattern.length > MAX_PATTERN_LENGTH) {
-    throw new TypeError("pattern is too long");
-  }
-};
-
-// node_modules/minimatch/dist/mjs/brace-expressions.js
-var posixClasses = {
-  "[:alnum:]": ["\\p{L}\\p{Nl}\\p{Nd}", true],
-  "[:alpha:]": ["\\p{L}\\p{Nl}", true],
-  "[:ascii:]": ["\\x00-\\x7f", false],
-  "[:blank:]": ["\\p{Zs}\\t", true],
-  "[:cntrl:]": ["\\p{Cc}", true],
-  "[:digit:]": ["\\p{Nd}", true],
-  "[:graph:]": ["\\p{Z}\\p{C}", true, true],
-  "[:lower:]": ["\\p{Ll}", true],
-  "[:print:]": ["\\p{C}", true],
-  "[:punct:]": ["\\p{P}", true],
-  "[:space:]": ["\\p{Z}\\t\\r\\n\\v\\f", true],
-  "[:upper:]": ["\\p{Lu}", true],
-  "[:word:]": ["\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}", true],
-  "[:xdigit:]": ["A-Fa-f0-9", false]
-};
-var braceEscape = (s) => s.replace(/[[\]\\-]/g, "\\$&");
-var regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var rangesToString = (ranges) => ranges.join("");
-var parseClass = (glob2, position) => {
-  const pos = position;
-  if (glob2.charAt(pos) !== "[") {
-    throw new Error("not in a brace expression");
-  }
-  const ranges = [];
-  const negs = [];
-  let i = pos + 1;
-  let sawStart = false;
-  let uflag = false;
-  let escaping = false;
-  let negate = false;
-  let endPos = pos;
-  let rangeStart = "";
-  WHILE:
-    while (i < glob2.length) {
-      const c = glob2.charAt(i);
-      if ((c === "!" || c === "^") && i === pos + 1) {
-        negate = true;
-        i++;
-        continue;
-      }
-      if (c === "]" && sawStart && !escaping) {
-        endPos = i + 1;
-        break;
-      }
-      sawStart = true;
-      if (c === "\\") {
-        if (!escaping) {
-          escaping = true;
-          i++;
-          continue;
-        }
-      }
-      if (c === "[" && !escaping) {
-        for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-          if (glob2.startsWith(cls, i)) {
-            if (rangeStart) {
-              return ["$.", false, glob2.length - pos, true];
-            }
-            i += cls.length;
-            if (neg)
-              negs.push(unip);
-            else
-              ranges.push(unip);
-            uflag = uflag || u;
-            continue WHILE;
-          }
-        }
-      }
-      escaping = false;
-      if (rangeStart) {
-        if (c > rangeStart) {
-          ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c));
-        } else if (c === rangeStart) {
-          ranges.push(braceEscape(c));
-        }
-        rangeStart = "";
-        i++;
-        continue;
-      }
-      if (glob2.startsWith("-]", i + 1)) {
-        ranges.push(braceEscape(c + "-"));
-        i += 2;
-        continue;
-      }
-      if (glob2.startsWith("-", i + 1)) {
-        rangeStart = c;
-        i += 2;
-        continue;
-      }
-      ranges.push(braceEscape(c));
-      i++;
-    }
-  if (endPos < i) {
-    return ["", false, 0, false];
-  }
-  if (!ranges.length && !negs.length) {
-    return ["$.", false, glob2.length - pos, true];
-  }
-  if (negs.length === 0 && ranges.length === 1 && /^\\?.$/.test(ranges[0]) && !negate) {
-    const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-    return [regexpEscape(r), false, endPos - pos, false];
-  }
-  const sranges = "[" + (negate ? "^" : "") + rangesToString(ranges) + "]";
-  const snegs = "[" + (negate ? "" : "^") + rangesToString(negs) + "]";
-  const comb = ranges.length && negs.length ? "(" + sranges + "|" + snegs + ")" : ranges.length ? sranges : snegs;
-  return [comb, uflag, endPos - pos, true];
-};
-
-// node_modules/minimatch/dist/mjs/unescape.js
-var unescape = (s, { windowsPathsNoEscape = false } = {}) => {
-  return windowsPathsNoEscape ? s.replace(/\[([^\/\\])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, "$1$2").replace(/\\([^\/])/g, "$1");
-};
-
-// node_modules/minimatch/dist/mjs/ast.js
-var types2 = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]);
-var isExtglobType = (c) => types2.has(c);
-var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))";
-var startNoDot = "(?!\\.)";
-var addPatternStart = /* @__PURE__ */ new Set(["[", "."]);
-var justDots = /* @__PURE__ */ new Set(["..", "."]);
-var reSpecials = new Set("().*{}+?[]^$\\!");
-var regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var qmark = "[^/]";
-var star = qmark + "*?";
-var starNoEmpty = qmark + "+?";
-var AST = class _AST {
-  type;
-  #root;
-  #hasMagic;
-  #uflag = false;
-  #parts = [];
-  #parent;
-  #parentIndex;
-  #negs;
-  #filledNegs = false;
-  #options;
-  #toString;
-  // set to true if it's an extglob with no children
-  // (which really means one child of '')
-  #emptyExt = false;
-  constructor(type, parent, options = {}) {
-    this.type = type;
-    if (type)
-      this.#hasMagic = true;
-    this.#parent = parent;
-    this.#root = this.#parent ? this.#parent.#root : this;
-    this.#options = this.#root === this ? options : this.#root.#options;
-    this.#negs = this.#root === this ? [] : this.#root.#negs;
-    if (type === "!" && !this.#root.#filledNegs)
-      this.#negs.push(this);
-    this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-  }
-  get hasMagic() {
-    if (this.#hasMagic !== void 0)
-      return this.#hasMagic;
-    for (const p of this.#parts) {
-      if (typeof p === "string")
-        continue;
-      if (p.type || p.hasMagic)
-        return this.#hasMagic = true;
-    }
-    return this.#hasMagic;
-  }
-  // reconstructs the pattern
-  toString() {
-    if (this.#toString !== void 0)
-      return this.#toString;
-    if (!this.type) {
-      return this.#toString = this.#parts.map((p) => String(p)).join("");
-    } else {
-      return this.#toString = this.type + "(" + this.#parts.map((p) => String(p)).join("|") + ")";
-    }
-  }
-  #fillNegs() {
-    if (this !== this.#root)
-      throw new Error("should only call on root");
-    if (this.#filledNegs)
-      return this;
-    this.toString();
-    this.#filledNegs = true;
-    let n;
-    while (n = this.#negs.pop()) {
-      if (n.type !== "!")
-        continue;
-      let p = n;
-      let pp = p.#parent;
-      while (pp) {
-        for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-          for (const part of n.#parts) {
-            if (typeof part === "string") {
-              throw new Error("string part in extglob AST??");
-            }
-            part.copyIn(pp.#parts[i]);
-          }
-        }
-        p = pp;
-        pp = p.#parent;
-      }
-    }
-    return this;
-  }
-  push(...parts) {
-    for (const p of parts) {
-      if (p === "")
-        continue;
-      if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) {
-        throw new Error("invalid part: " + p);
-      }
-      this.#parts.push(p);
-    }
-  }
-  toJSON() {
-    var _a;
-    const ret = this.type === null ? this.#parts.slice().map((p) => typeof p === "string" ? p : p.toJSON()) : [this.type, ...this.#parts.map((p) => p.toJSON())];
-    if (this.isStart() && !this.type)
-      ret.unshift([]);
-    if (this.isEnd() && (this === this.#root || this.#root.#filledNegs && ((_a = this.#parent) == null ? void 0 : _a.type) === "!")) {
-      ret.push({});
-    }
-    return ret;
-  }
-  isStart() {
-    var _a;
-    if (this.#root === this)
-      return true;
-    if (!((_a = this.#parent) == null ? void 0 : _a.isStart()))
-      return false;
-    if (this.#parentIndex === 0)
-      return true;
-    const p = this.#parent;
-    for (let i = 0; i < this.#parentIndex; i++) {
-      const pp = p.#parts[i];
-      if (!(pp instanceof _AST && pp.type === "!")) {
-        return false;
-      }
-    }
-    return true;
-  }
-  isEnd() {
-    var _a, _b, _c;
-    if (this.#root === this)
-      return true;
-    if (((_a = this.#parent) == null ? void 0 : _a.type) === "!")
-      return true;
-    if (!((_b = this.#parent) == null ? void 0 : _b.isEnd()))
-      return false;
-    if (!this.type)
-      return (_c = this.#parent) == null ? void 0 : _c.isEnd();
-    const pl = this.#parent ? this.#parent.#parts.length : 0;
-    return this.#parentIndex === pl - 1;
-  }
-  copyIn(part) {
-    if (typeof part === "string")
-      this.push(part);
-    else
-      this.push(part.clone(this));
-  }
-  clone(parent) {
-    const c = new _AST(this.type, parent);
-    for (const p of this.#parts) {
-      c.copyIn(p);
-    }
-    return c;
-  }
-  static #parseAST(str, ast, pos, opt) {
-    let escaping = false;
-    let inBrace = false;
-    let braceStart = -1;
-    let braceNeg = false;
-    if (ast.type === null) {
-      let i2 = pos;
-      let acc2 = "";
-      while (i2 < str.length) {
-        const c = str.charAt(i2++);
-        if (escaping || c === "\\") {
-          escaping = !escaping;
-          acc2 += c;
-          continue;
-        }
-        if (inBrace) {
-          if (i2 === braceStart + 1) {
-            if (c === "^" || c === "!") {
-              braceNeg = true;
-            }
-          } else if (c === "]" && !(i2 === braceStart + 2 && braceNeg)) {
-            inBrace = false;
-          }
-          acc2 += c;
-          continue;
-        } else if (c === "[") {
-          inBrace = true;
-          braceStart = i2;
-          braceNeg = false;
-          acc2 += c;
-          continue;
-        }
-        if (!opt.noext && isExtglobType(c) && str.charAt(i2) === "(") {
-          ast.push(acc2);
-          acc2 = "";
-          const ext2 = new _AST(c, ast);
-          i2 = _AST.#parseAST(str, ext2, i2, opt);
-          ast.push(ext2);
-          continue;
-        }
-        acc2 += c;
-      }
-      ast.push(acc2);
-      return i2;
-    }
-    let i = pos + 1;
-    let part = new _AST(null, ast);
-    const parts = [];
-    let acc = "";
-    while (i < str.length) {
-      const c = str.charAt(i++);
-      if (escaping || c === "\\") {
-        escaping = !escaping;
-        acc += c;
-        continue;
-      }
-      if (inBrace) {
-        if (i === braceStart + 1) {
-          if (c === "^" || c === "!") {
-            braceNeg = true;
-          }
-        } else if (c === "]" && !(i === braceStart + 2 && braceNeg)) {
-          inBrace = false;
-        }
-        acc += c;
-        continue;
-      } else if (c === "[") {
-        inBrace = true;
-        braceStart = i;
-        braceNeg = false;
-        acc += c;
-        continue;
-      }
-      if (isExtglobType(c) && str.charAt(i) === "(") {
-        part.push(acc);
-        acc = "";
-        const ext2 = new _AST(c, part);
-        part.push(ext2);
-        i = _AST.#parseAST(str, ext2, i, opt);
-        continue;
-      }
-      if (c === "|") {
-        part.push(acc);
-        acc = "";
-        parts.push(part);
-        part = new _AST(null, ast);
-        continue;
-      }
-      if (c === ")") {
-        if (acc === "" && ast.#parts.length === 0) {
-          ast.#emptyExt = true;
-        }
-        part.push(acc);
-        acc = "";
-        ast.push(...parts, part);
-        return i;
-      }
-      acc += c;
-    }
-    ast.type = null;
-    ast.#hasMagic = void 0;
-    ast.#parts = [str.substring(pos - 1)];
-    return i;
-  }
-  static fromGlob(pattern, options = {}) {
-    const ast = new _AST(null, void 0, options);
-    _AST.#parseAST(pattern, ast, 0, options);
-    return ast;
-  }
-  // returns the regular expression if there's magic, or the unescaped
-  // string if not.
-  toMMPattern() {
-    if (this !== this.#root)
-      return this.#root.toMMPattern();
-    const glob2 = this.toString();
-    const [re, body, hasMagic, uflag] = this.toRegExpSource();
-    const anyMagic = hasMagic || this.#hasMagic || this.#options.nocase && !this.#options.nocaseMagicOnly && glob2.toUpperCase() !== glob2.toLowerCase();
-    if (!anyMagic) {
-      return body;
-    }
-    const flags = (this.#options.nocase ? "i" : "") + (uflag ? "u" : "");
-    return Object.assign(new RegExp(`^${re}$`, flags), {
-      _src: re,
-      _glob: glob2
-    });
-  }
-  // returns the string match, the regexp source, whether there's magic
-  // in the regexp (so a regular expression is required) and whether or
-  // not the uflag is needed for the regular expression (for posix classes)
-  // TODO: instead of injecting the start/end at this point, just return
-  // the BODY of the regexp, along with the start/end portions suitable
-  // for binding the start/end in either a joined full-path makeRe context
-  // (where we bind to (^|/), or a standalone matchPart context (where
-  // we bind to ^, and not /).  Otherwise slashes get duped!
-  //
-  // In part-matching mode, the start is:
-  // - if not isStart: nothing
-  // - if traversal possible, but not allowed: ^(?!\.\.?$)
-  // - if dots allowed or not possible: ^
-  // - if dots possible and not allowed: ^(?!\.)
-  // end is:
-  // - if not isEnd(): nothing
-  // - else: $
-  //
-  // In full-path matching mode, we put the slash at the START of the
-  // pattern, so start is:
-  // - if first pattern: same as part-matching mode
-  // - if not isStart(): nothing
-  // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-  // - if dots allowed or not possible: /
-  // - if dots possible and not allowed: /(?!\.)
-  // end is:
-  // - if last pattern, same as part-matching mode
-  // - else nothing
-  //
-  // Always put the (?:$|/) on negated tails, though, because that has to be
-  // there to bind the end of the negated pattern portion, and it's easier to
-  // just stick it in now rather than try to inject it later in the middle of
-  // the pattern.
-  //
-  // We can just always return the same end, and leave it up to the caller
-  // to know whether it's going to be used joined or in parts.
-  // And, if the start is adjusted slightly, can do the same there:
-  // - if not isStart: nothing
-  // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-  // - if dots allowed or not possible: (?:/|^)
-  // - if dots possible and not allowed: (?:/|^)(?!\.)
-  //
-  // But it's better to have a simpler binding without a conditional, for
-  // performance, so probably better to return both start options.
-  //
-  // Then the caller just ignores the end if it's not the first pattern,
-  // and the start always gets applied.
-  //
-  // But that's always going to be $ if it's the ending pattern, or nothing,
-  // so the caller can just attach $ at the end of the pattern when building.
-  //
-  // So the todo is:
-  // - better detect what kind of start is needed
-  // - return both flavors of starting pattern
-  // - attach $ at the end of the pattern when creating the actual RegExp
-  //
-  // Ah, but wait, no, that all only applies to the root when the first pattern
-  // is not an extglob. If the first pattern IS an extglob, then we need all
-  // that dot prevention biz to live in the extglob portions, because eg
-  // +(*|.x*) can match .xy but not .yx.
-  //
-  // So, return the two flavors if it's #root and the first child is not an
-  // AST, otherwise leave it to the child AST to handle it, and there,
-  // use the (?:^|/) style of start binding.
-  //
-  // Even simplified further:
-  // - Since the start for a join is eg /(?!\.) and the start for a part
-  // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-  // or start or whatever) and prepend ^ or / at the Regexp construction.
-  toRegExpSource(allowDot) {
-    var _a;
-    const dot = allowDot ?? !!this.#options.dot;
-    if (this.#root === this)
-      this.#fillNegs();
-    if (!this.type) {
-      const noEmpty = this.isStart() && this.isEnd();
-      const src3 = this.#parts.map((p) => {
-        const [re, _, hasMagic, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot);
-        this.#hasMagic = this.#hasMagic || hasMagic;
-        this.#uflag = this.#uflag || uflag;
-        return re;
-      }).join("");
-      let start2 = "";
-      if (this.isStart()) {
-        if (typeof this.#parts[0] === "string") {
-          const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-          if (!dotTravAllowed) {
-            const aps = addPatternStart;
-            const needNoTrav = (
-              // dots are allowed, and the pattern starts with [ or .
-              dot && aps.has(src3.charAt(0)) || // the pattern starts with \., and then [ or .
-              src3.startsWith("\\.") && aps.has(src3.charAt(2)) || // the pattern starts with \.\., and then [ or .
-              src3.startsWith("\\.\\.") && aps.has(src3.charAt(4))
-            );
-            const needNoDot = !dot && !allowDot && aps.has(src3.charAt(0));
-            start2 = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : "";
-          }
-        }
-      }
-      let end = "";
-      if (this.isEnd() && this.#root.#filledNegs && ((_a = this.#parent) == null ? void 0 : _a.type) === "!") {
-        end = "(?:$|\\/)";
-      }
-      const final2 = start2 + src3 + end;
-      return [
-        final2,
-        unescape(src3),
-        this.#hasMagic = !!this.#hasMagic,
-        this.#uflag
-      ];
-    }
-    const repeated = this.type === "*" || this.type === "+";
-    const start = this.type === "!" ? "(?:(?!(?:" : "(?:";
-    let body = this.#partsToRegExp(dot);
-    if (this.isStart() && this.isEnd() && !body && this.type !== "!") {
-      const s = this.toString();
-      this.#parts = [s];
-      this.type = null;
-      this.#hasMagic = void 0;
-      return [s, unescape(this.toString()), false, false];
-    }
-    let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? "" : this.#partsToRegExp(true);
-    if (bodyDotAllowed === body) {
-      bodyDotAllowed = "";
-    }
-    if (bodyDotAllowed) {
-      body = `(?:${body})(?:${bodyDotAllowed})*?`;
-    }
-    let final = "";
-    if (this.type === "!" && this.#emptyExt) {
-      final = (this.isStart() && !dot ? startNoDot : "") + starNoEmpty;
-    } else {
-      const close = this.type === "!" ? (
-        // !() must match something,but !(x) can match ''
-        "))" + (this.isStart() && !dot && !allowDot ? startNoDot : "") + star + ")"
-      ) : this.type === "@" ? ")" : this.type === "?" ? ")?" : this.type === "+" && bodyDotAllowed ? ")" : this.type === "*" && bodyDotAllowed ? `)?` : `)${this.type}`;
-      final = start + body + close;
-    }
-    return [
-      final,
-      unescape(body),
-      this.#hasMagic = !!this.#hasMagic,
-      this.#uflag
-    ];
-  }
-  #partsToRegExp(dot) {
-    return this.#parts.map((p) => {
-      if (typeof p === "string") {
-        throw new Error("string type in extglob ast??");
-      }
-      const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-      this.#uflag = this.#uflag || uflag;
-      return re;
-    }).filter((p) => !(this.isStart() && this.isEnd()) || !!p).join("|");
-  }
-  static #parseGlob(glob2, hasMagic, noEmpty = false) {
-    let escaping = false;
-    let re = "";
-    let uflag = false;
-    for (let i = 0; i < glob2.length; i++) {
-      const c = glob2.charAt(i);
-      if (escaping) {
-        escaping = false;
-        re += (reSpecials.has(c) ? "\\" : "") + c;
-        continue;
-      }
-      if (c === "\\") {
-        if (i === glob2.length - 1) {
-          re += "\\\\";
-        } else {
-          escaping = true;
-        }
-        continue;
-      }
-      if (c === "[") {
-        const [src3, needUflag, consumed, magic] = parseClass(glob2, i);
-        if (consumed) {
-          re += src3;
-          uflag = uflag || needUflag;
-          i += consumed - 1;
-          hasMagic = hasMagic || magic;
-          continue;
-        }
-      }
-      if (c === "*") {
-        if (noEmpty && glob2 === "*")
-          re += starNoEmpty;
-        else
-          re += star;
-        hasMagic = true;
-        continue;
-      }
-      if (c === "?") {
-        re += qmark;
-        hasMagic = true;
-        continue;
-      }
-      re += regExpEscape(c);
-    }
-    return [re, unescape(glob2), !!hasMagic, uflag];
-  }
-};
-
-// node_modules/minimatch/dist/mjs/escape.js
-var escape = (s, { windowsPathsNoEscape = false } = {}) => {
-  return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, "[$&]") : s.replace(/[?*()[\]\\]/g, "\\$&");
-};
-
-// node_modules/minimatch/dist/mjs/index.js
-var minimatch = (p, pattern, options = {}) => {
-  assertValidPattern(pattern);
-  if (!options.nocomment && pattern.charAt(0) === "#") {
-    return false;
-  }
-  return new Minimatch(pattern, options).match(p);
-};
-var starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-var starDotExtTest = (ext2) => (f) => !f.startsWith(".") && f.endsWith(ext2);
-var starDotExtTestDot = (ext2) => (f) => f.endsWith(ext2);
-var starDotExtTestNocase = (ext2) => {
-  ext2 = ext2.toLowerCase();
-  return (f) => !f.startsWith(".") && f.toLowerCase().endsWith(ext2);
-};
-var starDotExtTestNocaseDot = (ext2) => {
-  ext2 = ext2.toLowerCase();
-  return (f) => f.toLowerCase().endsWith(ext2);
-};
-var starDotStarRE = /^\*+\.\*+$/;
-var starDotStarTest = (f) => !f.startsWith(".") && f.includes(".");
-var starDotStarTestDot = (f) => f !== "." && f !== ".." && f.includes(".");
-var dotStarRE = /^\.\*+$/;
-var dotStarTest = (f) => f !== "." && f !== ".." && f.startsWith(".");
-var starRE = /^\*+$/;
-var starTest = (f) => f.length !== 0 && !f.startsWith(".");
-var starTestDot = (f) => f.length !== 0 && f !== "." && f !== "..";
-var qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-var qmarksTestNocase = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExt([$0]);
-  if (!ext2)
-    return noext;
-  ext2 = ext2.toLowerCase();
-  return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
-};
-var qmarksTestNocaseDot = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExtDot([$0]);
-  if (!ext2)
-    return noext;
-  ext2 = ext2.toLowerCase();
-  return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
-};
-var qmarksTestDot = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExtDot([$0]);
-  return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
-};
-var qmarksTest = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExt([$0]);
-  return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
-};
-var qmarksTestNoExt = ([$0]) => {
-  const len = $0.length;
-  return (f) => f.length === len && !f.startsWith(".");
-};
-var qmarksTestNoExtDot = ([$0]) => {
-  const len = $0.length;
-  return (f) => f.length === len && f !== "." && f !== "..";
-};
-var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix";
-var path = {
-  win32: { sep: "\\" },
-  posix: { sep: "/" }
-};
-var sep = defaultPlatform === "win32" ? path.win32.sep : path.posix.sep;
-minimatch.sep = sep;
-var GLOBSTAR = Symbol("globstar **");
-minimatch.GLOBSTAR = GLOBSTAR;
-var qmark2 = "[^/]";
-var star2 = qmark2 + "*?";
-var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?";
-var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?";
-var filter2 = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
-minimatch.filter = filter2;
-var ext = (a, b = {}) => Object.assign({}, a, b);
-var defaults = (def) => {
-  if (!def || typeof def !== "object" || !Object.keys(def).length) {
-    return minimatch;
-  }
-  const orig = minimatch;
-  const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-  return Object.assign(m, {
-    Minimatch: class Minimatch extends orig.Minimatch {
-      constructor(pattern, options = {}) {
-        super(pattern, ext(def, options));
-      }
-      static defaults(options) {
-        return orig.defaults(ext(def, options)).Minimatch;
-      }
-    },
-    AST: class AST extends orig.AST {
-      /* c8 ignore start */
-      constructor(type, parent, options = {}) {
-        super(type, parent, ext(def, options));
-      }
-      /* c8 ignore stop */
-      static fromGlob(pattern, options = {}) {
-        return orig.AST.fromGlob(pattern, ext(def, options));
-      }
-    },
-    unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-    escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-    filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-    defaults: (options) => orig.defaults(ext(def, options)),
-    makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-    braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-    match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-    sep: orig.sep,
-    GLOBSTAR
-  });
-};
-minimatch.defaults = defaults;
-var braceExpand = (pattern, options = {}) => {
-  assertValidPattern(pattern);
-  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-    return [pattern];
-  }
-  return (0, import_brace_expansion.default)(pattern);
-};
-minimatch.braceExpand = braceExpand;
-var makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-minimatch.makeRe = makeRe;
-var match = (list, pattern, options = {}) => {
-  const mm = new Minimatch(pattern, options);
-  list = list.filter((f) => mm.match(f));
-  if (mm.options.nonull && !list.length) {
-    list.push(pattern);
-  }
-  return list;
-};
-minimatch.match = match;
-var globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-var regExpEscape2 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var Minimatch = class {
-  options;
-  set;
-  pattern;
-  windowsPathsNoEscape;
-  nonegate;
-  negate;
-  comment;
-  empty;
-  preserveMultipleSlashes;
-  partial;
-  globSet;
-  globParts;
-  nocase;
-  isWindows;
-  platform;
-  windowsNoMagicRoot;
-  regexp;
-  constructor(pattern, options = {}) {
-    assertValidPattern(pattern);
-    options = options || {};
-    this.options = options;
-    this.pattern = pattern;
-    this.platform = options.platform || defaultPlatform;
-    this.isWindows = this.platform === "win32";
-    this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-    if (this.windowsPathsNoEscape) {
-      this.pattern = this.pattern.replace(/\\/g, "/");
-    }
-    this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-    this.regexp = null;
-    this.negate = false;
-    this.nonegate = !!options.nonegate;
-    this.comment = false;
-    this.empty = false;
-    this.partial = !!options.partial;
-    this.nocase = !!this.options.nocase;
-    this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? options.windowsNoMagicRoot : !!(this.isWindows && this.nocase);
-    this.globSet = [];
-    this.globParts = [];
-    this.set = [];
-    this.make();
-  }
-  hasMagic() {
-    if (this.options.magicalBraces && this.set.length > 1) {
-      return true;
-    }
-    for (const pattern of this.set) {
-      for (const part of pattern) {
-        if (typeof part !== "string")
-          return true;
-      }
-    }
-    return false;
-  }
-  debug(..._) {
-  }
-  make() {
-    const pattern = this.pattern;
-    const options = this.options;
-    if (!options.nocomment && pattern.charAt(0) === "#") {
-      this.comment = true;
-      return;
-    }
-    if (!pattern) {
-      this.empty = true;
-      return;
-    }
-    this.parseNegate();
-    this.globSet = [...new Set(this.braceExpand())];
-    if (options.debug) {
-      this.debug = (...args) => console.error(...args);
-    }
-    this.debug(this.pattern, this.globSet);
-    const rawGlobParts = this.globSet.map((s) => this.slashSplit(s));
-    this.globParts = this.preprocess(rawGlobParts);
-    this.debug(this.pattern, this.globParts);
-    let set = this.globParts.map((s, _, __) => {
-      if (this.isWindows && this.windowsNoMagicRoot) {
-        const isUNC = s[0] === "" && s[1] === "" && (s[2] === "?" || !globMagic.test(s[2])) && !globMagic.test(s[3]);
-        const isDrive = /^[a-z]:/i.test(s[0]);
-        if (isUNC) {
-          return [...s.slice(0, 4), ...s.slice(4).map((ss) => this.parse(ss))];
-        } else if (isDrive) {
-          return [s[0], ...s.slice(1).map((ss) => this.parse(ss))];
-        }
-      }
-      return s.map((ss) => this.parse(ss));
-    });
-    this.debug(this.pattern, set);
-    this.set = set.filter((s) => s.indexOf(false) === -1);
-    if (this.isWindows) {
-      for (let i = 0; i < this.set.length; i++) {
-        const p = this.set[i];
-        if (p[0] === "" && p[1] === "" && this.globParts[i][2] === "?" && typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) {
-          p[2] = "?";
-        }
-      }
-    }
-    this.debug(this.pattern, this.set);
-  }
-  // various transforms to equivalent pattern sets that are
-  // faster to process in a filesystem walk.  The goal is to
-  // eliminate what we can, and push all ** patterns as far
-  // to the right as possible, even if it increases the number
-  // of patterns that we have to process.
-  preprocess(globParts) {
-    if (this.options.noglobstar) {
-      for (let i = 0; i < globParts.length; i++) {
-        for (let j = 0; j < globParts[i].length; j++) {
-          if (globParts[i][j] === "**") {
-            globParts[i][j] = "*";
-          }
-        }
-      }
-    }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      globParts = this.firstPhasePreProcess(globParts);
-      globParts = this.secondPhasePreProcess(globParts);
-    } else if (optimizationLevel >= 1) {
-      globParts = this.levelOneOptimize(globParts);
-    } else {
-      globParts = this.adjascentGlobstarOptimize(globParts);
-    }
-    return globParts;
-  }
-  // just get rid of adjascent ** portions
-  adjascentGlobstarOptimize(globParts) {
-    return globParts.map((parts) => {
-      let gs = -1;
-      while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
-        let i = gs;
-        while (parts[i + 1] === "**") {
-          i++;
-        }
-        if (i !== gs) {
-          parts.splice(gs, i - gs);
-        }
-      }
-      return parts;
-    });
-  }
-  // get rid of adjascent ** and resolve .. portions
-  levelOneOptimize(globParts) {
-    return globParts.map((parts) => {
-      parts = parts.reduce((set, part) => {
-        const prev = set[set.length - 1];
-        if (part === "**" && prev === "**") {
-          return set;
-        }
-        if (part === "..") {
-          if (prev && prev !== ".." && prev !== "." && prev !== "**") {
-            set.pop();
-            return set;
-          }
-        }
-        set.push(part);
-        return set;
-      }, []);
-      return parts.length === 0 ? [""] : parts;
-    });
-  }
-  levelTwoFileOptimize(parts) {
-    if (!Array.isArray(parts)) {
-      parts = this.slashSplit(parts);
-    }
-    let didSomething = false;
-    do {
-      didSomething = false;
-      if (!this.preserveMultipleSlashes) {
-        for (let i = 1; i < parts.length - 1; i++) {
-          const p = parts[i];
-          if (i === 1 && p === "" && parts[0] === "")
-            continue;
-          if (p === "." || p === "") {
-            didSomething = true;
-            parts.splice(i, 1);
-            i--;
-          }
-        }
-        if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-          didSomething = true;
-          parts.pop();
-        }
-      }
-      let dd = 0;
-      while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-        const p = parts[dd - 1];
-        if (p && p !== "." && p !== ".." && p !== "**") {
-          didSomething = true;
-          parts.splice(dd - 1, 2);
-          dd -= 2;
-        }
-      }
-    } while (didSomething);
-    return parts.length === 0 ? [""] : parts;
-  }
-  // First phase: single-pattern processing
-  // 
 is 1 or more portions
-  //  is 1 or more portions
-  // 

is any portion other than ., .., '', or ** - // is . or '' - // - // **/.. is *brutal* for filesystem walking performance, because - // it effectively resets the recursive walk each time it occurs, - // and ** cannot be reduced out by a .. pattern part like a regexp - // or most strings (other than .., ., and '') can be. - // - //

/**/../

/

/ -> {

/../

/

/,

/**/

/

/} - //

// -> 
/
-  // 
/

/../ ->

/
-  // **/**/ -> **/
-  //
-  // **/*/ -> */**/ <== not valid because ** doesn't follow
-  // this WOULD be allowed if ** did follow symlinks, or * didn't
-  firstPhasePreProcess(globParts) {
-    let didSomething = false;
-    do {
-      didSomething = false;
-      for (let parts of globParts) {
-        let gs = -1;
-        while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
-          let gss = gs;
-          while (parts[gss + 1] === "**") {
-            gss++;
-          }
-          if (gss > gs) {
-            parts.splice(gs + 1, gss - gs);
-          }
-          let next = parts[gs + 1];
-          const p = parts[gs + 2];
-          const p2 = parts[gs + 3];
-          if (next !== "..")
-            continue;
-          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
-            continue;
-          }
-          didSomething = true;
-          parts.splice(gs, 1);
-          const other = parts.slice(0);
-          other[gs] = "**";
-          globParts.push(other);
-          gs--;
-        }
-        if (!this.preserveMultipleSlashes) {
-          for (let i = 1; i < parts.length - 1; i++) {
-            const p = parts[i];
-            if (i === 1 && p === "" && parts[0] === "")
-              continue;
-            if (p === "." || p === "") {
-              didSomething = true;
-              parts.splice(i, 1);
-              i--;
-            }
-          }
-          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-            didSomething = true;
-            parts.pop();
-          }
-        }
-        let dd = 0;
-        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-          const p = parts[dd - 1];
-          if (p && p !== "." && p !== ".." && p !== "**") {
-            didSomething = true;
-            const needDot = dd === 1 && parts[dd + 1] === "**";
-            const splin = needDot ? ["."] : [];
-            parts.splice(dd - 1, 2, ...splin);
-            if (parts.length === 0)
-              parts.push("");
-            dd -= 2;
-          }
-        }
-      }
-    } while (didSomething);
-    return globParts;
-  }
-  // second phase: multi-pattern dedupes
-  // {
/*/,
/

/} ->

/*/
-  // {
/,
/} -> 
/
-  // {
/**/,
/} -> 
/**/
-  //
-  // {
/**/,
/**/

/} ->

/**/
-  // ^-- not valid because ** doens't follow symlinks
-  secondPhasePreProcess(globParts) {
-    for (let i = 0; i < globParts.length - 1; i++) {
-      for (let j = i + 1; j < globParts.length; j++) {
-        const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-        if (!matched)
-          continue;
-        globParts[i] = matched;
-        globParts[j] = [];
-      }
-    }
-    return globParts.filter((gs) => gs.length);
-  }
-  partsMatch(a, b, emptyGSMatch = false) {
-    let ai = 0;
-    let bi = 0;
-    let result = [];
-    let which = "";
-    while (ai < a.length && bi < b.length) {
-      if (a[ai] === b[bi]) {
-        result.push(which === "b" ? b[bi] : a[ai]);
-        ai++;
-        bi++;
-      } else if (emptyGSMatch && a[ai] === "**" && b[bi] === a[ai + 1]) {
-        result.push(a[ai]);
-        ai++;
-      } else if (emptyGSMatch && b[bi] === "**" && a[ai] === b[bi + 1]) {
-        result.push(b[bi]);
-        bi++;
-      } else if (a[ai] === "*" && b[bi] && (this.options.dot || !b[bi].startsWith(".")) && b[bi] !== "**") {
-        if (which === "b")
-          return false;
-        which = "a";
-        result.push(a[ai]);
-        ai++;
-        bi++;
-      } else if (b[bi] === "*" && a[ai] && (this.options.dot || !a[ai].startsWith(".")) && a[ai] !== "**") {
-        if (which === "a")
-          return false;
-        which = "b";
-        result.push(b[bi]);
-        ai++;
-        bi++;
-      } else {
-        return false;
-      }
-    }
-    return a.length === b.length && result;
-  }
-  parseNegate() {
-    if (this.nonegate)
-      return;
-    const pattern = this.pattern;
-    let negate = false;
-    let negateOffset = 0;
-    for (let i = 0; i < pattern.length && pattern.charAt(i) === "!"; i++) {
-      negate = !negate;
-      negateOffset++;
-    }
-    if (negateOffset)
-      this.pattern = pattern.slice(negateOffset);
-    this.negate = negate;
-  }
-  // set partial to true to test if, for example,
-  // "/a/b" matches the start of "/*/b/*/d"
-  // Partial means, if you run out of file before you run
-  // out of pattern, then that's fine, as long as all
-  // the parts match.
-  matchOne(file, pattern, partial = false) {
-    const options = this.options;
-    if (this.isWindows) {
-      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
-      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
-      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
-      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
-      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
-      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
-      if (typeof fdi === "number" && typeof pdi === "number") {
-        const [fd, pd] = [file[fdi], pattern[pdi]];
-        if (fd.toLowerCase() === pd.toLowerCase()) {
-          pattern[pdi] = fd;
-          if (pdi > fdi) {
-            pattern = pattern.slice(pdi);
-          } else if (fdi > pdi) {
-            file = file.slice(fdi);
-          }
-        }
-      }
-    }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      file = this.levelTwoFileOptimize(file);
-    }
-    this.debug("matchOne", this, { file, pattern });
-    this.debug("matchOne", file.length, pattern.length);
-    for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-      this.debug("matchOne loop");
-      var p = pattern[pi];
-      var f = file[fi];
-      this.debug(pattern, p, f);
-      if (p === false) {
-        return false;
-      }
-      if (p === GLOBSTAR) {
-        this.debug("GLOBSTAR", [pattern, p, f]);
-        var fr = fi;
-        var pr = pi + 1;
-        if (pr === pl) {
-          this.debug("** at the end");
-          for (; fi < fl; fi++) {
-            if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".")
-              return false;
-          }
-          return true;
-        }
-        while (fr < fl) {
-          var swallowee = file[fr];
-          this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
-          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-            this.debug("globstar found match!", fr, fl, swallowee);
-            return true;
-          } else {
-            if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
-              this.debug("dot detected!", file, fr, pattern, pr);
-              break;
-            }
-            this.debug("globstar swallow a segment, and continue");
-            fr++;
-          }
-        }
-        if (partial) {
-          this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
-          if (fr === fl) {
-            return true;
-          }
-        }
-        return false;
-      }
-      let hit;
-      if (typeof p === "string") {
-        hit = f === p;
-        this.debug("string match", p, f, hit);
-      } else {
-        hit = p.test(f);
-        this.debug("pattern match", p, f, hit);
-      }
-      if (!hit)
-        return false;
-    }
-    if (fi === fl && pi === pl) {
-      return true;
-    } else if (fi === fl) {
-      return partial;
-    } else if (pi === pl) {
-      return fi === fl - 1 && file[fi] === "";
-    } else {
-      throw new Error("wtf?");
-    }
-  }
-  braceExpand() {
-    return braceExpand(this.pattern, this.options);
-  }
-  parse(pattern) {
-    assertValidPattern(pattern);
-    const options = this.options;
-    if (pattern === "**")
-      return GLOBSTAR;
-    if (pattern === "")
-      return "";
-    let m;
-    let fastTest = null;
-    if (m = pattern.match(starRE)) {
-      fastTest = options.dot ? starTestDot : starTest;
-    } else if (m = pattern.match(starDotExtRE)) {
-      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m[1]);
-    } else if (m = pattern.match(qmarksRE)) {
-      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m);
-    } else if (m = pattern.match(starDotStarRE)) {
-      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-    } else if (m = pattern.match(dotStarRE)) {
-      fastTest = dotStarTest;
-    }
-    const re = AST.fromGlob(pattern, this.options).toMMPattern();
-    return fastTest ? Object.assign(re, { test: fastTest }) : re;
-  }
-  makeRe() {
-    if (this.regexp || this.regexp === false)
-      return this.regexp;
-    const set = this.set;
-    if (!set.length) {
-      this.regexp = false;
-      return this.regexp;
-    }
-    const options = this.options;
-    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
-    const flags = new Set(options.nocase ? ["i"] : []);
-    let re = set.map((pattern) => {
-      const pp = pattern.map((p) => {
-        if (p instanceof RegExp) {
-          for (const f of p.flags.split(""))
-            flags.add(f);
-        }
-        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
-      });
-      pp.forEach((p, i) => {
-        const next = pp[i + 1];
-        const prev = pp[i - 1];
-        if (p !== GLOBSTAR || prev === GLOBSTAR) {
-          return;
-        }
-        if (prev === void 0) {
-          if (next !== void 0 && next !== GLOBSTAR) {
-            pp[i + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
-          } else {
-            pp[i] = twoStar;
-          }
-        } else if (next === void 0) {
-          pp[i - 1] = prev + "(?:\\/|" + twoStar + ")?";
-        } else if (next !== GLOBSTAR) {
-          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
-          pp[i + 1] = GLOBSTAR;
-        }
-      });
-      return pp.filter((p) => p !== GLOBSTAR).join("/");
-    }).join("|");
-    const [open, close] = set.length > 1 ? ["(?:", ")"] : ["", ""];
-    re = "^" + open + re + close + "$";
-    if (this.negate)
-      re = "^(?!" + re + ").+$";
-    try {
-      this.regexp = new RegExp(re, [...flags].join(""));
-    } catch (ex) {
-      this.regexp = false;
-    }
-    return this.regexp;
-  }
-  slashSplit(p) {
-    if (this.preserveMultipleSlashes) {
-      return p.split("/");
-    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-      return ["", ...p.split(/\/+/)];
-    } else {
-      return p.split(/\/+/);
-    }
-  }
-  match(f, partial = this.partial) {
-    this.debug("match", f, this.pattern);
-    if (this.comment) {
-      return false;
-    }
-    if (this.empty) {
-      return f === "";
-    }
-    if (f === "/" && partial) {
-      return true;
-    }
-    const options = this.options;
-    if (this.isWindows) {
-      f = f.split("\\").join("/");
-    }
-    const ff = this.slashSplit(f);
-    this.debug(this.pattern, "split", ff);
-    const set = this.set;
-    this.debug(this.pattern, "set", set);
-    let filename = ff[ff.length - 1];
-    if (!filename) {
-      for (let i = ff.length - 2; !filename && i >= 0; i--) {
-        filename = ff[i];
-      }
-    }
-    for (let i = 0; i < set.length; i++) {
-      const pattern = set[i];
-      let file = ff;
-      if (options.matchBase && pattern.length === 1) {
-        file = [filename];
-      }
-      const hit = this.matchOne(file, pattern, partial);
-      if (hit) {
-        if (options.flipNegate) {
-          return true;
-        }
-        return !this.negate;
-      }
-    }
-    if (options.flipNegate) {
-      return false;
-    }
-    return this.negate;
-  }
-  static defaults(def) {
-    return minimatch.defaults(def).Minimatch;
-  }
-};
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape;
-
-// node_modules/it-glob/dist/src/index.js
-async function* glob(dir, pattern, options = {}) {
-  const absoluteDir = path2.resolve(dir);
-  const relativeDir = path2.relative(options.cwd ?? process.cwd(), dir);
-  const stats = await fs4.stat(absoluteDir);
-  if (stats.isDirectory()) {
-    for await (const entry of _glob(absoluteDir, "", pattern, options)) {
-      yield entry;
-    }
-    return;
-  }
-  if (minimatch(relativeDir, pattern, options)) {
-    yield options.absolute === true ? absoluteDir : relativeDir;
-  }
-}
-async function* _glob(base3, dir, pattern, options) {
-  for await (const entry of await fs4.opendir(path2.join(base3, dir))) {
-    const relativeEntryPath = path2.join(dir, entry.name);
-    const absoluteEntryPath = path2.join(base3, dir, entry.name);
-    let match2 = minimatch(relativeEntryPath, pattern, options);
-    const isDirectory = entry.isDirectory();
-    if (isDirectory && options.nodir === true) {
-      match2 = false;
-    }
-    if (match2) {
-      yield options.absolute === true ? absoluteEntryPath : relativeEntryPath;
-    }
-    if (isDirectory) {
-      yield* _glob(base3, relativeEntryPath, pattern, options);
-    }
-  }
-}
-
-// node_modules/@helia/unixfs/dist/src/index.js
-var DefaultUnixFS = class {
-  components;
-  constructor(components) {
-    this.components = components;
-  }
-  async *addAll(source, options = {}) {
-    yield* addAll(source, this.components.blockstore, options);
-  }
-  async addBytes(bytes, options = {}) {
-    return addBytes(bytes, this.components.blockstore, options);
-  }
-  async addByteStream(bytes, options = {}) {
-    return addByteStream(bytes, this.components.blockstore, options);
-  }
-  async addFile(file, options = {}) {
-    return addFile(file, this.components.blockstore, options);
-  }
-  async addDirectory(dir = {}, options = {}) {
-    return addDirectory(dir, this.components.blockstore, options);
-  }
-  async *cat(cid, options = {}) {
-    yield* cat(cid, this.components.blockstore, options);
-  }
-  async chmod(cid, mode, options = {}) {
-    return chmod(cid, mode, this.components.blockstore, options);
-  }
-  async cp(source, target, name4, options = {}) {
-    return cp(source, target, name4, this.components.blockstore, options);
-  }
-  async *ls(cid, options = {}) {
-    yield* ls(cid, this.components.blockstore, options);
-  }
-  async mkdir(cid, dirname, options = {}) {
-    return mkdir(cid, dirname, this.components.blockstore, options);
-  }
-  async rm(cid, path6, options = {}) {
-    return rm(cid, path6, this.components.blockstore, options);
-  }
-  async stat(cid, options = {}) {
-    return stat(cid, this.components.blockstore, options);
-  }
-  async touch(cid, options = {}) {
-    return touch(cid, this.components.blockstore, options);
-  }
-};
-function unixfs(helia) {
-  return new DefaultUnixFS(helia);
-}
-
-// node_modules/blockstore-fs/dist/src/index.js
-import fs5 from "node:fs/promises";
-import path4 from "node:path";
-import { promisify as promisify3 } from "node:util";
-
-// node_modules/blockstore-core/dist/src/errors.js
-var errors_exports = {};
-__export(errors_exports, {
-  abortedError: () => abortedError,
-  closeFailedError: () => closeFailedError,
-  deleteFailedError: () => deleteFailedError,
-  getFailedError: () => getFailedError,
-  hasFailedError: () => hasFailedError,
-  notFoundError: () => notFoundError,
-  openFailedError: () => openFailedError,
-  putFailedError: () => putFailedError
-});
-var import_err_code16 = __toESM(require_err_code(), 1);
-function openFailedError(err) {
-  err = err ?? new Error("Open failed");
-  return (0, import_err_code16.default)(err, "ERR_OPEN_FAILED");
-}
-function closeFailedError(err) {
-  err = err ?? new Error("Close failed");
-  return (0, import_err_code16.default)(err, "ERR_CLOSE_FAILED");
-}
-function putFailedError(err) {
-  err = err ?? new Error("Put failed");
-  return (0, import_err_code16.default)(err, "ERR_PUT_FAILED");
-}
-function getFailedError(err) {
-  err = err ?? new Error("Get failed");
-  return (0, import_err_code16.default)(err, "ERR_GET_FAILED");
-}
-function deleteFailedError(err) {
-  err = err ?? new Error("Delete failed");
-  return (0, import_err_code16.default)(err, "ERR_DELETE_FAILED");
-}
-function hasFailedError(err) {
-  err = err ?? new Error("Has failed");
-  return (0, import_err_code16.default)(err, "ERR_HAS_FAILED");
-}
-function notFoundError(err) {
-  err = err ?? new Error("Not Found");
-  return (0, import_err_code16.default)(err, "ERR_NOT_FOUND");
-}
-function abortedError(err) {
-  err = err ?? new Error("Aborted");
-  return (0, import_err_code16.default)(err, "ERR_ABORTED");
-}
-
-// node_modules/blockstore-core/node_modules/@libp2p/logger/dist/src/index.js
-var import_debug2 = __toESM(require_src2(), 1);
-import_debug2.default.formatters.b = (v) => {
-  return v == null ? "undefined" : base58btc2.baseEncode(v);
-};
-import_debug2.default.formatters.t = (v) => {
-  return v == null ? "undefined" : base322.baseEncode(v);
-};
-import_debug2.default.formatters.m = (v) => {
-  return v == null ? "undefined" : base64.baseEncode(v);
-};
-import_debug2.default.formatters.p = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug2.default.formatters.c = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug2.default.formatters.k = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-import_debug2.default.formatters.a = (v) => {
-  return v == null ? "undefined" : v.toString();
-};
-function createDisabledLogger2(namespace) {
-  const logger3 = () => {
-  };
-  logger3.enabled = false;
-  logger3.color = "";
-  logger3.diff = 0;
-  logger3.log = () => {
-  };
-  logger3.namespace = namespace;
-  logger3.destroy = () => true;
-  logger3.extend = () => logger3;
-  return logger3;
-}
-function logger2(name4) {
-  let trace = createDisabledLogger2(`${name4}:trace`);
-  if (import_debug2.default.enabled(`${name4}:trace`) && import_debug2.default.names.map((r) => r.toString()).find((n) => n.includes(":trace")) != null) {
-    trace = (0, import_debug2.default)(`${name4}:trace`);
-  }
-  return Object.assign((0, import_debug2.default)(name4), {
-    error: (0, import_debug2.default)(`${name4}:error`),
-    trace
-  });
-}
-
-// node_modules/blockstore-core/dist/src/tiered.js
-var log11 = logger2("blockstore:core:tiered");
-
-// node_modules/blockstore-core/dist/src/index.js
-var Errors = {
-  ...errors_exports
-};
-
-// node_modules/blockstore-fs/dist/src/index.js
-var import_fast_write_atomic = __toESM(require_fast_write_atomic(), 1);
-
-// node_modules/blockstore-fs/dist/src/sharding.js
-import path3 from "node:path";
-var NextToLast = class {
-  extension;
-  prefixLength;
-  base;
-  constructor(init = {}) {
-    this.extension = init.extension ?? ".data";
-    this.prefixLength = init.prefixLength ?? 2;
-    this.base = init.base ?? base32upper2;
-  }
-  encode(cid) {
-    const str = this.base.encoder.encode(cid.multihash.bytes);
-    const prefix = str.substring(str.length - this.prefixLength);
-    return {
-      dir: prefix,
-      file: `${str}${this.extension}`
-    };
-  }
-  decode(str) {
-    let fileName = path3.basename(str);
-    if (fileName.endsWith(this.extension)) {
-      fileName = fileName.substring(0, fileName.length - this.extension.length);
-    }
-    return CID2.decode(this.base.decoder.decode(fileName));
-  }
-};
-
-// node_modules/blockstore-fs/dist/src/index.js
-var writeAtomic = promisify3(import_fast_write_atomic.default);
-async function writeFile(file, contents) {
-  try {
-    await writeAtomic(file, contents);
-  } catch (err) {
-    if (err.code === "EPERM" && err.syscall === "rename") {
-      await fs5.access(file, fs5.constants.F_OK | fs5.constants.W_OK);
-      return;
-    }
-    throw err;
-  }
-}
-var FsBlockstore = class {
-  path;
-  createIfMissing;
-  errorIfExists;
-  putManyConcurrency;
-  getManyConcurrency;
-  deleteManyConcurrency;
-  shardingStrategy;
-  constructor(location, init = {}) {
-    this.path = path4.resolve(location);
-    this.createIfMissing = init.createIfMissing ?? true;
-    this.errorIfExists = init.errorIfExists ?? false;
-    this.deleteManyConcurrency = init.deleteManyConcurrency ?? 50;
-    this.getManyConcurrency = init.getManyConcurrency ?? 50;
-    this.putManyConcurrency = init.putManyConcurrency ?? 50;
-    this.shardingStrategy = init.shardingStrategy ?? new NextToLast();
-  }
-  async open() {
-    try {
-      await fs5.access(this.path, fs5.constants.F_OK | fs5.constants.W_OK);
-      if (this.errorIfExists) {
-        throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} already exists`));
-      }
-    } catch (err) {
-      if (err.code === "ENOENT") {
-        if (this.createIfMissing) {
-          await fs5.mkdir(this.path, { recursive: true });
-          return;
-        } else {
-          throw Errors.openFailedError(new Error(`Blockstore directory: ${this.path} does not exist`));
-        }
-      }
-      throw err;
-    }
-  }
-  async close() {
-    await Promise.resolve();
-  }
-  async put(key, val) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      if (dir != null && dir !== "") {
-        await fs5.mkdir(path4.join(this.path, dir), {
-          recursive: true
-        });
-      }
-      await writeFile(path4.join(this.path, dir, file), val);
-      return key;
-    } catch (err) {
-      throw Errors.putFailedError(err);
-    }
-  }
-  async *putMany(source) {
-    yield* parallelBatch(src_default3(source, ({ cid, block }) => {
-      return async () => {
-        await this.put(cid, block);
-        return cid;
-      };
-    }), this.putManyConcurrency);
-  }
-  async get(key) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      return await fs5.readFile(path4.join(this.path, dir, file));
-    } catch (err) {
-      throw Errors.notFoundError(err);
-    }
-  }
-  async *getMany(source) {
-    yield* parallelBatch(src_default3(source, (key) => {
-      return async () => {
-        return {
-          cid: key,
-          block: await this.get(key)
-        };
-      };
-    }), this.getManyConcurrency);
-  }
-  async delete(key) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      await fs5.unlink(path4.join(this.path, dir, file));
-    } catch (err) {
-      if (err.code === "ENOENT") {
-        return;
-      }
-      throw Errors.deleteFailedError(err);
-    }
-  }
-  async *deleteMany(source) {
-    yield* parallelBatch(src_default3(source, (key) => {
-      return async () => {
-        await this.delete(key);
-        return key;
-      };
-    }), this.deleteManyConcurrency);
-  }
-  /**
-   * Check for the existence of the given key
-   */
-  async has(key) {
-    const { dir, file } = this.shardingStrategy.encode(key);
-    try {
-      await fs5.access(path4.join(this.path, dir, file));
-    } catch (err) {
-      return false;
-    }
-    return true;
-  }
-  async *getAll() {
-    const pattern = `**/*${this.shardingStrategy.extension}`.split(path4.sep).join("/");
-    const files = glob(this.path, pattern, {
-      absolute: true
-    });
-    for await (const file of files) {
-      try {
-        const buf2 = await fs5.readFile(file);
-        const pair = {
-          cid: this.shardingStrategy.decode(file),
-          block: buf2
-        };
-        yield pair;
-      } catch (err) {
-        if (err.code !== "ENOENT") {
-          throw err;
-        }
-      }
-    }
-  }
-};
-
-// src/objectManager.js
-import { createReadStream, createWriteStream } from "node:fs";
-import { mkdir as mkdir2, rm as rm2 } from "node:fs/promises";
-import os from "node:os";
-import path5 from "node:path";
-import { Readable } from "node:stream";
-import { v4 as uuidv4 } from "uuid";
-var ObjectManager = class {
-  #DEFAULT_ENDPOINT = "https://s3.filebase.com";
-  #DEFAULT_REGION = "us-east-1";
-  #DEFAULT_MAX_CONCURRENT_UPLOADS = 4;
-  #client;
-  #credentials;
-  #defaultBucket;
-  #gatewayConfiguration;
-  #maxConcurrentUploads;
-  /**
-   * @typedef {Object} objectManagerOptions Optional settings for the constructor.
-   * @property {string} [bucket] Default bucket to use.
-   * @property {objectDownloadOptions} [gateway] Default gateway to use.
-   * @property {number} [maxConcurrentUploads] The maximum number of concurrent uploads.
-   */
-  /**
-   * @typedef {Object} objectDownloadOptions Optional settings for downloading objects
-   * @property {string} endpoint Default gateway to use.
-   * @property {string} [token] Token for the default gateway.
-   * @property {number} [timeout=60000] Timeout for the default gateway
-   */
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @param {objectManagerOptions} options - Optional settings for the constructor.
-   * @tutorial quickstart-object
-   * @example
-   * import { ObjectManager } from "@filebase/sdk";
-   * const objectManager = new ObjectManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
-   *   bucket: "my-default-bucket",
-   *   maxConcurrentUploads: 4,
-   *   gateway: {
-   *     endpoint: "https://my-default-gateway.mydomain.com
-   *     token: SUPER_SECRET_GATEWAY_TOKEN
-   *   }
-   * });
-   */
-  constructor(clientKey, clientSecret, options) {
-    var _a, _b, _c;
-    const clientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_S3_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, clientConfiguration = {
-      credentials: {
-        accessKeyId: clientKey,
-        secretAccessKey: clientSecret
-      },
-      endpoint: clientEndpoint,
-      region: this.#DEFAULT_REGION,
-      forcePathStyle: true
-    };
-    this.#defaultBucket = options == null ? void 0 : options.bucket;
-    this.#maxConcurrentUploads = (options == null ? void 0 : options.maxConcurrentUploads) || this.#DEFAULT_MAX_CONCURRENT_UPLOADS;
-    this.#credentials = {
-      key: clientKey,
-      secret: clientSecret
-    };
-    this.#client = new S3Client2(clientConfiguration);
-    this.#gatewayConfiguration = {
-      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
-      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
-      timeout: (_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout
-    };
-  }
-  /**
-   * @typedef {Object} objectOptions
-   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
-   */
-  /**
-   * @typedef {Object} objectHeadResult
-   * @property {string} cid The CID of the uploaded object
-   * @property {function} download Convenience function to download the object via S3 or the selected gateway
-   * @property {array} [entries] If a directory then returns an array of the containing objects
-   * @property {string} entries.cid The CID of the uploaded object
-   * @property {string} entries.path The path of the object
-   */
-  /**
-   * If the source parameter is an array of objects, it will pack multiple files into a CAR file for upload.
-   * The method returns a Promise that resolves to an object containing the CID (Content Identifier) of the uploaded file
-   * and an optional entries object when uploading a CAR file.
-   *
-   * @summary Uploads a file or a CAR file to the specified bucket.
-   * @param {string} key - The key or path of the file in the bucket.
-   * @param {Buffer|ReadableStream|Array} source - The content of the object to be uploaded.
-   *    If an array of files is provided, each file should have a 'path' property specifying the path of the file
-   *    and a 'content' property specifying the content of the file.  The SDK will then construct a CAR file locally
-   *    and use that as the content of the object to be uploaded.
-   * @param {Object} [metadata] Optional metadata for pin object
-   * @param {objectOptions} [options] - The options for uploading the object.
-   * @returns {Promise}
-   * @example
-   * // Upload Object
-   * await objectManager.upload("my-object", Buffer.from("Hello World!"));
-   * // Upload Object with Metadata
-   * await objectManager.upload("my-custom-object", Buffer.from("Hello Big World!"), {
-   *   "application": "my-filebase-app"
-   * });
-   * // Upload Directory
-   * await objectManager.upload("my-first-directory", [
-   *  {
-   *   path: "/testObjects/1.txt",
-   *   content: Buffer.from("upload test object", "utf-8"),
-   *  },
-   *  {
-   *   path: "/testObjects/deep/1.txt",
-   *   content: Buffer.from("upload deep test object", "utf-8"),
-   *  },
-   *  {
-   *   path: "/topLevel.txt",
-   *   content: Buffer.from("upload top level test object", "utf-8"),
-   *  },
-   * ]);
-   */
-  async upload(key, source, metadata, options) {
-    const uploadUUID = uuidv4();
-    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket, uploadOptions = {
-      client: this.#client,
-      params: {
-        Bucket: bucket,
-        Key: key,
-        Body: source,
-        Metadata: metadata || {}
-      },
-      queueSize: this.#maxConcurrentUploads,
-      partSize: 26843546
-      //25.6Mb || 250Gb Max File Size
-    };
-    let parsedEntries = {};
-    if (Array.isArray(source)) {
-      uploadOptions.params.Metadata = {
-        ...uploadOptions.params.Metadata,
-        import: "car"
-      };
-      let temporaryCarFilePath, temporaryBlockstoreDir;
-      try {
-        temporaryBlockstoreDir = path5.resolve(
-          os.tmpdir(),
-          "filebase-sdk",
-          "uploads",
-          uploadUUID
-        );
-        temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
-        await mkdir2(temporaryBlockstoreDir, { recursive: true });
-        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir);
-        const heliaFs = unixfs({
-          blockstore: temporaryBlockstore
-        });
-        for (let sourceEntry of source) {
-          sourceEntry.path = sourceEntry.path[0] === "/" ? `/${uploadUUID}${sourceEntry.path}` : `/${uploadUUID}/${sourceEntry.path}`;
-        }
-        for await (const entry of heliaFs.addAll(source)) {
-          parsedEntries[entry.path] = entry;
-        }
-        const rootEntry = parsedEntries[uploadUUID];
-        const carExporter = car({ blockstore: temporaryBlockstore }), { writer, out } = CarWriter2.create([rootEntry.cid]);
-        const output = createWriteStream(temporaryCarFilePath);
-        Readable.from(out).pipe(output);
-        await carExporter.export(rootEntry.cid, writer);
-        uploadOptions.params.Body = createReadStream(temporaryCarFilePath);
-        const parallelUploads3 = new Upload(uploadOptions);
-        await parallelUploads3.done();
-        await temporaryBlockstore.close();
-      } finally {
-        if (typeof temporaryBlockstoreDir !== "undefined") {
-          await rm2(temporaryBlockstoreDir, { recursive: true, force: true });
-        }
-      }
-    } else {
-      const parallelUploads3 = new Upload(uploadOptions);
-      await parallelUploads3.done();
-    }
-    const command = new HeadObjectCommand({
-      Bucket: bucket,
-      Key: key,
-      Body: source
-    }), headResult = await this.#client.send(command), responseCid = headResult.Metadata.cid;
-    if (Object.keys(parsedEntries).length === 0) {
-      return {
-        cid: responseCid,
-        download: () => {
-          return this.#routeDownload(responseCid, key, options);
-        }
-      };
-    }
-    return {
-      cid: responseCid,
-      download: () => {
-        return this.#routeDownload(responseCid, key, options);
-      },
-      entries: parsedEntries
-    };
-  }
-  async #routeDownload(cid, key, options) {
-    return typeof this.#gatewayConfiguration.endpoint !== "undefined" ? downloadFromGateway(cid, this.#gatewayConfiguration) : this.download(key, options);
-  }
-  /**
-   * @summary Gets an objects info and metadata using the S3 API.
-   * @param {string} key - The key of the object to be inspected.
-   * @param {objectOptions} [options] - The options for inspecting the object.
-   * @returns {Promise}
-   */
-  async get(key, options) {
-    const bucket = (options == null ? void 0 : options.bucket) || this.#defaultBucket;
-    try {
-      const command = new HeadObjectCommand({
-        Bucket: bucket,
-        Key: key
-      }), response = await this.#client.send(command);
-      response.download = () => {
-        return this.#routeDownload(response.Metadata.cid, key, options);
-      };
-      return response;
-    } catch (err) {
-      if (err.name === "NotFound") {
-        return false;
-      }
-      throw err;
-    }
-  }
-  /**
-   * @summary Downloads an object from the specified bucket using the provided key.
-   * @param {string} key - The key of the object to be downloaded.
-   * @param {objectOptions} [options] - The options for downloading the object..
-   * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream.
-   * @example
-   * // Download object with name of `download-object-example`
-   * await objectManager.download(`download-object-example`);
-   */
-  async download(key, options) {
-    if (typeof this.#gatewayConfiguration.endpoint === "string") {
-      const objectToFetch = await this.get(key, options);
-      return objectToFetch.download();
-    } else {
-      const command = new GetObjectCommand({
-        Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
-        Key: key
-      }), response = await this.#client.send(command);
-      return response.Body;
-    }
-  }
-  /**
-   * @typedef {Object} listObjectsResult
-   * @property {boolean} IsTruncated Indicates if more results exist on the server
-   * @property {string} NextContinuationToken ContinuationToken used to paginate list requests
-   * @property {Array} Contents List of Keys stored in the S3 Bucket
-   * @property {string} Contents.Key Key of the Object
-   * @property {string} Contents.LastModified Date Last Modified of the Object
-   * @property {string} Contents.CID CID of the Object
-   * @property {string} Contents.ETag ETag of the Object
-   * @property {number} Contents.Size Size in Bytes of the Object
-   * @property {string} Contents.StorageClass Class of Storage of the Object
-   * @property {function} Contents.download Convenience function to download the item using the S3 gateway
-   */
-  /**
-   * @typedef {Object} listObjectOptions
-   * @property {string} [Bucket] The name of the bucket. If not provided, the default bucket will be used.
-   * @property {string} [ContinuationToken=null] Continues listing from this objects name.
-   * @property {string} [Delimiter=null] Character used to group keys
-   * @property {number} [MaxKeys=1000] The maximum number of objects to retrieve. Defaults to 1000.
-   */
-  /**
-   * Retrieves a list of objects from a specified bucket.
-   *
-   * @param {listObjectOptions} options - The options for listing objects.
-   * @returns {Promise} - A promise that resolves to an array of objects.
-   * @example
-   * // List objects in bucket with a limit of 1000
-   * await objectManager.list({
-   *   MaxKeys: 1000
-   * });
-   */
-  async list(options = {
-    Bucket: this.#defaultBucket,
-    ContinuationToken: null,
-    Delimiter: null,
-    MaxKeys: 1e3
-  }) {
-    if ((options == null ? void 0 : options.MaxKeys) && options.MaxKeys > 1e5) {
-      throw new Error(`MaxKeys Maximum value is 100000`);
-    }
-    const bucket = (options == null ? void 0 : options.Bucket) || this.#defaultBucket, limit = (options == null ? void 0 : options.MaxKeys) || 1e3, commandOptions = {
-      Bucket: bucket,
-      MaxKeys: limit
-    }, command = new ListObjectsV2Command({
-      ...options,
-      ...commandOptions
-    });
-    const { Contents, IsTruncated, NextContinuationToken } = await this.#client.send(command);
-    return { Contents, IsTruncated, NextContinuationToken };
-  }
-  /**
-   * @summary Deletes an object from the specified bucket using the provided key.
-   * @param {string} key - The key of the object to be deleted.
-   * @param {objectOptions} [options] - The options for deleting the file.
-   * @returns {Promise} - A Promise that resolves with the result of the delete operation.
-   * @example
-   * // Delete object with name of `delete-object-example`
-   * await objectManager.delete(`delete-object-example`);
-   */
-  async delete(key, options) {
-    const command = new DeleteObjectCommand({
-      Bucket: (options == null ? void 0 : options.bucket) || this.#defaultBucket,
-      Key: key
-    });
-    await this.#client.send(command);
-    return true;
-  }
-  /**
-   * @typedef {Object} copyObjectOptions
-   * @property {string} [sourceBucket] The source bucket from where the object is to be copied.
-   * @property {string} [destinationKey] The key of the object in the destination bucket. By default, it is the same as the sourceKey.
-   */
-  /**
-   * If the destinationKey is not provided, the object will be copied with the same key as the sourceKey.
-   *
-   * @summary Copy the object from sourceKey in the sourceBucket to destinationKey in the destinationBucket.
-   * @param {string} sourceKey - The key of the object to be copied from the sourceBucket.
-   * @param {string} destinationBucket - The bucket where the object will be copied to.
-   * @param {copyObjectOptions} [options] - Additional options for the copy operation.
-   *
-   * @returns {Promise} - A Promise that resolves with the result of the copy operation.
-   * @example
-   * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
-   * // TIP: Set bucket on constructor and it will be used as the default source for copying objects.
-   * await objectManager.copy(`copy-object-test`, `copy-object-dest`, {
-   *   sourceBucket: `copy-object-src`
-   * });
-   */
-  async copy(sourceKey, destinationBucket, options = {
-    sourceBucket: this.#defaultBucket,
-    destinationKey: void 0
-  }) {
-    const copySource = `${(options == null ? void 0 : options.sourceBucket) || this.#defaultBucket}/${sourceKey}`, command = new CopyObjectCommand({
-      CopySource: copySource,
-      Bucket: destinationBucket,
-      Key: (options == null ? void 0 : options.destinationKey) || sourceKey
-    });
-    await this.#client.send(command);
-    return true;
-  }
-};
-var objectManager_default = ObjectManager;
-
-// src/pinManager.js
-import axios4 from "axios";
-var PinManager = class {
-  #DEFAULT_ENDPOINT = "https://api.filebase.io";
-  #DEFAULT_TIMEOUT = 6e4;
-  #client;
-  #credentials;
-  #gatewayConfiguration;
-  #defaultBucket;
-  /**
-   * @typedef {Object} pinManagerOptions Optional settings for the constructor.
-   * @property {string} [bucket] Default bucket to use.
-   * @property {pinDownloadOptions} [gateway] Default gateway to use.
-   */
-  /**
-   * @typedef {Object} pinDownloadOptions Optional settings for downloading pins
-   * @property {string} endpoint Default gateway to use.
-   * @property {string} [token] Token for the default gateway.
-   * @property {number} [timeout=60000] Timeout for the default gateway
-   */
-  /**
-   * @summary Creates a new instance of the constructor.
-   * @param {string} clientKey - The access key ID for authentication.
-   * @param {string} clientSecret - The secret access key for authentication.
-   * @param {pinManagerOptions} [options] - Optional settings for the constructor.
-   * @tutorial quickstart-pin
-   * @example
-   * import { PinManager } from "@filebase/sdk";
-   * const pinManager = new PinManager("KEY_FROM_DASHBOARD", "SECRET_FROM_DASHBOARD", {
-   *   bucket: "my-default-bucket",
-   *   gateway: {
-   *     endpoint: "https://my-default-gateway.mydomain.com
-   *     token: SUPER_SECRET_GATEWAY_TOKEN
-   *   }
-   * });
-   */
-  constructor(clientKey, clientSecret, options) {
-    var _a, _b, _c;
-    this.#defaultBucket = options == null ? void 0 : options.bucket;
-    const PSAClientEndpoint = process.env.NODE_ENV === "test" ? process.env.TEST_NAME_ENDPOINT || this.#DEFAULT_ENDPOINT : this.#DEFAULT_ENDPOINT, baseURL = `${PSAClientEndpoint}/v1/ipfs/pins`;
-    this.#credentials = {
-      key: clientKey,
-      secret: clientSecret
-    };
-    this.#client = axios4.create({
-      baseURL,
-      timeout: this.#DEFAULT_TIMEOUT
-    });
-    this.#gatewayConfiguration = {
-      endpoint: (_a = options == null ? void 0 : options.gateway) == null ? void 0 : _a.endpoint,
-      token: (_b = options == null ? void 0 : options.gateway) == null ? void 0 : _b.token,
-      timeout: ((_c = options == null ? void 0 : options.gateway) == null ? void 0 : _c.timeout) || this.#DEFAULT_TIMEOUT
-    };
-  }
-  /**
-   * @typedef {Object} pinStatus
-   * @property {string} requestid Globally unique identifier of the pin request; can be used to check the status of ongoing pinning, or pin removal
-   * @property {string} status Status a pin object can have at a pinning service. ("queued","pinning","pinned","failed")
-   * @property {string} created Immutable timestamp indicating when a pin request entered a pinning service; can be used for filtering results and pagination
-   * @property {Object} pin Pin object
-   * @property {string} pin.cid Content Identifier (CID) pinned recursively
-   * @property {string} pin.name Name for pinned data; can be used for lookups later
-   * @property {Array} pin.origins Optional list of multiaddrs known to provide the data
-   * @property {Object} pin.meta Optional metadata for pin object
-   * @property {Array} delegates List of multiaddrs designated by pinning service that will receive the pin data
-   * @property {object} [info] Optional info for PinStatus response
-   * @property {function} download Convenience function to download pin
-   */
-  /**
-   * @typedef {Object} pinOptions
-   * @property {string} [bucket] - The bucket to pin the IPFS CID into.
-   */
-  /**
-   * @typedef {Object} listPinOptions
-   * @property {Array} [cid] Return pin objects responsible for pinning the specified CID(s); be aware that using longer hash functions introduces further constraints on the number of CIDs that will fit under the limit of 2000 characters per URL in browser contexts
-   * @property {string} [name] Return pin objects with specified name (by default a case-sensitive, exact match)
-   * @property {string} [match] Customize the text matching strategy applied when the name filter is present; exact (the default) is a case-sensitive exact match, partial matches anywhere in the name, iexact and ipartial are case-insensitive versions of the exact and partial strategies
-   * @property {Array} [status] Return pin objects for pins with the specified status (when missing, service defaults to pinned only)
-   * @property {string} [before] Return results created (queued) before provided timestamp
-   * @property {string} [after] Return results created (queued) after provided timestamp
-   * @property {number} [limit] Max records to return
-   * @property {Object} [meta] Return pin objects that match specified metadata keys passed as a string representation of a JSON object; when implementing a client library, make sure the parameter is URL-encoded to ensure safe transport
-   */
-  /**
-   * @typedef {Object} listPinResults
-   * @property {number} count Total number of pin objects that exist for passed query filters
-   * @property {Array} Array of PinStatus results
-   */
-  /**
-   * @summary List the pins in a given bucket
-   * @param {listPinOptions} [listOptions]
-   * @param {pinOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // List pins in bucket with a limit of 1000
-   * await pinManager.list({
-   *   limit: 1000
-   * });
-   */
-  async list(listOptions, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponse = await this.#client.request({
-        method: "GET",
-        params: listOptions,
-        headers: { Authorization: `Bearer ${encodedToken}` }
-      });
-      for (let pinStatus of getResponse.data.results) {
-        pinStatus.download = () => {
-          return this.download(pinStatus.pin.cid);
-        };
-      }
-      return getResponse.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Create a pin in the selected bucket
-   * @param {string} key Key or path of the file in the bucket
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {Object} [metadata] Optional metadata for pin object
-   * @param {pinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Create Pin with Metadata
-   * await pinManager.create("my-pin", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "application": "my-custom-app-on-filebase"
-   * });
-   */
-  async create(key, cid, metadata, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatus = await this.#client.request({
-        method: "POST",
-        data: {
-          cid,
-          name: key,
-          meta: metadata
-        },
-        headers: { Authorization: `Bearer ${encodedToken}` }
-      });
-      pinStatus.data.download = () => {
-        return this.download(pinStatus.data.pin.cid);
-      };
-      return pinStatus.data;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @typedef {Object} replacePinOptions
-   * @augments pinOptions
-   * @property {Object} [metadata] Optional metadata to set on pin during replacement
-   * @property {string} [name] Optional name for pin to set during replacement
-   */
-  /**
-   * @summary Replace a pinned object in the selected bucket
-   * @param {string} requestid Unique ID for the pinned object
-   * @param {string} cid Content Identifier (CID) to be pinned recursively
-   * @param {replacePinOptions} [options] Options for pinning the object
-   * @returns {Promise}
-   * @example
-   * // Replace Pin with Metadata
-   * await pinManager.create("qr4231213", "QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF", {
-   *   "revision": Date.now()
-   * }
-   */
-  async replace(requestid, cid, options) {
-    try {
-      let replaceData = {
-        cid,
-        meta: (options == null ? void 0 : options.metadata) || {}
-      };
-      if (options == null ? void 0 : options.name) {
-        replaceData.name = options.name;
-      }
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), pinStatusResult = await this.#client.request({
-        method: "POST",
-        url: `/${requestid}`,
-        data: replaceData,
-        validateStatus: (status) => {
-          return status === 200;
-        },
-        headers: { Authorization: `Bearer ${encodedToken}` }
-      });
-      const pinStatus = pinStatusResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Download a pin from the selected IPFS gateway
-   * @param {string} cid
-   * @param {pinDownloadOptions} [options]
-   * @returns {Promise}
-   * @example
-   * // Download Pin by CID
-   * await pinManager.download("QmTJkc7crTuPG7xRmCQSz1yioBpCW3juFBtJPXhQfdCqGF");
-   */
-  async download(cid, options) {
-    const downloadOptions = Object.assign(this.#gatewayConfiguration, options);
-    return downloadFromGateway(cid, downloadOptions);
-  }
-  /**
-   * @summary Get details about a pinned object
-   * @param {string} requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for getting the pin
-   * @returns {Promise}
-   * @example
-   * // Get Pin Info by RequestId
-   * await pinManager.get("qr4231214");
-   */
-  async get(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket), getResponseResult = await this.#client.request({
-        method: "GET",
-        url: `/${requestid}`,
-        headers: { Authorization: `Bearer ${encodedToken}` },
-        validateStatus: (status) => {
-          return status === 200 || status === 404;
-        }
-      });
-      if (getResponseResult.status === 404) {
-        return false;
-      }
-      const pinStatus = getResponseResult.data;
-      pinStatus.download = () => {
-        return this.download(pinStatus.pin.cid);
-      };
-      return pinStatus;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  /**
-   * @summary Delete a pinned object from the selected bucket
-   * @param requestid Globally unique identifier of the pin request
-   * @param {pinOptions} [options] Options for deleting the pin
-   * @returns {Promise}
-   * @example
-   * // Delete Pin by RequestId
-   * await pinManager.delete("qr4231213");
-   */
-  async delete(requestid, options) {
-    try {
-      const encodedToken = this.#getEncodedToken(options == null ? void 0 : options.bucket);
-      await this.#client.request({
-        method: "DELETE",
-        url: `/${requestid}`,
-        headers: { Authorization: `Bearer ${encodedToken}` },
-        validateStatus: (status) => {
-          return status === 202;
-        }
-      });
-      return true;
-    } catch (err) {
-      apiErrorHandler(err);
-    }
-  }
-  #getEncodedToken(bucket) {
-    bucket = bucket || this.#defaultBucket;
-    return Buffer.from(
-      `${this.#credentials.key}:${this.#credentials.secret}:${bucket}`
-    ).toString("base64");
-  }
-};
-var pinManager_default = PinManager;
-export {
-  bucketManager_default as BucketManager,
-  gatewayManager_default as GatewayManager,
-  nameManager_default as NameManager,
-  objectManager_default as ObjectManager,
-  pinManager_default as PinManager
-};
diff --git a/package.json b/package.json
index 41e297c..907b37d 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@filebase/sdk",
-  "version": "1.0.2",
+  "version": "1.0.3",
   "description": "SDK for Interacting with Filebase Services [S3(Buckets, Objects), IPFS(Gateways, Pins) IPNS(Names)]",
   "repository": {
     "type": "git",
@@ -40,7 +40,7 @@
     "clean-jsdoc-theme": "4.2.17",
     "jsdoc": "4.0.2",
     "prettier": "3.1.0",
-    "tsup": "^8.0.1",
+    "tsup": "8.0.1",
     "typescript": "5.3.3"
   },
   "dependencies": {
diff --git a/yarn.lock b/yarn.lock
index 771fb84..5626756 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -59,7 +59,7 @@
     "@aws-sdk/util-utf8-browser" "^3.0.0"
     tslib "^1.11.1"
 
-"@aws-crypto/sha256-js@^3.0.0", "@aws-crypto/sha256-js@3.0.0":
+"@aws-crypto/sha256-js@3.0.0", "@aws-crypto/sha256-js@^3.0.0":
   version "3.0.0"
   resolved "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz"
   integrity sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==
@@ -84,7 +84,7 @@
     "@aws-sdk/util-utf8-browser" "^3.0.0"
     tslib "^1.11.1"
 
-"@aws-sdk/client-s3@^3.0.0", "@aws-sdk/client-s3@3.478.0":
+"@aws-sdk/client-s3@3.478.0":
   version "3.478.0"
   resolved "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.478.0.tgz"
   integrity sha512-OUpbCCnK71lQQ07BohJOx9ZER0rPqRAGOVIIVhNEkeN0uYFLzB7/o5a7+FEPUQXEd5rZRZgbxN5xEmnNW/0Waw==
@@ -528,7 +528,7 @@
     "@smithy/util-utf8" "^2.0.2"
     tslib "^2.5.0"
 
-"@aws-sdk/types@^3.222.0", "@aws-sdk/types@3.468.0":
+"@aws-sdk/types@3.468.0", "@aws-sdk/types@^3.222.0":
   version "3.468.0"
   resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.468.0.tgz"
   integrity sha512-rx/9uHI4inRbp2tw3Y4Ih4PNZkVj32h7WneSg3MVgVjAoVD5Zti9KhS5hkvsBxfgmQmg0AQbE+b1sy5WGAgntA==
@@ -611,11 +611,121 @@
   dependencies:
     "@chainsafe/is-ip" "^2.0.1"
 
+"@esbuild/aix-ppc64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz#d1bc06aedb6936b3b6d313bf809a5a40387d2b7f"
+  integrity sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==
+
+"@esbuild/android-arm64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz#7ad65a36cfdb7e0d429c353e00f680d737c2aed4"
+  integrity sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==
+
+"@esbuild/android-arm@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.19.12.tgz#b0c26536f37776162ca8bde25e42040c203f2824"
+  integrity sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==
+
+"@esbuild/android-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.19.12.tgz#cb13e2211282012194d89bf3bfe7721273473b3d"
+  integrity sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==
+
 "@esbuild/darwin-arm64@0.19.12":
   version "0.19.12"
   resolved "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz"
   integrity sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==
 
+"@esbuild/darwin-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz#e37d9633246d52aecf491ee916ece709f9d5f4cd"
+  integrity sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==
+
+"@esbuild/freebsd-arm64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz#1ee4d8b682ed363b08af74d1ea2b2b4dbba76487"
+  integrity sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==
+
+"@esbuild/freebsd-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz#37a693553d42ff77cd7126764b535fb6cc28a11c"
+  integrity sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==
+
+"@esbuild/linux-arm64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz#be9b145985ec6c57470e0e051d887b09dddb2d4b"
+  integrity sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==
+
+"@esbuild/linux-arm@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz#207ecd982a8db95f7b5279207d0ff2331acf5eef"
+  integrity sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==
+
+"@esbuild/linux-ia32@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz#d0d86b5ca1562523dc284a6723293a52d5860601"
+  integrity sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==
+
+"@esbuild/linux-loong64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz#9a37f87fec4b8408e682b528391fa22afd952299"
+  integrity sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==
+
+"@esbuild/linux-mips64el@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz#4ddebd4e6eeba20b509d8e74c8e30d8ace0b89ec"
+  integrity sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==
+
+"@esbuild/linux-ppc64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz#adb67dadb73656849f63cd522f5ecb351dd8dee8"
+  integrity sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==
+
+"@esbuild/linux-riscv64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz#11bc0698bf0a2abf8727f1c7ace2112612c15adf"
+  integrity sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==
+
+"@esbuild/linux-s390x@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz#e86fb8ffba7c5c92ba91fc3b27ed5a70196c3cc8"
+  integrity sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==
+
+"@esbuild/linux-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz#5f37cfdc705aea687dfe5dfbec086a05acfe9c78"
+  integrity sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==
+
+"@esbuild/netbsd-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz#29da566a75324e0d0dd7e47519ba2f7ef168657b"
+  integrity sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==
+
+"@esbuild/openbsd-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz#306c0acbdb5a99c95be98bdd1d47c916e7dc3ff0"
+  integrity sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==
+
+"@esbuild/sunos-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz#0933eaab9af8b9b2c930236f62aae3fc593faf30"
+  integrity sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==
+
+"@esbuild/win32-arm64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz#773bdbaa1971b36db2f6560088639ccd1e6773ae"
+  integrity sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==
+
+"@esbuild/win32-ia32@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz#000516cad06354cc84a73f0943a4aa690ef6fd67"
+  integrity sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==
+
+"@esbuild/win32-x64@0.19.12":
+  version "0.19.12"
+  resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz#c57c8afbb4054a3ab8317591a0b7320360b444ae"
+  integrity sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==
+
 "@helia/car@1.0.4":
   version "1.0.4"
   resolved "https://registry.npmjs.org/@helia/car/-/car-1.0.4.tgz"
@@ -669,16 +779,6 @@
     progress-events "^1.0.0"
     sparse-array "^1.3.2"
 
-"@ipld/car@^5.1.1":
-  version "5.2.6"
-  resolved "https://registry.npmjs.org/@ipld/car/-/car-5.2.6.tgz"
-  integrity sha512-ZiIYan7UFLLQsR90GpKOrZ0t6/6owrevJI7dCG8McNj0zUO4vGzsPumpKRBP4pdBgek4oXt4TbFOwxqTPEh5mA==
-  dependencies:
-    "@ipld/dag-cbor" "^9.0.7"
-    cborg "^4.0.5"
-    multiformats "^13.0.0"
-    varint "^6.0.0"
-
 "@ipld/car@5.2.4":
   version "5.2.4"
   resolved "https://registry.npmjs.org/@ipld/car/-/car-5.2.4.tgz"
@@ -689,6 +789,16 @@
     multiformats "^12.1.0"
     varint "^6.0.0"
 
+"@ipld/car@^5.1.1":
+  version "5.2.6"
+  resolved "https://registry.npmjs.org/@ipld/car/-/car-5.2.6.tgz"
+  integrity sha512-ZiIYan7UFLLQsR90GpKOrZ0t6/6owrevJI7dCG8McNj0zUO4vGzsPumpKRBP4pdBgek4oXt4TbFOwxqTPEh5mA==
+  dependencies:
+    "@ipld/dag-cbor" "^9.0.7"
+    cborg "^4.0.5"
+    multiformats "^13.0.0"
+    varint "^6.0.0"
+
 "@ipld/dag-cbor@^9.0.0", "@ipld/dag-cbor@^9.0.7":
   version "9.1.0"
   resolved "https://registry.npmjs.org/@ipld/dag-cbor/-/dag-cbor-9.1.0.tgz"
@@ -777,18 +887,7 @@
     race-signal "^1.0.0"
     uint8arraylist "^2.4.3"
 
-"@libp2p/interface@^1.0.0":
-  version "1.0.2"
-  resolved "https://registry.npmjs.org/@libp2p/interface/-/interface-1.0.2.tgz"
-  integrity sha512-z/3Yyg+7cVyzRXwzdrDkJd7YmNaLE9iZjQaixo5luI/n9uk5OFFjb9ulAsNqpq8V1xylCo2DXIC7f94KClwzVw==
-  dependencies:
-    "@multiformats/multiaddr" "^12.1.10"
-    it-pushable "^3.2.1"
-    it-stream-types "^2.0.1"
-    multiformats "^12.1.3"
-    uint8arraylist "^2.4.3"
-
-"@libp2p/interface@^1.0.2":
+"@libp2p/interface@^1.0.0", "@libp2p/interface@^1.0.2":
   version "1.0.2"
   resolved "https://registry.npmjs.org/@libp2p/interface/-/interface-1.0.2.tgz"
   integrity sha512-z/3Yyg+7cVyzRXwzdrDkJd7YmNaLE9iZjQaixo5luI/n9uk5OFFjb9ulAsNqpq8V1xylCo2DXIC7f94KClwzVw==
@@ -855,7 +954,7 @@
     "@nodelib/fs.stat" "2.0.5"
     run-parallel "^1.1.9"
 
-"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5":
+"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2":
   version "2.0.5"
   resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz"
   integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
@@ -873,11 +972,71 @@
   resolved "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz"
   integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==
 
+"@rollup/rollup-android-arm-eabi@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.9.6.tgz#66b8d9cb2b3a474d115500f9ebaf43e2126fe496"
+  integrity sha512-MVNXSSYN6QXOulbHpLMKYi60ppyO13W9my1qogeiAqtjb2yR4LSmfU2+POvDkLzhjYLXz9Rf9+9a3zFHW1Lecg==
+
+"@rollup/rollup-android-arm64@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.9.6.tgz#46327d5b86420d2307946bec1535fdf00356e47d"
+  integrity sha512-T14aNLpqJ5wzKNf5jEDpv5zgyIqcpn1MlwCrUXLrwoADr2RkWA0vOWP4XxbO9aiO3dvMCQICZdKeDrFl7UMClw==
+
 "@rollup/rollup-darwin-arm64@4.9.6":
   version "4.9.6"
   resolved "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.9.6.tgz"
   integrity sha512-CqNNAyhRkTbo8VVZ5R85X73H3R5NX9ONnKbXuHisGWC0qRbTTxnF1U4V9NafzJbgGM0sHZpdO83pLPzq8uOZFw==
 
+"@rollup/rollup-darwin-x64@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.9.6.tgz#a2e6e096f74ccea6e2f174454c26aef6bcdd1274"
+  integrity sha512-zRDtdJuRvA1dc9Mp6BWYqAsU5oeLixdfUvkTHuiYOHwqYuQ4YgSmi6+/lPvSsqc/I0Omw3DdICx4Tfacdzmhog==
+
+"@rollup/rollup-linux-arm-gnueabihf@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.9.6.tgz#09fcd4c55a2d6160c5865fec708a8e5287f30515"
+  integrity sha512-oNk8YXDDnNyG4qlNb6is1ojTOGL/tRhbbKeE/YuccItzerEZT68Z9gHrY3ROh7axDc974+zYAPxK5SH0j/G+QQ==
+
+"@rollup/rollup-linux-arm64-gnu@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.9.6.tgz#19a3c0b6315c747ca9acf86e9b710cc2440f83c9"
+  integrity sha512-Z3O60yxPtuCYobrtzjo0wlmvDdx2qZfeAWTyfOjEDqd08kthDKexLpV97KfAeUXPosENKd8uyJMRDfFMxcYkDQ==
+
+"@rollup/rollup-linux-arm64-musl@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.9.6.tgz#94aaf95fdaf2ad9335983a4552759f98e6b2e850"
+  integrity sha512-gpiG0qQJNdYEVad+1iAsGAbgAnZ8j07FapmnIAQgODKcOTjLEWM9sRb+MbQyVsYCnA0Im6M6QIq6ax7liws6eQ==
+
+"@rollup/rollup-linux-riscv64-gnu@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.9.6.tgz#160510e63f4b12618af4013bddf1761cf9fc9880"
+  integrity sha512-+uCOcvVmFUYvVDr27aiyun9WgZk0tXe7ThuzoUTAukZJOwS5MrGbmSlNOhx1j80GdpqbOty05XqSl5w4dQvcOA==
+
+"@rollup/rollup-linux-x64-gnu@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.9.6.tgz#5ac5d068ce0726bd0a96ca260d5bd93721c0cb98"
+  integrity sha512-HUNqM32dGzfBKuaDUBqFB7tP6VMN74eLZ33Q9Y1TBqRDn+qDonkAUyKWwF9BR9unV7QUzffLnz9GrnKvMqC/fw==
+
+"@rollup/rollup-linux-x64-musl@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.9.6.tgz#bafa759ab43e8eab9edf242a8259ffb4f2a57a5d"
+  integrity sha512-ch7M+9Tr5R4FK40FHQk8VnML0Szi2KRujUgHXd/HjuH9ifH72GUmw6lStZBo3c3GB82vHa0ZoUfjfcM7JiiMrQ==
+
+"@rollup/rollup-win32-arm64-msvc@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.9.6.tgz#1cc3416682e5a20d8f088f26657e6e47f8db468e"
+  integrity sha512-VD6qnR99dhmTQ1mJhIzXsRcTBvTjbfbGGwKAHcu+52cVl15AC/kplkhxzW/uT0Xl62Y/meBKDZvoJSJN+vTeGA==
+
+"@rollup/rollup-win32-ia32-msvc@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.9.6.tgz#7d2251e1aa5e8a1e47c86891fe4547a939503461"
+  integrity sha512-J9AFDq/xiRI58eR2NIDfyVmTYGyIZmRcvcAoJ48oDld/NTR8wyiPUu2X/v1navJ+N/FGg68LEbX3Ejd6l8B7MQ==
+
+"@rollup/rollup-win32-x64-msvc@4.9.6":
+  version "4.9.6"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.9.6.tgz#2c1fb69e02a3f1506f52698cfdc3a8b6386df9a6"
+  integrity sha512-jqzNLhNDvIZOrt69Ce4UjGRpXJBzhUBzawMwnaDAwyHriki3XollsewxWzOzz+4yOFDkuJHtTsZFwMxhYJWmLQ==
+
 "@smithy/abort-controller@^2.0.1", "@smithy/abort-controller@^2.0.15":
   version "2.0.15"
   resolved "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-2.0.15.tgz"
@@ -1352,7 +1511,7 @@
   resolved "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-3.0.5.tgz"
   integrity sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==
 
-"@types/markdown-it@*", "@types/markdown-it@^12.2.3":
+"@types/markdown-it@^12.2.3":
   version "12.2.3"
   resolved "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-12.2.3.tgz"
   integrity sha512-GKMHFfv3458yYy+v/N8gjufHO6MSZKCOXpZc5GXIWWy8uldwfmPn98vp81gZ5f9SVw8YYBctgfJ22a2d7AOMeQ==
@@ -1530,14 +1689,6 @@ buffer-from@^1.0.0:
   resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz"
   integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
 
-buffer@^6.0.3:
-  version "6.0.3"
-  resolved "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz"
-  integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==
-  dependencies:
-    base64-js "^1.3.1"
-    ieee754 "^1.2.1"
-
 buffer@5.6.0:
   version "5.6.0"
   resolved "https://registry.npmjs.org/buffer/-/buffer-5.6.0.tgz"
@@ -1546,6 +1697,14 @@ buffer@5.6.0:
     base64-js "^1.0.2"
     ieee754 "^1.1.4"
 
+buffer@^6.0.3:
+  version "6.0.3"
+  resolved "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz"
+  integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==
+  dependencies:
+    base64-js "^1.3.1"
+    ieee754 "^1.2.1"
+
 bundle-require@^4.0.0:
   version "4.0.2"
   resolved "https://registry.npmjs.org/bundle-require/-/bundle-require-4.0.2.tgz"
@@ -1725,7 +1884,7 @@ err-code@^3.0.1:
   resolved "https://registry.npmjs.org/err-code/-/err-code-3.0.1.tgz"
   integrity sha512-GiaH0KJUewYok+eeY05IIgjtAe4Yltygk9Wqp1V5yVWLdhf0hYZchRjNIT9bb0mSwRcIusT3cx7PJUf3zEIfUA==
 
-esbuild@^0.19.2, esbuild@>=0.17:
+esbuild@^0.19.2:
   version "0.19.12"
   resolved "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz"
   integrity sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==
@@ -1764,7 +1923,7 @@ eventemitter3@^5.0.1:
   resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz"
   integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==
 
-events@^3.3.0, events@3.3.0:
+events@3.3.0, events@^3.3.0:
   version "3.3.0"
   resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz"
   integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
@@ -2237,7 +2396,7 @@ js2xmlparser@^4.0.2:
   dependencies:
     xmlcreate "^2.0.4"
 
-"jsdoc@>=3.x <=4.x", jsdoc@4.0.2:
+jsdoc@4.0.2:
   version "4.0.2"
   resolved "https://registry.npmjs.org/jsdoc/-/jsdoc-4.0.2.tgz"
   integrity sha512-e8cIg2z62InH7azBBi3EsSEqrKx+nUtAS5bBcYTSpZFA+vhNPyhv8PTFZ0WsjOPDj04/dOLlm08EDcQJDqaGQg==
@@ -2335,7 +2494,7 @@ markdown-it-anchor@^8.4.1:
   resolved "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz"
   integrity sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==
 
-markdown-it@*, markdown-it@^12.3.2:
+markdown-it@^12.3.2:
   version "12.3.2"
   resolved "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz"
   integrity sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==
@@ -2420,7 +2579,7 @@ mkdirp@^1.0.4:
   resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz"
   integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
 
-ms@^2.1.1, ms@2.1.2:
+ms@2.1.2, ms@^2.1.1:
   version "2.1.2"
   resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
@@ -2734,11 +2893,6 @@ source-map-support@~0.5.20:
     buffer-from "^1.0.0"
     source-map "^0.6.0"
 
-source-map@^0.6.0, source-map@~0.6.0:
-  version "0.6.1"
-  resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz"
-  integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
-
 source-map@0.8.0-beta.0:
   version "0.8.0-beta.0"
   resolved "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz"
@@ -2746,6 +2900,11 @@ source-map@0.8.0-beta.0:
   dependencies:
     whatwg-url "^7.0.0"
 
+source-map@^0.6.0, source-map@~0.6.0:
+  version "0.6.1"
+  resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz"
+  integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
+
 sparse-array@^1.3.1, sparse-array@^1.3.2:
   version "1.3.2"
   resolved "https://registry.npmjs.org/sparse-array/-/sparse-array-1.3.2.tgz"
@@ -2759,23 +2918,7 @@ stream-browserify@3.0.0:
     inherits "~2.0.4"
     readable-stream "^3.5.0"
 
-string_decoder@^1.1.1:
-  version "1.3.0"
-  resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz"
-  integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
-  dependencies:
-    safe-buffer "~5.2.0"
-
-"string-width-cjs@npm:string-width@^4.2.0":
-  version "4.2.3"
-  resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"
-  integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
-  dependencies:
-    emoji-regex "^8.0.0"
-    is-fullwidth-code-point "^3.0.0"
-    strip-ansi "^6.0.1"
-
-string-width@^4.1.0:
+"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0:
   version "4.2.3"
   resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"
   integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -2793,14 +2936,14 @@ string-width@^5.0.1, string-width@^5.1.2:
     emoji-regex "^9.2.2"
     strip-ansi "^7.0.1"
 
-"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
-  version "6.0.1"
-  resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"
-  integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
+string_decoder@^1.1.1:
+  version "1.3.0"
+  resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz"
+  integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
   dependencies:
-    ansi-regex "^5.0.1"
+    safe-buffer "~5.2.0"
 
-strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
   version "6.0.1"
   resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"
   integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@@ -2912,9 +3055,9 @@ tslib@^2.0.3, tslib@^2.3.1, tslib@^2.5.0:
   resolved "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz"
   integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
 
-tsup@^8.0.1:
+tsup@8.0.1:
   version "8.0.1"
-  resolved "https://registry.npmjs.org/tsup/-/tsup-8.0.1.tgz"
+  resolved "https://registry.yarnpkg.com/tsup/-/tsup-8.0.1.tgz#04a0170f7bbe77e81da3b53006b0a40282291833"
   integrity sha512-hvW7gUSG96j53ZTSlT4j/KL0q1Q2l6TqGBFc6/mu/L46IoNWqLLUzLRLP1R8Q7xrJTmkDxxDoojV5uCVs1sVOg==
   dependencies:
     bundle-require "^4.0.0"
@@ -2932,7 +3075,7 @@ tsup@^8.0.1:
     sucrase "^3.20.3"
     tree-kill "^1.2.2"
 
-typescript@>=4.5.0, typescript@5.3.3:
+typescript@5.3.3:
   version "5.3.3"
   resolved "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz"
   integrity sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==
@@ -2986,16 +3129,16 @@ util-deprecate@^1.0.1:
   resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"
   integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
 
-uuid@^8.3.2:
-  version "8.3.2"
-  resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz"
-  integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
-
 uuid@9.0.1:
   version "9.0.1"
   resolved "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz"
   integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
 
+uuid@^8.3.2:
+  version "8.3.2"
+  resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz"
+  integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+
 varint-decoder@^1.0.0:
   version "1.0.0"
   resolved "https://registry.npmjs.org/varint-decoder/-/varint-decoder-1.0.0.tgz"

From 0bcfccb5e7ad820ce3888a266911ed0bc928822d Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Fri, 2 Feb 2024 12:15:16 -0600
Subject: [PATCH 03/16] Only compile CJS Fix ESM tests

---
 .gitignore                  | 2 +-
 package.json                | 8 ++++++--
 src/package.json            | 3 +++
 test/bucketManager.spec.js  | 8 ++++----
 test/gatewayManager.spec.js | 2 +-
 test/nameManager.spec.js    | 2 +-
 test/objectManager.spec.js  | 2 +-
 test/package.json           | 3 +++
 test/pinManager.spec.js     | 2 +-
 tsup.config.js              | 2 +-
 10 files changed, 22 insertions(+), 12 deletions(-)
 create mode 100644 src/package.json
 create mode 100644 test/package.json

diff --git a/.gitignore b/.gitignore
index 41e9674..c479dda 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
 dist
 node_modules
 documentation
-.env
+.env
\ No newline at end of file
diff --git a/package.json b/package.json
index 907b37d..97f31f6 100644
--- a/package.json
+++ b/package.json
@@ -13,16 +13,20 @@
   "exports": {
     ".": {
       "require": "./dist/index.js",
-      "import": "./dist/index.mjs",
+      "import": "./src/index.mjs",
       "types": "./dist/index.d.ts"
     }
   },
+  "files": [
+    "dist",
+    "src"
+  ],
   "engines": {
     "node": ">=16.0.0",
     "npm": ">=8.0.0"
   },
   "scripts": {
-    "build": "tsup src/index.js --format cjs,esm --dts --clean",
+    "build": "tsup src/index.js --format cjs --dts --clean",
     "test": "node --test",
     "doc": "jsdoc -c jsdoc.json"
   },
diff --git a/src/package.json b/src/package.json
new file mode 100644
index 0000000..aead43d
--- /dev/null
+++ b/src/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
\ No newline at end of file
diff --git a/test/bucketManager.spec.js b/test/bucketManager.spec.js
index 87e00cc..54a84a0 100644
--- a/test/bucketManager.spec.js
+++ b/test/bucketManager.spec.js
@@ -1,10 +1,10 @@
 import test from "node:test";
 import assert from "node:assert/strict";
-import { BucketManager } from "../dist/index.mjs";
+import { BucketManager } from "../src/index.js";
 
 const TEST_PREFIX = Date.now();
 
-test("create bucket", async (t) => {
+test("create bucket", async () => {
   // Initialize BucketManager
   const bucketManager = new BucketManager(
     process.env.TEST_S3_KEY || process.env.TEST_KEY,
@@ -51,7 +51,7 @@ test("list buckets", async () => {
   );
 });
 
-test("delete bucket", async (t) => {
+test("delete bucket", async () => {
   // Initialize BucketManager
   const bucketManager = new BucketManager(
     process.env.TEST_S3_KEY || process.env.TEST_KEY,
@@ -82,7 +82,7 @@ test("delete bucket", async (t) => {
   assert.equal(typeof deletedBucket, "undefined");
 });
 
-test("set bucket privacy to public", async (t) => {
+test("set bucket privacy to public", async () => {
   // Initialize BucketManager
   const bucketManager = new BucketManager(
     process.env.TEST_S3_KEY || process.env.TEST_KEY,
diff --git a/test/gatewayManager.spec.js b/test/gatewayManager.spec.js
index 62f4398..364bb9c 100644
--- a/test/gatewayManager.spec.js
+++ b/test/gatewayManager.spec.js
@@ -1,6 +1,6 @@
 import { test } from "node:test";
 import assert from "node:assert/strict";
-import { GatewayManager } from "../dist/index.mjs";
+import { GatewayManager } from "../src/index.js";
 
 const TEST_PREFIX = Date.now();
 
diff --git a/test/nameManager.spec.js b/test/nameManager.spec.js
index 42d24d8..c48fe2c 100644
--- a/test/nameManager.spec.js
+++ b/test/nameManager.spec.js
@@ -1,6 +1,6 @@
 import { test } from "node:test";
 import assert from "node:assert/strict";
-import { NameManager } from "../dist/index.mjs";
+import { NameManager } from "../src/index.js";
 
 const TEST_CID = process.env.TEST_NAME_CID,
   TEST_PRIVATE_KEY = process.env.TEST_NAME_PRIVATE_KEY,
diff --git a/test/objectManager.spec.js b/test/objectManager.spec.js
index 6968bd3..6c2bb22 100644
--- a/test/objectManager.spec.js
+++ b/test/objectManager.spec.js
@@ -1,6 +1,6 @@
 import { test } from "node:test";
 import assert from "node:assert/strict";
-import { ObjectManager, BucketManager } from "../dist/index.mjs";
+import { ObjectManager, BucketManager } from "../src/index.js";
 import * as Path from "node:path";
 import { writeFile } from "node:fs/promises";
 import { v4 as uuidv4 } from "uuid";
diff --git a/test/package.json b/test/package.json
new file mode 100644
index 0000000..aead43d
--- /dev/null
+++ b/test/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
\ No newline at end of file
diff --git a/test/pinManager.spec.js b/test/pinManager.spec.js
index 4b48fee..f42c2bb 100644
--- a/test/pinManager.spec.js
+++ b/test/pinManager.spec.js
@@ -5,7 +5,7 @@ import { v4 as uuidv4 } from "uuid";
 import Path from "node:path";
 import os from "node:os";
 import { writeFile } from "node:fs/promises";
-import { BucketManager } from "../dist/index.mjs";
+import { BucketManager } from "../src/index.js";
 
 const TEST_CID_1 = "QmSEu6zGwKgkQA3ZKaDnvkrwre1kkQa7eRFCbQi7waNwTT",
   TEST_CID_2 = "QmNXcMdXadLRTxLpHJMsGnaeKz26d2F6NgUDVWScp54EfC",
diff --git a/tsup.config.js b/tsup.config.js
index 2aa3a3b..0352edd 100644
--- a/tsup.config.js
+++ b/tsup.config.js
@@ -6,6 +6,6 @@ export default defineConfig({
   sourcemap: false,
   noExternal: ['@ipld/car', '@helia/car', '@helia/unixfs', 'blockstore-fs'],
   dts: true,
-  format: ['cjs', 'esm'],
+  format: ['cjs'],
   clean: true,
 })

From 7e2bd5a55cb45b20d0a6fb7395726d8fd02e80d5 Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Fri, 2 Feb 2024 12:34:22 -0600
Subject: [PATCH 04/16] Add CJS tests Change MJS test file names

---
 test/bucketManager.spec.cjs                   | 124 +++++++
 ...Manager.spec.js => bucketManager.spec.mjs} |   0
 test/gatewayManager.spec.cjs                  | 125 +++++++
 ...anager.spec.js => gatewayManager.spec.mjs} |   0
 test/nameManager.spec.cjs                     | 141 ++++++++
 ...meManager.spec.js => nameManager.spec.mjs} |   0
 test/objectManager.spec.cjs                   | 325 ++++++++++++++++++
 ...Manager.spec.js => objectManager.spec.mjs} |   0
 test/package.json                             |   3 -
 test/pinManager.spec.cjs                      | 262 ++++++++++++++
 ...pinManager.spec.js => pinManager.spec.mjs} |   0
 11 files changed, 977 insertions(+), 3 deletions(-)
 create mode 100644 test/bucketManager.spec.cjs
 rename test/{bucketManager.spec.js => bucketManager.spec.mjs} (100%)
 create mode 100644 test/gatewayManager.spec.cjs
 rename test/{gatewayManager.spec.js => gatewayManager.spec.mjs} (100%)
 create mode 100644 test/nameManager.spec.cjs
 rename test/{nameManager.spec.js => nameManager.spec.mjs} (100%)
 create mode 100644 test/objectManager.spec.cjs
 rename test/{objectManager.spec.js => objectManager.spec.mjs} (100%)
 delete mode 100644 test/package.json
 create mode 100644 test/pinManager.spec.cjs
 rename test/{pinManager.spec.js => pinManager.spec.mjs} (100%)

diff --git a/test/bucketManager.spec.cjs b/test/bucketManager.spec.cjs
new file mode 100644
index 0000000..47018a8
--- /dev/null
+++ b/test/bucketManager.spec.cjs
@@ -0,0 +1,124 @@
+const test = require("node:test");
+const assert = require("node:assert/strict");
+const { BucketManager } = require("../dist/index.js");
+
+const TEST_PREFIX = Date.now();
+
+test("create bucket", async () => {
+  // Initialize BucketManager
+  const bucketManager = new BucketManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+  );
+
+  // Create bucket `create-bucket-test-pass`
+  const bucketNameToCreate = `${TEST_PREFIX}-create-bucket-test-pass`;
+  await bucketManager.create(bucketNameToCreate);
+
+  try {
+    // List buckets
+    const currentBuckets = await bucketManager.list(),
+      createdBucket = currentBuckets.find((currentBucket) => {
+        return currentBucket.Name === bucketNameToCreate;
+      });
+
+    // Assert new bucket exists
+    assert.equal(createdBucket.Name, bucketNameToCreate);
+  } finally {
+    // Delete new bucket
+    await bucketManager.delete(bucketNameToCreate);
+  }
+});
+
+test("list buckets", async () => {
+  const testBucketName = `${TEST_PREFIX}-list-bucket-test-pass`,
+    bucketManager = new BucketManager(
+      process.env.TEST_S3_KEY || process.env.TEST_KEY,
+      process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+    ),
+    initialBucketsList = await bucketManager.list(),
+    countToCreate = 3;
+  for (let i = 0; i < countToCreate; i++) {
+    await bucketManager.create(`${testBucketName}-${i}`);
+  }
+  const bucketsList = await bucketManager.list();
+  for (let i = 0; i < countToCreate; i++) {
+    await bucketManager.delete(`${testBucketName}-${i}`);
+  }
+  assert.strictEqual(
+    bucketsList.length,
+    initialBucketsList.length + countToCreate,
+  );
+});
+
+test("delete bucket", async () => {
+  // Initialize BucketManager
+  const bucketManager = new BucketManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+  );
+
+  // Create bucket `delete-bucket-test-pass`
+  const bucketNameToCreate = `${TEST_PREFIX}-delete-bucket-test-pass`;
+  await bucketManager.create(bucketNameToCreate);
+
+  // List buckets and assert new bucket exists
+  const currentBuckets = await bucketManager.list(),
+    createdBucket = currentBuckets.find((currentBucket) => {
+      return currentBucket.Name === bucketNameToCreate;
+    });
+  if (typeof createdBucket === "undefined") {
+    throw new Error(`Unable to create test bucket [delete-bucket-test-pass]`);
+  }
+
+  // Delete new bucket
+  await bucketManager.delete(bucketNameToCreate);
+
+  // List buckets and assert new bucket does not exist
+  const updatedBuckets = await bucketManager.list(),
+    deletedBucket = updatedBuckets.find((updatedBucket) => {
+      return updatedBucket.Name === bucketNameToCreate;
+    });
+  assert.equal(typeof deletedBucket, "undefined");
+});
+
+test("set bucket privacy to public", async () => {
+  // Initialize BucketManager
+  const bucketManager = new BucketManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+  );
+
+  // Create bucket `toggle-bucket-test-pass`
+  const bucketNameToCreate = `${TEST_PREFIX}-toggle-bucket-test-pass`;
+  await bucketManager.create(bucketNameToCreate);
+
+  try {
+    // List buckets
+    const currentBuckets = await bucketManager.list(),
+      createdBucket = currentBuckets.find((currentBucket) => {
+        return currentBucket.Name === bucketNameToCreate;
+      });
+
+    // Check Privacy
+    const initialPrivacy = await bucketManager.getPrivacy(bucketNameToCreate);
+    if (initialPrivacy === false) {
+      throw new Error(`Unexpected Privacy State on Bucket`);
+    }
+
+    // Toggle Privacy
+    await bucketManager.setPrivacy(bucketNameToCreate, false);
+
+    // Check Privacy
+    const updatedPrivacy = await bucketManager.getPrivacy(bucketNameToCreate);
+    if (updatedPrivacy === true) {
+      throw new Error(`Unexpected Privacy State on Bucket`);
+    }
+
+    // Assert new bucket exists
+    assert.equal(createdBucket.Name, bucketNameToCreate);
+  } finally {
+    // Delete new bucket
+    await bucketManager.delete(bucketNameToCreate);
+  }
+});
diff --git a/test/bucketManager.spec.js b/test/bucketManager.spec.mjs
similarity index 100%
rename from test/bucketManager.spec.js
rename to test/bucketManager.spec.mjs
diff --git a/test/gatewayManager.spec.cjs b/test/gatewayManager.spec.cjs
new file mode 100644
index 0000000..98237e3
--- /dev/null
+++ b/test/gatewayManager.spec.cjs
@@ -0,0 +1,125 @@
+const test = require("node:test");
+const assert = require("node:assert/strict");
+const { GatewayManager } = require("../dist/index.js");
+
+const TEST_PREFIX = Date.now();
+
+test("delete gateway", async () => {
+  const testGatewayName = `${TEST_PREFIX}-delete-gateway-test-pass`,
+    gatewayManager = new GatewayManager(
+      process.env.TEST_GW_KEY || process.env.TEST_KEY,
+      process.env.TEST_GW_SECRET || process.env.TEST_SECRET,
+    );
+  await gatewayManager.create(testGatewayName);
+  await gatewayManager.delete(testGatewayName);
+  const deletedName = await gatewayManager.get(testGatewayName);
+  assert.strictEqual(deletedName, false);
+});
+test("create gateway", async () => {
+  const testGatewayName = `${TEST_PREFIX}-create-gateway-test-pass`,
+    gatewayManager = new GatewayManager(
+      process.env.TEST_GW_KEY || process.env.TEST_KEY,
+      process.env.TEST_GW_SECRET || process.env.TEST_SECRET,
+    ),
+    createdName = await gatewayManager.create(testGatewayName);
+  await gatewayManager.delete(testGatewayName);
+  assert.strictEqual(createdName.name, testGatewayName);
+});
+
+test("update gateway", async () => {
+  const testGatewayName = `${TEST_PREFIX}-update-gateway-test-pass`,
+    gatewayManager = new GatewayManager(
+      process.env.TEST_GW_KEY || process.env.TEST_KEY,
+      process.env.TEST_GW_SECRET || process.env.TEST_SECRET,
+    ),
+    createdName = await gatewayManager.create(testGatewayName);
+  try {
+    const updatedName = await gatewayManager.update(createdName.name, {
+      private: true,
+      enabled: false,
+    });
+    assert.strictEqual(updatedName, true);
+  } finally {
+    await gatewayManager.delete(testGatewayName);
+  }
+});
+
+test("get gateway", async () => {
+  const testGatewayName = `${TEST_PREFIX}-get-gateway-test-pass`,
+    gatewayManager = new GatewayManager(
+      process.env.TEST_GW_KEY || process.env.TEST_KEY,
+      process.env.TEST_GW_SECRET || process.env.TEST_SECRET,
+    ),
+    createdName = await gatewayManager.create(testGatewayName, {});
+  try {
+    const testName = await gatewayManager.get(createdName.name);
+    assert.strictEqual(testName.name, testGatewayName);
+  } finally {
+    await gatewayManager.delete(testGatewayName);
+  }
+});
+
+test("list gateways", async () => {
+  const testGatewayName = `${TEST_PREFIX}-list-names-test-pass`,
+    gatewayManager = new GatewayManager(
+      process.env.TEST_GW_KEY || process.env.TEST_KEY,
+      process.env.TEST_GW_SECRET || process.env.TEST_SECRET,
+    ),
+    initialGatewaysList = await gatewayManager.list(),
+    countToCreate = 3;
+  for (let i = 0; i < countToCreate; i++) {
+    await gatewayManager.create(`${testGatewayName}-${i}`);
+  }
+  const gatewaysList = await gatewayManager.list();
+  for (let i = 0; i < countToCreate; i++) {
+    await gatewayManager.delete(`${testGatewayName}-${i}`);
+  }
+  assert.strictEqual(
+    gatewaysList.length,
+    initialGatewaysList.length + countToCreate,
+  );
+});
+
+test("toggle gateway off", async () => {
+  const testGatewayName = `${TEST_PREFIX}-toggle-gateway-test-pass`,
+    gatewayManager = new GatewayManager(
+      process.env.TEST_GW_KEY || process.env.TEST_KEY,
+      process.env.TEST_GW_SECRET || process.env.TEST_SECRET,
+    );
+  await gatewayManager.create(testGatewayName);
+  try {
+    const resolvedName = await gatewayManager.get(testGatewayName);
+    if (resolvedName?.enabled === false) {
+      throw new Error(`Incorrect State on Resolved Name`);
+    }
+    await gatewayManager.toggle(testGatewayName, false);
+    const updatedName = await gatewayManager.get(testGatewayName);
+    assert.strictEqual(updatedName.name, testGatewayName);
+    assert.strictEqual(updatedName.enabled, false);
+  } finally {
+    await gatewayManager.delete(testGatewayName);
+  }
+});
+
+test("toggle gateway on", async () => {
+  const testGatewayName = `${TEST_PREFIX}-toggle-gateway-test-pass`,
+    gatewayManager = new GatewayManager(
+      process.env.TEST_GW_KEY || process.env.TEST_KEY,
+      process.env.TEST_GW_SECRET || process.env.TEST_SECRET,
+    );
+  await gatewayManager.create(testGatewayName, {
+    enabled: false,
+  });
+  try {
+    const resolvedName = await gatewayManager.get(testGatewayName);
+    if (resolvedName?.enabled === true) {
+      throw new Error(`Incorrect State on Resolved Name`);
+    }
+    await gatewayManager.toggle(testGatewayName, true);
+    const updatedName = await gatewayManager.get(testGatewayName);
+    assert.strictEqual(updatedName.name, testGatewayName);
+    assert.strictEqual(updatedName.enabled, true);
+  } finally {
+    await gatewayManager.delete(testGatewayName);
+  }
+});
diff --git a/test/gatewayManager.spec.js b/test/gatewayManager.spec.mjs
similarity index 100%
rename from test/gatewayManager.spec.js
rename to test/gatewayManager.spec.mjs
diff --git a/test/nameManager.spec.cjs b/test/nameManager.spec.cjs
new file mode 100644
index 0000000..283f7fd
--- /dev/null
+++ b/test/nameManager.spec.cjs
@@ -0,0 +1,141 @@
+const test = require("node:test");
+const assert = require("node:assert/strict");
+const { NameManager } = require("../dist/index.js");
+
+const TEST_CID = process.env.TEST_NAME_CID,
+  TEST_PRIVATE_KEY = process.env.TEST_NAME_PRIVATE_KEY,
+  TEST_PREFIX = Date.now();
+
+test("delete name", async () => {
+  const testNameLabel = `${TEST_PREFIX}-delete-name-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    );
+  await nameManager.create(testNameLabel, TEST_CID);
+  await nameManager.delete(testNameLabel);
+  const deletedName = await nameManager.get(testNameLabel);
+  assert.strictEqual(deletedName, false);
+});
+test("create name", async () => {
+  const testNameLabel = `${TEST_PREFIX}-create-name-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    ),
+    createdName = await nameManager.create(testNameLabel, TEST_CID);
+  await nameManager.delete(testNameLabel);
+  assert.strictEqual(createdName.label, testNameLabel);
+  assert.strictEqual(createdName.cid, TEST_CID);
+});
+
+test("import name", async () => {
+  const testNameLabel = `${TEST_PREFIX}-import-name-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    ),
+    importedName = await nameManager.import(
+      testNameLabel,
+      TEST_CID,
+      TEST_PRIVATE_KEY,
+    );
+  await nameManager.delete(testNameLabel);
+  assert.strictEqual(importedName.label, testNameLabel);
+  assert.strictEqual(importedName.cid, TEST_CID);
+});
+
+test("update name", async () => {
+  const testNameLabel = `${TEST_PREFIX}-update-name-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    ),
+    createdName = await nameManager.create(testNameLabel, TEST_CID);
+  try {
+    const updatedName = await nameManager.update(createdName.label, TEST_CID);
+    assert.strictEqual(updatedName, true);
+  } finally {
+    await nameManager.delete(testNameLabel);
+  }
+});
+
+test("get name", async () => {
+  const testNameLabel = `${TEST_PREFIX}-get-name-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    ),
+    createdName = await nameManager.create(testNameLabel, TEST_CID);
+  try {
+    const testName = await nameManager.get(createdName.label);
+    assert.strictEqual(testName.label, testNameLabel);
+    assert.strictEqual(testName.cid, TEST_CID);
+  } finally {
+    await nameManager.delete(testNameLabel);
+  }
+});
+
+test("list names", async () => {
+  const testNameLabel = `${TEST_PREFIX}-list-names-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    ),
+    initialNamesList = await nameManager.list(),
+    countToCreate = 3;
+  for (let i = 0; i < countToCreate; i++) {
+    await nameManager.create(`${testNameLabel}-${i}`, TEST_CID);
+  }
+  const namesList = await nameManager.list();
+  for (let i = 0; i < countToCreate; i++) {
+    await nameManager.delete(`${testNameLabel}-${i}`);
+  }
+  assert.strictEqual(namesList.length, initialNamesList.length + countToCreate);
+});
+
+test("toggle name on", async () => {
+  const testNameLabel = `${TEST_PREFIX}-toggle-name-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    );
+  await nameManager.create(testNameLabel, TEST_CID, {
+    enabled: false,
+  });
+  try {
+    const resolvedName = await nameManager.get(testNameLabel);
+    if (resolvedName?.enabled === true) {
+      throw new Error(`Incorrect State on Resolved Name`);
+    }
+    await nameManager.toggle(testNameLabel, true);
+    const updatedName = await nameManager.get(testNameLabel);
+    assert.strictEqual(updatedName.label, testNameLabel);
+    assert.strictEqual(updatedName.cid, TEST_CID);
+    assert.strictEqual(updatedName.enabled, true);
+  } finally {
+    await nameManager.delete(testNameLabel);
+  }
+});
+
+test("toggle name off", async () => {
+  const testNameLabel = `${TEST_PREFIX}-toggle-name-test-pass`,
+    nameManager = new NameManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+    );
+  await nameManager.create(testNameLabel, TEST_CID);
+  try {
+    const resolvedName = await nameManager.get(testNameLabel);
+    if (resolvedName?.enabled === false) {
+      throw new Error(`Incorrect State on Resolved Name`);
+    }
+    await nameManager.toggle(testNameLabel, false);
+    const updatedName = await nameManager.get(testNameLabel);
+    assert.strictEqual(updatedName.label, testNameLabel);
+    assert.strictEqual(updatedName.cid, TEST_CID);
+    assert.strictEqual(updatedName.enabled, false);
+  } finally {
+    await nameManager.delete(testNameLabel);
+  }
+});
diff --git a/test/nameManager.spec.js b/test/nameManager.spec.mjs
similarity index 100%
rename from test/nameManager.spec.js
rename to test/nameManager.spec.mjs
diff --git a/test/objectManager.spec.cjs b/test/objectManager.spec.cjs
new file mode 100644
index 0000000..c78c610
--- /dev/null
+++ b/test/objectManager.spec.cjs
@@ -0,0 +1,325 @@
+const test = require("node:test");
+const assert = require("node:assert/strict");
+const { ObjectManager, BucketManager } = require("../dist/index.js");
+const Path = require("node:path");
+const { writeFile } = require("node:fs/promises");
+const { v4: uuidv4 } = require("node:uuid");
+const os = require("node:os");
+
+const TEST_PREFIX = Date.now();
+
+async function createBucket(name) {
+  // Initialize BucketManager
+  const bucketManager = new BucketManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+  );
+
+  // Create bucket with name
+  const bucketNameToCreate = name;
+  await bucketManager.create(bucketNameToCreate);
+
+  // List buckets and assert new bucket exists
+  const currentBuckets = await bucketManager.list(),
+    createdBucket = currentBuckets.find((currentBucket) => {
+      return currentBucket.Name === bucketNameToCreate;
+    });
+
+  return typeof createdBucket !== "undefined";
+}
+
+async function uploadObject(bucket, key, body) {
+  // Initialize ObjectManager
+  const objectManager = new ObjectManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+    { bucket },
+  );
+
+  // Upload Object
+  await objectManager.upload(key, body);
+
+  // Confirm Object Uploaded
+  const uploadedObject = await objectManager.get(key);
+
+  return typeof uploadedObject !== "undefined";
+}
+
+async function deleteObject(bucket, key) {
+  // Initialize ObjectManager
+  const objectManager = new ObjectManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+    { bucket },
+  );
+
+  // Delete Object
+  await objectManager.delete(key);
+  return true;
+}
+
+async function deleteBucket(bucket) {
+  // Initialize BucketManager
+  const bucketManager = new BucketManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+  );
+
+  // Delete Bucket
+  await bucketManager.delete(bucket);
+  return true;
+}
+
+test("delete object", async () => {
+  // Create bucket `delete-object-test-pass`
+  const deleteTestBucket = `${TEST_PREFIX}-delete-object-test-pass`;
+  await createBucket(deleteTestBucket);
+
+  try {
+    // Upload object `delete-object-test`
+    const objectNameToCreate = `delete-object-test`;
+    const uploaded = await uploadObject(
+      deleteTestBucket,
+      objectNameToCreate,
+      Buffer.from("delete object", "utf-8"),
+    );
+    if (uploaded === false) {
+      throw Error(`Failed to create object [delete-object-test]`);
+    }
+
+    // Initialize ObjectManager
+    const objectManager = new ObjectManager(
+      process.env.TEST_S3_KEY || process.env.TEST_KEY,
+      process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+      { bucket: deleteTestBucket },
+    );
+
+    // Delete object `delete-object-test`
+    await objectManager.delete(objectNameToCreate);
+
+    // List bucket and assert new object doesn't exist
+    const uploadedObject = await objectManager.get(objectNameToCreate);
+    assert.equal(uploadedObject, false);
+  } finally {
+    await deleteBucket(deleteTestBucket);
+  }
+});
+
+test("upload object", async () => {
+  // Create Bucket `create-object-test-pass`
+  const uploadTestBucket = `${TEST_PREFIX}-create-object-test-pass`;
+  await createBucket(uploadTestBucket);
+
+  try {
+    // Upload object `create-object-test`
+    const uploaded = await uploadObject(
+      uploadTestBucket,
+      `create-object-test`,
+      Buffer.from("upload object", "utf-8"),
+    );
+
+    assert.strictEqual(uploaded, true);
+    await deleteObject(uploadTestBucket, `create-object-test`);
+  } finally {
+    await deleteBucket(uploadTestBucket);
+  }
+});
+
+test("upload directory", async () => {
+  // Create Bucket `create-directory-test-pass`
+  const uploadDirectoryTestBucket = `${TEST_PREFIX}-create-directory-test-pass`;
+  await createBucket(uploadDirectoryTestBucket);
+
+  try {
+    // Upload directory `create-directory-test`
+    const uploaded = await uploadObject(
+      uploadDirectoryTestBucket,
+      `create-directory-test`,
+      [
+        {
+          path: "/testObjects/1.txt",
+          content: Buffer.from("upload test object", "utf-8"),
+        },
+        {
+          path: "/testObjects/deep/1.txt",
+          content: Buffer.from("upload deep test object", "utf-8"),
+        },
+        {
+          path: "/topLevel.txt",
+          content: Buffer.from("upload top level test object", "utf-8"),
+        },
+      ],
+    );
+    assert.strictEqual(uploaded, true);
+    await deleteObject(uploadDirectoryTestBucket, `create-directory-test`);
+  } finally {
+    await deleteBucket(uploadDirectoryTestBucket);
+  }
+});
+
+test("download object", async () => {
+  // Create bucket `download-object-test-pass`
+  const downloadTestBucket = `${TEST_PREFIX}-download-object-test-pass`;
+  await createBucket(downloadTestBucket);
+
+  try {
+    // Upload object `download-object-test`
+    const objectNameToCreate = `download-object-test`;
+    const uploaded = await uploadObject(
+      downloadTestBucket,
+      objectNameToCreate,
+      Buffer.from("download object", "utf-8"),
+    );
+    if (uploaded === false) {
+      throw Error(`Failed to create object [download-object-test]`);
+    }
+
+    try {
+      // Download object `download-object-test` and assert it completes
+      const objectManager = new ObjectManager(
+        process.env.TEST_S3_KEY || process.env.TEST_KEY,
+        process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+        { bucket: downloadTestBucket },
+      );
+      const downloadStream = await objectManager.download(objectNameToCreate),
+        downloadFilename = uuidv4(),
+        downloadPath = Path.resolve(os.tmpdir(), downloadFilename),
+        writeFileResult = await writeFile(downloadPath, downloadStream);
+      assert.strictEqual(typeof writeFileResult, "undefined");
+    } finally {
+      await deleteObject(downloadTestBucket, objectNameToCreate);
+    }
+  } finally {
+    await deleteBucket(downloadTestBucket);
+  }
+});
+
+test("download object using gateway", async () => {
+  // Create bucket `download-object-test-pass`
+  const downloadTestBucket = `${TEST_PREFIX}-download-object-test-pass`;
+  await createBucket(downloadTestBucket);
+
+  try {
+    // Upload object `download-object-test`
+    const objectNameToCreate = `download-object-test`;
+    const uploaded = await uploadObject(
+      downloadTestBucket,
+      objectNameToCreate,
+      Buffer.from("download object", "utf-8"),
+    );
+    if (uploaded === false) {
+      throw Error(`Failed to create object [download-object-test]`);
+    }
+
+    try {
+      // Download object `download-object-test` and assert it completes
+      const objectManager = new ObjectManager(
+        process.env.TEST_S3_KEY || process.env.TEST_KEY,
+        process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+        {
+          bucket: downloadTestBucket,
+          gateway: { endpoint: process.env.TEST_IPFS_GATEWAY },
+        },
+      );
+      const downloadStream = await objectManager.download(objectNameToCreate),
+        downloadFilename = uuidv4(),
+        downloadPath = Path.resolve(os.tmpdir(), downloadFilename),
+        writeFileResult = await writeFile(downloadPath, downloadStream);
+      assert.strictEqual(typeof writeFileResult, "undefined");
+    } finally {
+      await deleteObject(downloadTestBucket, objectNameToCreate);
+    }
+  } finally {
+    await deleteBucket(downloadTestBucket);
+  }
+});
+
+test("list objects", async () => {
+  // Create bucket `list-objects-test-pass`
+  const listTestBucket = `${TEST_PREFIX}-list-objects-test-pass`;
+  await createBucket(listTestBucket);
+
+  try {
+    let createdObjectCount = 0;
+    while (createdObjectCount < 26) {
+      // Upload objects `list-object-test-[x]`
+      const objectNameToCreate = `list-object-test-${createdObjectCount}`;
+      await uploadObject(
+        listTestBucket,
+        objectNameToCreate,
+        Buffer.from(`list objects ${createdObjectCount}`, "utf-8"),
+      );
+      createdObjectCount++;
+    }
+
+    const objectManager = new ObjectManager(
+      process.env.TEST_S3_KEY || process.env.TEST_KEY,
+      process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+      { bucket: listTestBucket },
+    );
+
+    const objectList = await objectManager.list({
+      MaxKeys: 50,
+      Prefix: `list-object-test-`,
+    });
+    assert.equal(objectList.Contents.length, 26);
+
+    let deletedObjectCount = 0;
+    while (deletedObjectCount < 26) {
+      // Delete objects `list-object-test-[x]`
+      const objectNameToDelete = `list-object-test-${deletedObjectCount}`;
+      await deleteObject(listTestBucket, objectNameToDelete);
+      deletedObjectCount++;
+    }
+  } finally {
+    await deleteBucket(listTestBucket);
+  }
+});
+
+test("copy object", async () => {
+  // Create bucket `copy-object-test-pass-src`
+  const bucketSrc = `${TEST_PREFIX}-copy-object-test-pass-src`;
+  await createBucket(bucketSrc);
+
+  try {
+    // Upload object `copy-object-test`
+    const objectNameToCreateSrc = `copy-object-test`;
+    const uploaded = await uploadObject(
+      bucketSrc,
+      objectNameToCreateSrc,
+      Buffer.from("copy object", "utf-8"),
+    );
+    try {
+      assert.equal(uploaded, true);
+
+      // Create bucket `copy-object-test-pass-dest`
+      const bucketDest = `${TEST_PREFIX}-copy-object-test-pass-dest`;
+      await createBucket(bucketDest);
+
+      try {
+        // Initialize ObjectManager
+        const objectManager = new ObjectManager(
+          process.env.TEST_S3_KEY || process.env.TEST_KEY,
+          process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+          { bucket: bucketSrc },
+        );
+
+        // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
+        await objectManager.copy(objectNameToCreateSrc, bucketDest);
+        try {
+          // List bucket and assert new object exists
+          const copiedObject = await objectManager.get(objectNameToCreateSrc);
+          assert.equal(copiedObject.ETag, '"8605273d870f50fde0d8fbcad4a8f702"');
+        } finally {
+          await deleteObject(bucketDest, objectNameToCreateSrc);
+        }
+      } finally {
+        await deleteBucket(bucketDest);
+      }
+    } finally {
+      await deleteObject(bucketSrc, objectNameToCreateSrc);
+    }
+  } finally {
+    await deleteBucket(bucketSrc);
+  }
+});
diff --git a/test/objectManager.spec.js b/test/objectManager.spec.mjs
similarity index 100%
rename from test/objectManager.spec.js
rename to test/objectManager.spec.mjs
diff --git a/test/package.json b/test/package.json
deleted file mode 100644
index aead43d..0000000
--- a/test/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
\ No newline at end of file
diff --git a/test/pinManager.spec.cjs b/test/pinManager.spec.cjs
new file mode 100644
index 0000000..bd45429
--- /dev/null
+++ b/test/pinManager.spec.cjs
@@ -0,0 +1,262 @@
+const test = require("node:test");
+const assert = require("node:assert/strict");
+const { BucketManager, PinManager } = require("../dist/index.js");
+const Path = require("node:path");
+const { writeFile } = require("node:fs/promises");
+const { v4: uuidv4 } = require("node:uuid");
+const os = require("node:os");
+
+const TEST_CID_1 = "QmSEu6zGwKgkQA3ZKaDnvkrwre1kkQa7eRFCbQi7waNwTT",
+  TEST_CID_2 = "QmNXcMdXadLRTxLpHJMsGnaeKz26d2F6NgUDVWScp54EfC",
+  TEST_PREFIX = Date.now();
+
+async function createBucket(name) {
+  // Initialize BucketManager
+  const bucketManager = new BucketManager(
+    process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+    process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+  );
+
+  // Create bucket with name
+  const bucketNameToCreate = name;
+  await bucketManager.create(bucketNameToCreate);
+
+  // List buckets and assert new bucket exists
+  const currentBuckets = await bucketManager.list(),
+    createdBucket = currentBuckets.find((currentBucket) => {
+      return currentBucket.Name === bucketNameToCreate;
+    });
+
+  return typeof createdBucket !== "undefined";
+}
+
+async function deleteBucket(bucket) {
+  // Initialize BucketManager
+  const bucketManager = new BucketManager(
+    process.env.TEST_S3_KEY || process.env.TEST_KEY,
+    process.env.TEST_S3_SECRET || process.env.TEST_SECRET,
+  );
+
+  // Delete Bucket
+  await bucketManager.delete(bucket);
+  return true;
+}
+
+test("create pin", async () => {
+  const testBucketName = `${TEST_PREFIX}-create-pin-test-pass`,
+    testPinName = `${TEST_PREFIX}-create-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const createdPin = await pinManager.create(testPinName, TEST_CID_1);
+    assert.strictEqual(createdPin.pin.cid, TEST_CID_1);
+    await pinManager.delete(createdPin.requestid);
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
+
+test("replace pin with name", async () => {
+  const testBucketName = `${TEST_PREFIX}-replname-pin-test-pass`,
+    testPinName = `${TEST_PREFIX}-replace-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const createdPin = await pinManager.create(testPinName, TEST_CID_1);
+    assert.strictEqual(createdPin.pin.cid, TEST_CID_1);
+    const replacedPin = await pinManager.replace(
+      createdPin.requestid,
+      TEST_CID_2,
+      {
+        name: `${testPinName}-replaced`,
+      },
+    );
+    assert.strictEqual(replacedPin.pin.cid, TEST_CID_2);
+    assert.strictEqual(replacedPin.pin.name, `${testPinName}-replaced`);
+    await pinManager.delete(replacedPin.requestid);
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
+
+test("replace pin without name", async () => {
+  const testBucketName = `${TEST_PREFIX}-replace-pin-test-pass`,
+    testPinName = `${TEST_PREFIX}-replace-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const createdPin = await pinManager.create(testPinName, TEST_CID_1);
+    assert.strictEqual(createdPin.pin.cid, TEST_CID_1);
+    const replacedPin = await pinManager.replace(
+      createdPin.requestid,
+      TEST_CID_2,
+    );
+    assert.strictEqual(replacedPin.pin.name, testPinName);
+    assert.strictEqual(replacedPin.pin.cid, TEST_CID_2);
+    await pinManager.delete(replacedPin.requestid);
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
+
+test("get pin", async () => {
+  const testBucketName = `${TEST_PREFIX}-get-pin-test-pass`,
+    testPinName = `${TEST_PREFIX}-get-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const createdPin = await pinManager.create(testPinName, TEST_CID_1);
+    assert.strictEqual(createdPin.pin.cid, TEST_CID_1);
+    try {
+      const queriedPin = await pinManager.get(createdPin.requestid);
+      assert.strictEqual(queriedPin.requestid, createdPin.requestid);
+      assert.strictEqual(queriedPin.pin.cid, TEST_CID_1);
+    } finally {
+      await pinManager.delete(createdPin.requestid);
+    }
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
+
+test("download pin", async () => {
+  const testBucketName = `${TEST_PREFIX}-download-pin-test-pass`,
+    testPinName = `${TEST_PREFIX}-download-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+        gateway: {
+          endpoint: process.env.TEST_IPFS_GATEWAY,
+        },
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const createdPin = await pinManager.create(testPinName, TEST_CID_1);
+    try {
+      assert.strictEqual(createdPin.pin.cid, TEST_CID_1);
+      const downloadStream = await pinManager.download(createdPin.pin.cid),
+        downloadFilename = uuidv4(),
+        downloadPath = Path.resolve(os.tmpdir(), downloadFilename),
+        writeFileResult = await writeFile(downloadPath, downloadStream);
+      assert.strictEqual(typeof writeFileResult, "undefined");
+    } finally {
+      await pinManager.delete(createdPin.requestid);
+    }
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
+
+test("download pin by reference", async () => {
+  const testBucketName = `${TEST_PREFIX}-download-ref-test-pass`,
+    testPinName = `${TEST_PREFIX}-download-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+        gateway: {
+          endpoint: process.env.TEST_IPFS_GATEWAY,
+        },
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const createdPin = await pinManager.create(testPinName, TEST_CID_1);
+    try {
+      assert.strictEqual(createdPin.pin.cid, TEST_CID_1);
+      const pinToDownload = await pinManager.get(createdPin.requestid),
+        downloadStream = await pinToDownload.download(),
+        downloadFilename = uuidv4(),
+        downloadPath = Path.resolve(os.tmpdir(), downloadFilename),
+        writeFileResult = await writeFile(downloadPath, downloadStream);
+      assert.strictEqual(typeof writeFileResult, "undefined");
+    } finally {
+      await pinManager.delete(createdPin.requestid);
+    }
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
+
+test("list pins", async () => {
+  const testBucketName = `${TEST_PREFIX}-list-pin-test-pass`,
+    testPinName = `${TEST_PREFIX}-list-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const existingPinList = await pinManager.list(),
+      countToCreate = 25;
+    let createdPins = [];
+    for (let i = 0; i < countToCreate; i++) {
+      createdPins.push(
+        await pinManager.create(`${testPinName}_${i}`, TEST_CID_1),
+      );
+    }
+    try {
+      const pinList = await pinManager.list();
+      assert.strictEqual(pinList.count, existingPinList.count + countToCreate);
+    } finally {
+      for (const createdPin of createdPins) {
+        await pinManager.delete(createdPin.requestid);
+      }
+    }
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
+
+test("delete pin", async () => {
+  const testBucketName = `${TEST_PREFIX}-delete-pin-test-pass`,
+    testPinName = `${TEST_PREFIX}-delete-pin-test-pass`,
+    pinManager = new PinManager(
+      process.env.TEST_NAME_KEY || process.env.TEST_KEY,
+      process.env.TEST_NAME_SECRET || process.env.TEST_SECRET,
+      {
+        bucket: testBucketName,
+      },
+    );
+  await createBucket(testBucketName);
+  try {
+    const createdPin = await pinManager.create(testPinName, TEST_CID_1);
+    assert.strictEqual(createdPin.pin.cid, TEST_CID_1);
+    await pinManager.delete(createdPin.requestid);
+    const deletedPin = await pinManager.get(createdPin.requestid);
+    assert.strictEqual(deletedPin, false);
+  } finally {
+    await deleteBucket(testBucketName);
+  }
+});
diff --git a/test/pinManager.spec.js b/test/pinManager.spec.mjs
similarity index 100%
rename from test/pinManager.spec.js
rename to test/pinManager.spec.mjs

From d31e432a1e28696f86191ebeb7f4c3f7badba271 Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Fri, 2 Feb 2024 12:35:58 -0600
Subject: [PATCH 05/16] Fix CJS test

---
 test/objectManager.spec.cjs | 2 +-
 test/pinManager.spec.cjs    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/test/objectManager.spec.cjs b/test/objectManager.spec.cjs
index c78c610..e87b98b 100644
--- a/test/objectManager.spec.cjs
+++ b/test/objectManager.spec.cjs
@@ -3,7 +3,7 @@ const assert = require("node:assert/strict");
 const { ObjectManager, BucketManager } = require("../dist/index.js");
 const Path = require("node:path");
 const { writeFile } = require("node:fs/promises");
-const { v4: uuidv4 } = require("node:uuid");
+const { v4: uuidv4 } = require("uuid");
 const os = require("node:os");
 
 const TEST_PREFIX = Date.now();
diff --git a/test/pinManager.spec.cjs b/test/pinManager.spec.cjs
index bd45429..e11ce61 100644
--- a/test/pinManager.spec.cjs
+++ b/test/pinManager.spec.cjs
@@ -3,7 +3,7 @@ const assert = require("node:assert/strict");
 const { BucketManager, PinManager } = require("../dist/index.js");
 const Path = require("node:path");
 const { writeFile } = require("node:fs/promises");
-const { v4: uuidv4 } = require("node:uuid");
+const { v4: uuidv4 } = require("uuid");
 const os = require("node:os");
 
 const TEST_CID_1 = "QmSEu6zGwKgkQA3ZKaDnvkrwre1kkQa7eRFCbQi7waNwTT",

From 07e19167fecb370b4869c6ed13bb1f7c65e63c3d Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Fri, 2 Feb 2024 12:47:31 -0600
Subject: [PATCH 06/16] Fix package.json module path

---
 package.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/package.json b/package.json
index 97f31f6..8d8cb07 100644
--- a/package.json
+++ b/package.json
@@ -8,12 +8,12 @@
   },
   "license": "MIT",
   "main": "./dist/index.js",
-  "module": "./dist/index.mjs",
+  "module": "./src/index.js",
   "types": "./dist/index.d.ts",
   "exports": {
     ".": {
       "require": "./dist/index.js",
-      "import": "./src/index.mjs",
+      "import": "./src/index.js",
       "types": "./dist/index.d.ts"
     }
   },

From 83999d690793dad0bb8ca89d99fe83f16504f28e Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Thu, 7 Mar 2024 12:23:19 -0600
Subject: [PATCH 07/16] Attempt to fix windows limits on open files

---
 src/objectManager.js | 10 +++++++++-
 yarn.lock            |  2 ++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/objectManager.js b/src/objectManager.js
index ab00cd4..4f6a79d 100644
--- a/src/objectManager.js
+++ b/src/objectManager.js
@@ -14,7 +14,7 @@ import { car } from "@helia/car";
 import { unixfs } from "@helia/unixfs";
 import { FsBlockstore } from "blockstore-fs";
 // Utility Imports
-import { createReadStream, createWriteStream } from "node:fs";
+import { createReadStream, createWriteStream, ReadStream } from "node:fs";
 import { mkdir, rm } from "node:fs/promises";
 import os from "node:os";
 import path from "node:path";
@@ -190,13 +190,21 @@ class ObjectManager {
           blockstore: temporaryBlockstore,
         });
 
+        const fileHandlers = new Map();
         for (let sourceEntry of source) {
           sourceEntry.path =
             sourceEntry.path[0] === "/"
               ? `/${uploadUUID}${sourceEntry.path}`
               : `/${uploadUUID}/${sourceEntry.path}`;
+          if (sourceEntry.content instanceof ReadStream) {
+            fileHandlers.set(sourceEntry.path, sourceEntry.content);
+          }
         }
         for await (const entry of heliaFs.addAll(source)) {
+          if (fileHandlers.has(entry.path)) {
+            fileHandlers.get(entry.path).destroy();
+            fileHandlers.delete(entry.path);
+          }
           parsedEntries[entry.path] = entry;
         }
         const rootEntry = parsedEntries[uploadUUID];
diff --git a/yarn.lock b/yarn.lock
index 5626756..c43f70d 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2919,6 +2919,7 @@ stream-browserify@3.0.0:
     readable-stream "^3.5.0"
 
 "string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0:
+  name string-width-cjs
   version "4.2.3"
   resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"
   integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -2944,6 +2945,7 @@ string_decoder@^1.1.1:
     safe-buffer "~5.2.0"
 
 "strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+  name strip-ansi-cjs
   version "6.0.1"
   resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"
   integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==

From 08bb0fdd639949d514248a04a25c51e0c4bc705d Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Thu, 7 Mar 2024 12:23:59 -0600
Subject: [PATCH 08/16] Bump package version

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 8d8cb07..ce16fcf 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@filebase/sdk",
-  "version": "1.0.3",
+  "version": "1.0.4",
   "description": "SDK for Interacting with Filebase Services [S3(Buckets, Objects), IPFS(Gateways, Pins) IPNS(Names)]",
   "repository": {
     "type": "git",

From 37f8922df5e4788d11aa98479a4ca9b45535cc1a Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Thu, 7 Mar 2024 15:20:58 -0600
Subject: [PATCH 09/16] Merge and fix packages

---
 package.json         |   4 +
 src/objectManager.js |  62 ++++++----
 yarn.lock            | 286 +++++++++++++++++++++++++++++++++++++++++--
 3 files changed, 319 insertions(+), 33 deletions(-)

diff --git a/package.json b/package.json
index ce16fcf..1a2b434 100644
--- a/package.json
+++ b/package.json
@@ -44,6 +44,7 @@
     "clean-jsdoc-theme": "4.2.17",
     "jsdoc": "4.0.2",
     "prettier": "3.1.0",
+    "recursive-fs": "2.1.0",
     "tsup": "8.0.1",
     "typescript": "5.3.3"
   },
@@ -51,10 +52,13 @@
     "@aws-sdk/client-s3": "3.478.0",
     "@aws-sdk/lib-storage": "3.478.0",
     "@helia/car": "1.0.4",
+    "@helia/mfs": "3.0.1",
     "@helia/unixfs": "1.4.3",
     "@ipld/car": "5.2.4",
     "axios": "1.6.2",
     "blockstore-fs": "1.1.8",
+    "datastore-core": "9.2.9",
+    "p-queue": "8.0.1",
     "uuid": "9.0.1"
   }
 }
diff --git a/src/objectManager.js b/src/objectManager.js
index 4f6a79d..ed8a081 100644
--- a/src/objectManager.js
+++ b/src/objectManager.js
@@ -11,16 +11,19 @@ import { Upload } from "@aws-sdk/lib-storage";
 // Helia Imports
 import { CarWriter } from "@ipld/car";
 import { car } from "@helia/car";
+import { mfs } from "@helia/mfs";
 import { unixfs } from "@helia/unixfs";
 import { FsBlockstore } from "blockstore-fs";
+import { MemoryDatastore } from "datastore-core";
 // Utility Imports
 import { createReadStream, createWriteStream, ReadStream } from "node:fs";
-import { mkdir, rm } from "node:fs/promises";
+import { mkdir, rm, open } from "node:fs/promises";
 import os from "node:os";
 import path from "node:path";
 import { Readable } from "node:stream";
 import { v4 as uuidv4 } from "uuid";
 import { downloadFromGateway } from "./helpers.js";
+import PQueue from "p-queue";
 
 /** Interacts with an S3 client to perform various operations on objects in a bucket. */
 class ObjectManager {
@@ -184,32 +187,47 @@ class ObjectManager {
         );
         temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
         await mkdir(temporaryBlockstoreDir, { recursive: true });
-        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir);
+        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir),
+          temporaryDatastore = new MemoryDatastore();
 
         const heliaFs = unixfs({
-          blockstore: temporaryBlockstore,
-        });
-
-        const fileHandlers = new Map();
-        for (let sourceEntry of source) {
-          sourceEntry.path =
-            sourceEntry.path[0] === "/"
-              ? `/${uploadUUID}${sourceEntry.path}`
-              : `/${uploadUUID}/${sourceEntry.path}`;
-          if (sourceEntry.content instanceof ReadStream) {
-            fileHandlers.set(sourceEntry.path, sourceEntry.content);
-          }
+            blockstore: temporaryBlockstore,
+            datastore: temporaryDatastore,
+          }),
+          heliaMfs = mfs({
+            blockstore: temporaryBlockstore,
+            datastore: temporaryDatastore,
+          });
+        const queue = new PQueue({ concurrency: os.cpus().length });
+        let parsePromises = [];
+        for (const entry of source) {
+          parsePromises.push(
+            (async () => {
+              let fileHandle;
+              try {
+                await queue.add(async () => {
+                  if (entry.type === "import") {
+                    fileHandle = await open(entry.content);
+                    entry.content = await fileHandle.createReadStream();
+                  }
+                  parsedEntries[entry.path] = await heliaFs.addFile({
+                    path: entry.path,
+                    content: entry.content,
+                  });
+                });
+              } finally {
+                if (typeof fileHandle !== "undefined") {
+                  await fileHandle.close();
+                }
+              }
+            })(),
+          );
         }
-        for await (const entry of heliaFs.addAll(source)) {
-          if (fileHandlers.has(entry.path)) {
-            fileHandlers.get(entry.path).destroy();
-            fileHandlers.delete(entry.path);
-          }
-          parsedEntries[entry.path] = entry;
-        }
-        const rootEntry = parsedEntries[uploadUUID];
+        await Promise.all(parsePromises);
+        parsedEntries["/"] = await heliaMfs.stat("/");
 
         // Get carFile stream here
+        const rootEntry = parsedEntries["/"];
         const carExporter = car({ blockstore: temporaryBlockstore }),
           { writer, out } = CarWriter.create([rootEntry.cid]);
 
diff --git a/yarn.lock b/yarn.lock
index c43f70d..22a6e6d 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -756,6 +756,33 @@
     multiformats "^12.0.1"
     progress-events "^1.0.0"
 
+"@helia/interface@^4.0.1":
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/@helia/interface/-/interface-4.0.1.tgz#45ecc2aab4a03503bcc0cb269094741240a17b65"
+  integrity sha512-tp9dLlog9x5C/+gE4FF452slemnTQtpS3RneGOatx51nDaE76/HHOmW1hIAASuFtjPHeg/R9BAM8UIX4DEffsg==
+  dependencies:
+    "@libp2p/interface" "^1.1.4"
+    interface-blockstore "^5.2.10"
+    interface-datastore "^8.2.11"
+    interface-store "^5.1.8"
+    multiformats "^13.1.0"
+    progress-events "^1.0.0"
+
+"@helia/mfs@3.0.1":
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/@helia/mfs/-/mfs-3.0.1.tgz#a24b26671d4543997594f93678905a7244c79bf7"
+  integrity sha512-2/ZlP1jQ787Tb1P86LBBL2UBSxX7sdvD/kAzbWRxFELJquURVgiJd3stFlFsv6PSN/7x0991H8WZAlKZ0oZt3Q==
+  dependencies:
+    "@helia/unixfs" "^3.0.1"
+    "@libp2p/interfaces" "^3.3.2"
+    "@libp2p/logger" "^4.0.7"
+    interface-blockstore "^5.2.10"
+    interface-datastore "^8.2.11"
+    ipfs-unixfs "^11.1.3"
+    ipfs-unixfs-exporter "^13.5.0"
+    ipfs-unixfs-importer "^15.2.4"
+    multiformats "^13.1.0"
+
 "@helia/unixfs@1.4.3":
   version "1.4.3"
   resolved "https://registry.npmjs.org/@helia/unixfs/-/unixfs-1.4.3.tgz"
@@ -779,6 +806,31 @@
     progress-events "^1.0.0"
     sparse-array "^1.3.2"
 
+"@helia/unixfs@^3.0.1":
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/@helia/unixfs/-/unixfs-3.0.1.tgz#9643ff1d531990664c7a9e5c18fad99143ef4b99"
+  integrity sha512-NgT0gk3oswUK+8IEnJDl6IGv5catSQq6IQ1VrO68ZBpJPpo49CyFCqN5CO8IC3QL3wi4JyJ0sTLXemMVkUsTVA==
+  dependencies:
+    "@helia/interface" "^4.0.1"
+    "@ipld/dag-pb" "^4.1.0"
+    "@libp2p/interface" "^1.1.4"
+    "@libp2p/logger" "^4.0.7"
+    "@multiformats/murmur3" "^2.1.8"
+    hamt-sharding "^3.0.6"
+    interface-blockstore "^5.2.10"
+    ipfs-unixfs "^11.1.3"
+    ipfs-unixfs-exporter "^13.5.0"
+    ipfs-unixfs-importer "^15.2.4"
+    it-all "^3.0.4"
+    it-glob "^2.0.6"
+    it-last "^3.0.4"
+    it-pipe "^3.0.1"
+    merge-options "^3.0.4"
+    multiformats "^13.1.0"
+    progress-events "^1.0.0"
+    sparse-array "^1.3.2"
+    uint8arrays "^5.0.2"
+
 "@ipld/car@5.2.4":
   version "5.2.4"
   resolved "https://registry.npmjs.org/@ipld/car/-/car-5.2.4.tgz"
@@ -807,6 +859,14 @@
     cborg "^4.0.0"
     multiformats "^13.0.0"
 
+"@ipld/dag-json@^10.1.7":
+  version "10.2.0"
+  resolved "https://registry.yarnpkg.com/@ipld/dag-json/-/dag-json-10.2.0.tgz#32468182ce510284aae75a07e33b3a0da284994e"
+  integrity sha512-O9YLUrl3d3WbVz7v1WkajFkyfOLEe2Fep+wor4fgVe0ywxzrivrj437NiPcVyB+2EDdFn/Q7tCHFf8YVhDf8ZA==
+  dependencies:
+    cborg "^4.0.0"
+    multiformats "^13.1.0"
+
 "@ipld/dag-pb@^4.0.0":
   version "4.0.6"
   resolved "https://registry.npmjs.org/@ipld/dag-pb/-/dag-pb-4.0.6.tgz"
@@ -814,6 +874,13 @@
   dependencies:
     multiformats "^12.0.1"
 
+"@ipld/dag-pb@^4.1.0":
+  version "4.1.0"
+  resolved "https://registry.yarnpkg.com/@ipld/dag-pb/-/dag-pb-4.1.0.tgz#4ebec92eeb9e8f317b8ef971221c6dac7b12b302"
+  integrity sha512-LJU451Drqs5zjFm7jI4Hs3kHlilOqkjcSfPiQgVsZnWaYb2C7YdfhnclrVn/X+ucKejlU9BL3+gXFCZUXkMuCg==
+  dependencies:
+    multiformats "^13.1.0"
+
 "@isaacs/cliui@^8.0.2":
   version "8.0.2"
   resolved "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz"
@@ -898,7 +965,19 @@
     multiformats "^12.1.3"
     uint8arraylist "^2.4.3"
 
-"@libp2p/interfaces@^3.3.1":
+"@libp2p/interface@^1.1.4":
+  version "1.1.4"
+  resolved "https://registry.yarnpkg.com/@libp2p/interface/-/interface-1.1.4.tgz#21c7bbbe7628419d1e4902f0c953db1423b0f40f"
+  integrity sha512-gJXQycTF50tI02X/IlReAav4XoGPs3Yr917vNXsTUsZQRzQaPjbvKfXqA5hkLFpZ1lnxQ8wto/EVw4ca4XaL1A==
+  dependencies:
+    "@multiformats/multiaddr" "^12.1.14"
+    it-pushable "^3.2.3"
+    it-stream-types "^2.0.1"
+    multiformats "^13.1.0"
+    progress-events "^1.0.0"
+    uint8arraylist "^2.4.8"
+
+"@libp2p/interfaces@^3.3.1", "@libp2p/interfaces@^3.3.2":
   version "3.3.2"
   resolved "https://registry.npmjs.org/@libp2p/interfaces/-/interfaces-3.3.2.tgz"
   integrity sha512-p/M7plbrxLzuQchvNwww1Was7ZeGE2NaOFulMaZBYIihU8z3fhaV+a033OqnC/0NTX/yhfdNOG7znhYq3XoR/g==
@@ -925,6 +1004,17 @@
     interface-datastore "^8.2.0"
     multiformats "^12.1.3"
 
+"@libp2p/logger@^4.0.6", "@libp2p/logger@^4.0.7":
+  version "4.0.7"
+  resolved "https://registry.yarnpkg.com/@libp2p/logger/-/logger-4.0.7.tgz#b5e82135f5c8a6f275c1b2e183333db956f3ed90"
+  integrity sha512-oyICns7G18S4eDhbFHUwZ7gLQnZTBVQtUMmMgEmrs8LnQu2GvXADxmQAPPkKtLNSCvRudg4hN3hP04Y+vNvlBQ==
+  dependencies:
+    "@libp2p/interface" "^1.1.4"
+    "@multiformats/multiaddr" "^12.1.14"
+    debug "^4.3.4"
+    interface-datastore "^8.2.11"
+    multiformats "^13.1.0"
+
 "@multiformats/multiaddr@^12.1.0", "@multiformats/multiaddr@^12.1.10", "@multiformats/multiaddr@^12.1.5":
   version "12.1.11"
   resolved "https://registry.npmjs.org/@multiformats/multiaddr/-/multiaddr-12.1.11.tgz"
@@ -938,6 +1028,19 @@
     uint8-varint "^2.0.1"
     uint8arrays "^4.0.2"
 
+"@multiformats/multiaddr@^12.1.14":
+  version "12.1.14"
+  resolved "https://registry.yarnpkg.com/@multiformats/multiaddr/-/multiaddr-12.1.14.tgz#d021072667f4dfc566cdddcb45feee60fecc8cfd"
+  integrity sha512-1C0Mo73chzu7pTzTquuKs5vUtw70jhqg1i6pUNznGb0WV6RFa6vyB+D697Os5+cLx+DiItrAY6VzMtlGQsMzYg==
+  dependencies:
+    "@chainsafe/is-ip" "^2.0.1"
+    "@chainsafe/netmask" "^2.0.0"
+    "@libp2p/interface" "^1.0.0"
+    dns-over-http-resolver "^3.0.2"
+    multiformats "^13.0.0"
+    uint8-varint "^2.0.1"
+    uint8arrays "^5.0.0"
+
 "@multiformats/murmur3@^2.0.0", "@multiformats/murmur3@^2.1.2":
   version "2.1.7"
   resolved "https://registry.npmjs.org/@multiformats/murmur3/-/murmur3-2.1.7.tgz"
@@ -946,6 +1049,14 @@
     multiformats "^12.0.1"
     murmurhash3js-revisited "^3.0.0"
 
+"@multiformats/murmur3@^2.1.8":
+  version "2.1.8"
+  resolved "https://registry.yarnpkg.com/@multiformats/murmur3/-/murmur3-2.1.8.tgz#81c1c15b6391109f3febfca4b3205196615a04e9"
+  integrity sha512-6vId1C46ra3R1sbJUOFCZnsUIveR9oF20yhPmAFxPm0JfrX3/ZRCgP3YDrBzlGoEppOXnA9czHeYc0T9mB6hbA==
+  dependencies:
+    multiformats "^13.0.0"
+    murmurhash3js-revisited "^3.0.0"
+
 "@nodelib/fs.scandir@2.1.5":
   version "2.1.5"
   resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz"
@@ -1819,6 +1930,24 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.3:
     shebang-command "^2.0.0"
     which "^2.0.1"
 
+datastore-core@9.2.9:
+  version "9.2.9"
+  resolved "https://registry.yarnpkg.com/datastore-core/-/datastore-core-9.2.9.tgz#74b4dd53d4597b59038488ba5f92a7f81769f8df"
+  integrity sha512-wraWTPsbtdE7FFaVo3pwPuTB/zXsgwGGAm8BgBYwYAuzZCTS0MfXmd/HH1vR9s0/NFFjOVmBkGiWCvKxZ+QjVw==
+  dependencies:
+    "@libp2p/logger" "^4.0.6"
+    err-code "^3.0.1"
+    interface-datastore "^8.0.0"
+    interface-store "^5.0.0"
+    it-drain "^3.0.5"
+    it-filter "^3.0.4"
+    it-map "^3.0.5"
+    it-merge "^3.0.3"
+    it-pipe "^3.0.1"
+    it-pushable "^3.2.3"
+    it-sort "^3.0.4"
+    it-take "^3.0.4"
+
 debug@^4.3.1, debug@^4.3.4:
   version "4.3.4"
   resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz"
@@ -1846,6 +1975,14 @@ dns-over-http-resolver@3.0.0:
     debug "^4.3.4"
     receptacle "^1.3.2"
 
+dns-over-http-resolver@^3.0.2:
+  version "3.0.2"
+  resolved "https://registry.yarnpkg.com/dns-over-http-resolver/-/dns-over-http-resolver-3.0.2.tgz#71644cbab3c5a94f53e357da68771e0781ea3407"
+  integrity sha512-5batkHOjCkuAfrFa+IPmt3jyeZqLtSMfAo1HQp3hfwtzgUwHooecTFplnYC093u5oRNL4CQHCXh3OfER7+vWrA==
+  dependencies:
+    debug "^4.3.4"
+    receptacle "^1.3.2"
+
 dot-case@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz"
@@ -2069,6 +2206,14 @@ hamt-sharding@^3.0.0, hamt-sharding@^3.0.2:
     sparse-array "^1.3.1"
     uint8arrays "^4.0.2"
 
+hamt-sharding@^3.0.6:
+  version "3.0.6"
+  resolved "https://registry.yarnpkg.com/hamt-sharding/-/hamt-sharding-3.0.6.tgz#3643107a3021af66ac95684aec87b196add5ba57"
+  integrity sha512-nZeamxfymIWLpVcAN0CRrb7uVq3hCOGj9IcL6NMA6VVCVWqj+h9Jo/SmaWuS92AEDf1thmHsM5D5c70hM3j2Tg==
+  dependencies:
+    sparse-array "^1.3.1"
+    uint8arrays "^5.0.1"
+
 html-minifier-terser@^7.2.0:
   version "7.2.0"
   resolved "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-7.2.0.tgz"
@@ -2110,6 +2255,14 @@ interface-blockstore@^5.0.0:
     interface-store "^5.0.0"
     multiformats "^12.0.1"
 
+interface-blockstore@^5.2.10:
+  version "5.2.10"
+  resolved "https://registry.yarnpkg.com/interface-blockstore/-/interface-blockstore-5.2.10.tgz#b01101dd70eda2ab713cc00a492921949934c861"
+  integrity sha512-9K48hTvBCGsKVD3pF4ILgDcf+W2P/gq0oxLcsHGB6E6W6nDutYkzR+7k7bCs9REHrBEfKzcVDEKieiuNM9WRZg==
+  dependencies:
+    interface-store "^5.0.0"
+    multiformats "^13.0.1"
+
 interface-datastore@^8.0.0, interface-datastore@^8.2.0:
   version "8.2.9"
   resolved "https://registry.npmjs.org/interface-datastore/-/interface-datastore-8.2.9.tgz"
@@ -2118,11 +2271,24 @@ interface-datastore@^8.0.0, interface-datastore@^8.2.0:
     interface-store "^5.0.0"
     uint8arrays "^5.0.0"
 
+interface-datastore@^8.2.11:
+  version "8.2.11"
+  resolved "https://registry.yarnpkg.com/interface-datastore/-/interface-datastore-8.2.11.tgz#1d555ce6218ab6cba6291fc361debe9713590207"
+  integrity sha512-9E0iXehfp/j0UbZ2mvlYB4K9pP7uQBCppfuy8WHs1EHF6wLQrM9+zwyX+8Qt6HnH4GKZRyXX/CNXm6oD4+QYgA==
+  dependencies:
+    interface-store "^5.0.0"
+    uint8arrays "^5.0.2"
+
 interface-store@^5.0.0, interface-store@^5.0.1, interface-store@^5.1.0:
   version "5.1.5"
   resolved "https://registry.npmjs.org/interface-store/-/interface-store-5.1.5.tgz"
   integrity sha512-X0KnJBk3o+YL13MxZBMwa88/b3Mdrpm0yPzkSTKDDVn9BSPH7UK6W+ZtIPO2bxKOQVmq7zqOwAnYnpfqWjb6/g==
 
+interface-store@^5.1.8:
+  version "5.1.8"
+  resolved "https://registry.yarnpkg.com/interface-store/-/interface-store-5.1.8.tgz#94bf867d165b5c904cccf09adeba215a5b0f459e"
+  integrity sha512-7na81Uxkl0vqk0CBPO5PvyTkdaJBaezwUJGsMOz7riPOq0rJt+7W31iaopaMICWea/iykUsvNlPx/Tc+MxC3/w==
+
 ipfs-bitswap@^19.0.0:
   version "19.0.2"
   resolved "https://registry.npmjs.org/ipfs-bitswap/-/ipfs-bitswap-19.0.2.tgz"
@@ -2175,6 +2341,29 @@ ipfs-unixfs-exporter@^13.1.0:
     progress-events "^1.0.0"
     uint8arrays "^4.0.2"
 
+ipfs-unixfs-exporter@^13.5.0:
+  version "13.5.0"
+  resolved "https://registry.yarnpkg.com/ipfs-unixfs-exporter/-/ipfs-unixfs-exporter-13.5.0.tgz#48fafb272489cc2bf05757c16f3f44fa241ee038"
+  integrity sha512-s1eWXzoyhQFNEAB1p+QE3adjhW+lBdgpORmmjiCLiruHs5z7T5zsAgRVcWpM8LWYhq2flRtJHObb7Hg73J+oLQ==
+  dependencies:
+    "@ipld/dag-cbor" "^9.0.0"
+    "@ipld/dag-json" "^10.1.7"
+    "@ipld/dag-pb" "^4.0.0"
+    "@multiformats/murmur3" "^2.0.0"
+    err-code "^3.0.1"
+    hamt-sharding "^3.0.0"
+    interface-blockstore "^5.0.0"
+    ipfs-unixfs "^11.0.0"
+    it-filter "^3.0.2"
+    it-last "^3.0.2"
+    it-map "^3.0.3"
+    it-parallel "^3.0.0"
+    it-pipe "^3.0.1"
+    it-pushable "^3.1.0"
+    multiformats "^13.0.0"
+    p-queue "^8.0.1"
+    progress-events "^1.0.0"
+
 ipfs-unixfs-importer@^15.1.0:
   version "15.2.1"
   resolved "https://registry.npmjs.org/ipfs-unixfs-importer/-/ipfs-unixfs-importer-15.2.1.tgz"
@@ -2197,6 +2386,28 @@ ipfs-unixfs-importer@^15.1.0:
     uint8arraylist "^2.4.3"
     uint8arrays "^4.0.2"
 
+ipfs-unixfs-importer@^15.2.4:
+  version "15.2.4"
+  resolved "https://registry.yarnpkg.com/ipfs-unixfs-importer/-/ipfs-unixfs-importer-15.2.4.tgz#3577b21132c95693cfa9122c9dc432a565e02218"
+  integrity sha512-3b7d/pLPwGvAEXvpJ0WyYlbn2pb2j7qY6FayuMSzbZNdFxdJ82l6VkJ9vK1d/G/AHx+0ZfB06eSdGKjX0GVCAg==
+  dependencies:
+    "@ipld/dag-pb" "^4.0.0"
+    "@multiformats/murmur3" "^2.0.0"
+    err-code "^3.0.1"
+    hamt-sharding "^3.0.0"
+    interface-blockstore "^5.0.0"
+    interface-store "^5.0.1"
+    ipfs-unixfs "^11.0.0"
+    it-all "^3.0.2"
+    it-batch "^3.0.2"
+    it-first "^3.0.2"
+    it-parallel-batch "^3.0.1"
+    multiformats "^13.0.0"
+    progress-events "^1.0.0"
+    rabin-wasm "^0.1.4"
+    uint8arraylist "^2.4.3"
+    uint8arrays "^5.0.0"
+
 ipfs-unixfs@^11.0.0:
   version "11.1.0"
   resolved "https://registry.npmjs.org/ipfs-unixfs/-/ipfs-unixfs-11.1.0.tgz"
@@ -2206,6 +2417,15 @@ ipfs-unixfs@^11.0.0:
     protons-runtime "^5.0.0"
     uint8arraylist "^2.4.3"
 
+ipfs-unixfs@^11.1.3:
+  version "11.1.3"
+  resolved "https://registry.yarnpkg.com/ipfs-unixfs/-/ipfs-unixfs-11.1.3.tgz#b53f36d8d34022516d6cfead4305839712c1dab2"
+  integrity sha512-sy6Koojwm/EcM8yvDlycRYA89C8wIcLcGTMMpqnCPUtqTCdl+JxsuPNCBgAu7tmO8Nipm7Tv7f0g/erxTGKKRA==
+  dependencies:
+    err-code "^3.0.1"
+    protons-runtime "^5.0.0"
+    uint8arraylist "^2.4.3"
+
 is-binary-path@~2.1.0:
   version "2.1.0"
   resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz"
@@ -2250,7 +2470,7 @@ isexe@^2.0.0:
   resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz"
   integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
 
-it-all@^3.0.2:
+it-all@^3.0.0, it-all@^3.0.2, it-all@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/it-all/-/it-all-3.0.4.tgz"
   integrity sha512-UMiy0i9DqCHBdWvMbzdYvVGa5/w4t1cc4nchpbnjdLhklglv8mQeEYnii0gvKESJuL1zV32Cqdb33R6/GPfxpQ==
@@ -2260,12 +2480,12 @@ it-batch@^3.0.0, it-batch@^3.0.2:
   resolved "https://registry.npmjs.org/it-batch/-/it-batch-3.0.4.tgz"
   integrity sha512-WRu2mqOYIs+T9k7+yxSK9VJdk0UE4R0jKQsWQcti5c6vhb1FhjC2+yCB5XBrctQ9edNfCMU/wVzdDj8qSwimbA==
 
-it-drain@^3.0.1:
+it-drain@^3.0.1, it-drain@^3.0.5:
   version "3.0.5"
   resolved "https://registry.npmjs.org/it-drain/-/it-drain-3.0.5.tgz"
   integrity sha512-qYFe4SWdvs9oJGUY5bSjvmiLUMLzFEODNOQUdYdCIkuIgQF+AUB2INhM4yQ09buJ2rhHKDFxvTD/+yUq6qg0XA==
 
-it-filter@^3.0.0, it-filter@^3.0.2:
+it-filter@^3.0.0, it-filter@^3.0.2, it-filter@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/it-filter/-/it-filter-3.0.4.tgz"
   integrity sha512-e0sz+st4sudK/zH6GZ/gRTRP8A/ADuJFCYDmRgMbZvR79y5+v4ZXav850bBZk5wL9zXaYZFxS1v/6Qi+Vjwh5g==
@@ -2284,14 +2504,14 @@ it-foreach@^2.0.2:
   dependencies:
     it-peekable "^3.0.0"
 
-it-glob@^2.0.1, it-glob@^2.0.4:
+it-glob@^2.0.1, it-glob@^2.0.4, it-glob@^2.0.6:
   version "2.0.6"
-  resolved "https://registry.npmjs.org/it-glob/-/it-glob-2.0.6.tgz"
+  resolved "https://registry.yarnpkg.com/it-glob/-/it-glob-2.0.6.tgz#616a5b008aa456e82be9a29b8c7dd4dc8ef81acb"
   integrity sha512-4C6ccz4nhqrq7yZMzBr3MsKhyL+rlnLXIPceyGG6ogl3Lx3eeWMv1RtlySJwFi6q+jVcPyTpeYt/xftwI2JEQQ==
   dependencies:
     minimatch "^9.0.0"
 
-it-last@^3.0.1, it-last@^3.0.2:
+it-last@^3.0.1, it-last@^3.0.2, it-last@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/it-last/-/it-last-3.0.4.tgz"
   integrity sha512-Ns+KTsQWhs0KCvfv5X3Ck3lpoYxHcp4zUp4d+AOdmC8cXXqDuoZqAjfWhgCbxJubXyIYWdfE2nRcfWqgvZHP8Q==
@@ -2308,14 +2528,14 @@ it-length-prefixed@^9.0.0:
     uint8arraylist "^2.0.0"
     uint8arrays "^4.0.2"
 
-it-map@^3.0.1, it-map@^3.0.2, it-map@^3.0.3:
+it-map@^3.0.1, it-map@^3.0.2, it-map@^3.0.3, it-map@^3.0.5:
   version "3.0.5"
   resolved "https://registry.npmjs.org/it-map/-/it-map-3.0.5.tgz"
   integrity sha512-hB0TDXo/h4KSJJDSRLgAPmDroiXP6Fx1ck4Bzl3US9hHfZweTKsuiP0y4gXuTMcJlS6vj0bb+f70rhkD47ZA3w==
   dependencies:
     it-peekable "^3.0.0"
 
-it-merge@^3.0.0, it-merge@^3.0.1:
+it-merge@^3.0.0, it-merge@^3.0.1, it-merge@^3.0.3:
   version "3.0.3"
   resolved "https://registry.npmjs.org/it-merge/-/it-merge-3.0.3.tgz"
   integrity sha512-FYVU15KC5pb/GQX1Ims+lee8d4pdqGVCpWr0lkNj8o4xuNo7jY71k6GuEiWdP+T7W1bJqewSxX5yoTy5yZpRVA==
@@ -2350,7 +2570,7 @@ it-pipe@^3.0.1:
     it-pushable "^3.1.2"
     it-stream-types "^2.0.1"
 
-it-pushable@^3.0.0, it-pushable@^3.1.0, it-pushable@^3.1.2, it-pushable@^3.2.0, it-pushable@^3.2.1:
+it-pushable@^3.0.0, it-pushable@^3.1.0, it-pushable@^3.1.2, it-pushable@^3.2.0, it-pushable@^3.2.1, it-pushable@^3.2.3:
   version "3.2.3"
   resolved "https://registry.npmjs.org/it-pushable/-/it-pushable-3.2.3.tgz"
   integrity sha512-gzYnXYK8Y5t5b/BnJUr7glfQLO4U5vyb05gPx/TyTw+4Bv1zM9gFk4YsOrnulWefMewlphCjKkakFvj1y99Tcg==
@@ -2365,12 +2585,19 @@ it-reader@^6.0.1:
     it-stream-types "^2.0.1"
     uint8arraylist "^2.0.0"
 
+it-sort@^3.0.4:
+  version "3.0.4"
+  resolved "https://registry.yarnpkg.com/it-sort/-/it-sort-3.0.4.tgz#250152bf4abf3fa9572954305424bafb3199fa63"
+  integrity sha512-tvnC93JZZWjX4UxALy0asow0dzXabkoaRbrPJKClTKhNCqw4gzHr+H5axf1gohcthedRRkqd/ae+wl7WqoxFhw==
+  dependencies:
+    it-all "^3.0.0"
+
 it-stream-types@^2.0.1:
   version "2.0.1"
   resolved "https://registry.npmjs.org/it-stream-types/-/it-stream-types-2.0.1.tgz"
   integrity sha512-6DmOs5r7ERDbvS4q8yLKENcj6Yecr7QQTqWApbZdfAUTEC947d+PEha7PCqhm//9oxaLYL7TWRekwhoXl2s6fg==
 
-it-take@^3.0.1:
+it-take@^3.0.1, it-take@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/it-take/-/it-take-3.0.4.tgz"
   integrity sha512-RG8HDjAZlvkzz5Nav4xq6gK5zNT+Ff1UTIf+CrSJW8nIl6N1FpBH5e7clUshiCn+MmmMoSdIEpw4UaTolszxhA==
@@ -2594,6 +2821,11 @@ multiformats@^13.0.0:
   resolved "https://registry.npmjs.org/multiformats/-/multiformats-13.0.1.tgz"
   integrity sha512-bt3R5iXe2O8xpp3wkmQhC73b/lC4S2ihU8Dndwcsysqbydqb8N+bpP116qMcClZ17g58iSIwtXUTcg2zT4sniA==
 
+multiformats@^13.0.1, multiformats@^13.1.0:
+  version "13.1.0"
+  resolved "https://registry.yarnpkg.com/multiformats/-/multiformats-13.1.0.tgz#5aa9d2175108a448fc3bdb54ba8a3d0b6cab3ac3"
+  integrity sha512-HzdtdBwxsIkzpeXzhQ5mAhhuxcHbjEHH+JQoxt7hG/2HGFjjwyolLo7hbaexcnhoEuV4e0TNJ8kkpMjiEYY4VQ==
+
 murmurhash3js-revisited@^3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/murmurhash3js-revisited/-/murmurhash3js-revisited-3.0.0.tgz"
@@ -2652,6 +2884,14 @@ p-defer@^4.0.0:
   resolved "https://registry.npmjs.org/p-defer/-/p-defer-4.0.0.tgz"
   integrity sha512-Vb3QRvQ0Y5XnF40ZUWW7JfLogicVh/EnA5gBIvKDJoYpeI82+1E3AlB9yOcKFS0AhHrWVnAQO39fbR0G99IVEQ==
 
+p-queue@8.0.1, p-queue@^8.0.1:
+  version "8.0.1"
+  resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-8.0.1.tgz#718b7f83836922ef213ddec263ff4223ce70bef8"
+  integrity sha512-NXzu9aQJTAzbBqOt2hwsR63ea7yvxJc0PwN/zobNAudYfb1B7R08SzB4TsLeSbUCuG467NhnoT0oO6w1qRO+BA==
+  dependencies:
+    eventemitter3 "^5.0.1"
+    p-timeout "^6.1.2"
+
 p-queue@^7.3.0, p-queue@^7.3.4:
   version "7.4.1"
   resolved "https://registry.npmjs.org/p-queue/-/p-queue-7.4.1.tgz"
@@ -2665,6 +2905,11 @@ p-timeout@^5.0.2:
   resolved "https://registry.npmjs.org/p-timeout/-/p-timeout-5.1.0.tgz"
   integrity sha512-auFDyzzzGZZZdHz3BtET9VEz0SE/uMEAx7uWfGPucfzEwwe/xH0iVeZibQmANYE/hp9T2+UUZT5m+BKyrDp3Ew==
 
+p-timeout@^6.1.2:
+  version "6.1.2"
+  resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-6.1.2.tgz#22b8d8a78abf5e103030211c5fc6dee1166a6aa5"
+  integrity sha512-UbD77BuZ9Bc9aABo74gfXhNvzC9Tx7SxtHSh1fxvx3jTLLYvmVhiQZZrJzqqU0jKbN32kb5VOKiLEQI/3bIjgQ==
+
 param-case@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz"
@@ -2790,6 +3035,11 @@ receptacle@^1.3.2:
   dependencies:
     ms "^2.1.1"
 
+recursive-fs@2.1.0:
+  version "2.1.0"
+  resolved "https://registry.yarnpkg.com/recursive-fs/-/recursive-fs-2.1.0.tgz#1e20cf7836b292ed81208c4817550a58ad0e15ff"
+  integrity sha512-oed3YruYsD52Mi16s/07eYblQOLi5dTtxpIJNdfCEJ7S5v8dDgVcycar0pRWf4IBuPMIkoctC8RTqGJzIKMNAQ==
+
 relateurl@^0.2.7:
   version "0.2.7"
   resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz"
@@ -3102,6 +3352,13 @@ uint8arraylist@^2.0.0, uint8arraylist@^2.4.3:
   dependencies:
     uint8arrays "^4.0.2"
 
+uint8arraylist@^2.4.8:
+  version "2.4.8"
+  resolved "https://registry.yarnpkg.com/uint8arraylist/-/uint8arraylist-2.4.8.tgz#5a4d17f4defd77799cb38e93fd5db0f0dceddc12"
+  integrity sha512-vc1PlGOzglLF0eae1M8mLRTBivsvrGsdmJ5RbK3e+QRvRLOZfZhQROTwH/OfyF3+ZVUg9/8hE8bmKP2CvP9quQ==
+  dependencies:
+    uint8arrays "^5.0.1"
+
 uint8arrays@^4.0.2, uint8arrays@^4.0.6:
   version "4.0.10"
   resolved "https://registry.npmjs.org/uint8arrays/-/uint8arrays-4.0.10.tgz"
@@ -3116,6 +3373,13 @@ uint8arrays@^5.0.0:
   dependencies:
     multiformats "^12.0.1"
 
+uint8arrays@^5.0.1, uint8arrays@^5.0.2:
+  version "5.0.2"
+  resolved "https://registry.yarnpkg.com/uint8arrays/-/uint8arrays-5.0.2.tgz#f05479bcd521d37c2e7710b24132a460b0ac80e3"
+  integrity sha512-S0GaeR+orZt7LaqzTRs4ZP8QqzAauJ+0d4xvP2lJTA99jIkKsE2FgDs4tGF/K/z5O9I/2W5Yvrh7IuqNeYH+0Q==
+  dependencies:
+    multiformats "^13.0.0"
+
 underscore@~1.13.2:
   version "1.13.6"
   resolved "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz"

From 719d2500a42eb49084440de90628560c98ba016e Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Thu, 7 Mar 2024 15:31:26 -0600
Subject: [PATCH 10/16] Fixes

---
 src/objectManager.js |  2 +-
 tsup.config.js       | 18 +++++++++++++-----
 2 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/src/objectManager.js b/src/objectManager.js
index ed8a081..af4897c 100644
--- a/src/objectManager.js
+++ b/src/objectManager.js
@@ -16,7 +16,7 @@ import { unixfs } from "@helia/unixfs";
 import { FsBlockstore } from "blockstore-fs";
 import { MemoryDatastore } from "datastore-core";
 // Utility Imports
-import { createReadStream, createWriteStream, ReadStream } from "node:fs";
+import { createReadStream, createWriteStream } from "node:fs";
 import { mkdir, rm, open } from "node:fs/promises";
 import os from "node:os";
 import path from "node:path";
diff --git a/tsup.config.js b/tsup.config.js
index 0352edd..a088dea 100644
--- a/tsup.config.js
+++ b/tsup.config.js
@@ -1,11 +1,19 @@
-import { defineConfig } from 'tsup'
+import { defineConfig } from "tsup";
 
 export default defineConfig({
-  entry: ['src/index.js'],
+  entry: ["src/index.js"],
   splitting: false,
   sourcemap: false,
-  noExternal: ['@ipld/car', '@helia/car', '@helia/unixfs', 'blockstore-fs'],
+  noExternal: [
+    "@ipld/car",
+    "@helia/car",
+    "@helia/unixfs",
+    "@helia/mfs",
+    "blockstore-fs",
+    "datastore-core",
+    "p-queue",
+  ],
   dts: true,
-  format: ['cjs'],
+  format: ["cjs"],
   clean: true,
-})
+});

From 36723e03b2a15bfc7caa03203f6be9d3f5a5d74a Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Fri, 8 Mar 2024 16:53:34 -0600
Subject: [PATCH 11/16] Separates UnixFS and MFS operations to achieve a more
 consistent packing experience on Windows

---
 src/objectManager.js | 103 +++++++++++++++++++++++++++++++------------
 1 file changed, 74 insertions(+), 29 deletions(-)

diff --git a/src/objectManager.js b/src/objectManager.js
index af4897c..bf4292d 100644
--- a/src/objectManager.js
+++ b/src/objectManager.js
@@ -17,7 +17,7 @@ import { FsBlockstore } from "blockstore-fs";
 import { MemoryDatastore } from "datastore-core";
 // Utility Imports
 import { createReadStream, createWriteStream } from "node:fs";
-import { mkdir, rm, open } from "node:fs/promises";
+import { mkdir, rm } from "node:fs/promises";
 import os from "node:os";
 import path from "node:path";
 import { Readable } from "node:stream";
@@ -190,40 +190,61 @@ class ObjectManager {
         const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir),
           temporaryDatastore = new MemoryDatastore();
 
+        let createFilePromises = [];
+        let createdFiles = new Map();
         const heliaFs = unixfs({
-            blockstore: temporaryBlockstore,
-            datastore: temporaryDatastore,
-          }),
-          heliaMfs = mfs({
-            blockstore: temporaryBlockstore,
-            datastore: temporaryDatastore,
-          });
+          blockstore: temporaryBlockstore,
+        });
         const queue = new PQueue({ concurrency: os.cpus().length });
-        let parsePromises = [];
         for (const entry of source) {
-          parsePromises.push(
-            (async () => {
-              let fileHandle;
-              try {
-                await queue.add(async () => {
-                  if (entry.type === "import") {
-                    fileHandle = await open(entry.content);
-                    entry.content = await fileHandle.createReadStream();
-                  }
-                  parsedEntries[entry.path] = await heliaFs.addFile({
-                    path: entry.path,
-                    content: entry.content,
-                  });
-                });
-              } finally {
-                if (typeof fileHandle !== "undefined") {
-                  await fileHandle.close();
+          const task = (async () => {
+            await queue.add(async () => {
+              let createdFile;
+              if (
+                entry.type === "import" ||
+                entry.content instanceof Readable
+              ) {
+                if (entry.type === "import") {
+                  entry.content = await createReadStream(
+                    path.resolve(entry.content),
+                  );
                 }
+                createdFile = await heliaFs.addByteStream(entry.content);
+              } else if (entry.content !== null) {
+                createdFile = await heliaFs.addBytes(entry.content);
+              } else {
+                return;
               }
-            })(),
-          );
+              createdFiles.set(entry.path, createdFile);
+            });
+          })();
+          createFilePromises.push(task);
+        }
+        await Promise.all(createFilePromises);
+
+        const heliaMfs = mfs({
+          blockstore: temporaryBlockstore,
+          datastore: temporaryDatastore,
+        });
+        let createdDirectories = new Set();
+        for (const entry of source) {
+          const pathsToCreate = splitPath(entry.path);
+          for (const pathToCreate of pathsToCreate) {
+            if (createdDirectories.has(pathToCreate) === false) {
+              await heliaMfs.mkdir(pathToCreate);
+              createdDirectories.add(pathToCreate);
+            }
+          }
+          if (entry.content === null) {
+            await heliaMfs.mkdir(entry.path);
+          } else {
+            const entryFile = createdFiles.get(entry.path);
+            await heliaMfs.cp(entryFile, entry.path);
+          }
+        }
+        for (const entry of source) {
+          parsedEntries[entry.path] = await heliaMfs.stat(entry.path);
         }
-        await Promise.all(parsePromises);
         parsedEntries["/"] = await heliaMfs.stat("/");
 
         // Get carFile stream here
@@ -463,4 +484,28 @@ class ObjectManager {
   }
 }
 
+function splitPath(inputPath) {
+  // Split the path into its components
+  const parts = inputPath.split("/");
+
+  // Initialize an empty array to hold the incremental paths
+  let incrementalPaths = [];
+
+  // Use reduce to build each part of the path incrementally
+  parts.reduce((acc, curr, index) => {
+    // Skip the first empty element due to the leading "/"
+    if (index > 0) {
+      const newPath = `${acc}/${curr}`;
+      incrementalPaths.push(newPath);
+      return newPath;
+    }
+    return acc;
+  }, "");
+
+  // Remove the last element because it's the file name, not a folder
+  incrementalPaths = incrementalPaths.slice(0, -1);
+
+  return incrementalPaths;
+}
+
 export default ObjectManager;

From 1ab85301b1e12476b678356bbe94cf08dcd8609f Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Mon, 18 Mar 2024 12:01:53 -0500
Subject: [PATCH 12/16] Explicitly close file handle Switch to blockstore-level

---
 package.json         |   5 +-
 src/logger.js        |  10 ++
 src/objectManager.js | 153 ++++++++++++++++--------
 yarn.lock            | 275 ++++++++++++++++++++++++++++++++++++++++---
 4 files changed, 376 insertions(+), 67 deletions(-)
 create mode 100644 src/logger.js

diff --git a/package.json b/package.json
index 1a2b434..0e131d9 100644
--- a/package.json
+++ b/package.json
@@ -56,9 +56,10 @@
     "@helia/unixfs": "1.4.3",
     "@ipld/car": "5.2.4",
     "axios": "1.6.2",
-    "blockstore-fs": "1.1.8",
+    "blockstore-level": "1.1.8",
     "datastore-core": "9.2.9",
     "p-queue": "8.0.1",
-    "uuid": "9.0.1"
+    "uuid": "9.0.1",
+    "winston": "3.12.0"
   }
 }
diff --git a/src/logger.js b/src/logger.js
new file mode 100644
index 0000000..afac359
--- /dev/null
+++ b/src/logger.js
@@ -0,0 +1,10 @@
+import winston from "winston";
+const { combine, timestamp, json } = winston.format;
+
+const logger = winston.createLogger({
+  level: process.env.LOG_LEVEL || "info",
+  format: combine(timestamp(), json()),
+  transports: [new winston.transports.Console()],
+});
+
+export default logger;
diff --git a/src/objectManager.js b/src/objectManager.js
index bf4292d..4b87c1d 100644
--- a/src/objectManager.js
+++ b/src/objectManager.js
@@ -1,3 +1,5 @@
+// Environment Imports
+import logger from "./logger.js";
 // S3 Imports
 import {
   CopyObjectCommand,
@@ -13,11 +15,12 @@ import { CarWriter } from "@ipld/car";
 import { car } from "@helia/car";
 import { mfs } from "@helia/mfs";
 import { unixfs } from "@helia/unixfs";
-import { FsBlockstore } from "blockstore-fs";
+import { LevelBlockstore } from "blockstore-level";
 import { MemoryDatastore } from "datastore-core";
 // Utility Imports
+import { once } from "node:events";
 import { createReadStream, createWriteStream } from "node:fs";
-import { mkdir, rm } from "node:fs/promises";
+import { mkdir, rm, open } from "node:fs/promises";
 import os from "node:os";
 import path from "node:path";
 import { Readable } from "node:stream";
@@ -152,6 +155,7 @@ class ObjectManager {
   async upload(key, source, metadata, options) {
     // Generate Upload UUID
     const uploadUUID = uuidv4();
+    const uploadLogger = logger.child({ uploadUUID });
 
     // Setup Upload Options
     const bucket = options?.bucket || this.#defaultBucket,
@@ -175,80 +179,139 @@ class ObjectManager {
         ...uploadOptions.params.Metadata,
         import: "car",
       };
+      source.sort((a, b) => {
+        return countSlashes(b.path) - countSlashes(a.path);
+      });
 
       let temporaryCarFilePath, temporaryBlockstoreDir;
       try {
         // Setup Blockstore
         temporaryBlockstoreDir = path.resolve(
           os.tmpdir(),
-          "filebase-sdk",
+          ".filebase-sdk",
           "uploads",
           uploadUUID,
         );
         temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
         await mkdir(temporaryBlockstoreDir, { recursive: true });
-        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir),
+        const temporaryBlockstore = new LevelBlockstore(temporaryBlockstoreDir),
           temporaryDatastore = new MemoryDatastore();
 
-        let createFilePromises = [];
         let createdFiles = new Map();
         const heliaFs = unixfs({
           blockstore: temporaryBlockstore,
         });
-        const queue = new PQueue({ concurrency: os.cpus().length });
+        uploadLogger.verbose("UNIXFS_ADD", {
+          count: source.length,
+        });
+        let createFilePromises = [];
+        const queue = new PQueue({ concurrency: 50 });
         for (const entry of source) {
+          if (entry.content === null) {
+            continue;
+          }
           const task = (async () => {
             await queue.add(async () => {
+              uploadLogger.silly("SOURCE_IMPORT_STARTED", {
+                path: entry.path,
+                size: queue.size,
+              });
               let createdFile;
               if (
-                entry.type === "import" ||
+                (entry.type === "import" && entry.content !== null) ||
                 entry.content instanceof Readable
               ) {
-                if (entry.type === "import") {
-                  entry.content = await createReadStream(
-                    path.resolve(entry.content),
-                  );
+                let filehandle;
+                try {
+                  if (entry.type === "import") {
+                    filehandle = await open(path.resolve(entry.content), "r");
+                    entry.content = filehandle.createReadStream();
+                  }
+                  createdFile = await heliaFs.addByteStream(entry.content);
+                } catch (err) {
+                  if (typeof filehandle !== "undefined") {
+                    await filehandle.close();
+                  }
+                  throw err;
+                }
+                if (typeof filehandle !== "undefined") {
+                  await filehandle.close();
                 }
-                createdFile = await heliaFs.addByteStream(entry.content);
               } else if (entry.content !== null) {
                 createdFile = await heliaFs.addBytes(entry.content);
               } else {
                 return;
               }
               createdFiles.set(entry.path, createdFile);
+              uploadLogger.verbose("SOURCE_IMPORT_COMPLETED", {
+                path: entry.path,
+                size: queue.size,
+              });
             });
           })();
+          if (queue.size > 150) {
+            while (queue.size > 100) {
+              await once(queue, "next");
+            }
+          }
           createFilePromises.push(task);
+          uploadLogger.verbose("SOURCE_IMPORT_QUEUED", {
+            path: entry.path,
+            size: queue.size,
+          });
         }
         await Promise.all(createFilePromises);
+        uploadLogger.verbose("UNIXFS_ADDED", {
+          count: createdFiles.size,
+        });
 
         const heliaMfs = mfs({
           blockstore: temporaryBlockstore,
           datastore: temporaryDatastore,
         });
-        let createdDirectories = new Set();
+        uploadLogger.verbose("MFS_ADDING", {
+          count: source.length,
+          output: temporaryCarFilePath,
+        });
         for (const entry of source) {
-          const pathsToCreate = splitPath(entry.path);
-          for (const pathToCreate of pathsToCreate) {
-            if (createdDirectories.has(pathToCreate) === false) {
-              await heliaMfs.mkdir(pathToCreate);
-              createdDirectories.add(pathToCreate);
-            }
-          }
           if (entry.content === null) {
+            uploadLogger.silly("MFS_DIR_CREATING", {
+              path: entry.path,
+            });
             await heliaMfs.mkdir(entry.path);
+            uploadLogger.verbose("MFS_DIR_CREATED", {
+              path: entry.path,
+            });
           } else {
             const entryFile = createdFiles.get(entry.path);
-            await heliaMfs.cp(entryFile, entry.path);
+            uploadLogger.silly("MFS_FILE_COPY", {
+              cid: entryFile,
+              path: entry.path,
+            });
+            await heliaMfs.cp(entryFile, entry.path, {
+              force: true,
+            });
+            uploadLogger.verbose("MFS_FILE_COPIED", {
+              cid: entryFile,
+              path: entry.path,
+            });
           }
         }
         for (const entry of source) {
           parsedEntries[entry.path] = await heliaMfs.stat(entry.path);
+          uploadLogger.silly("MFS_PATH_STAT", parsedEntries[entry.path]);
         }
         parsedEntries["/"] = await heliaMfs.stat("/");
+        const rootEntry = parsedEntries["/"];
+        uploadLogger.verbose("MFS_ADDED", {
+          root: rootEntry,
+          count: Object.keys(parsedEntries).length,
+        });
 
         // Get carFile stream here
-        const rootEntry = parsedEntries["/"];
+        uploadLogger.verbose("CAR_EXPORTING", {
+          root: rootEntry,
+        });
         const carExporter = car({ blockstore: temporaryBlockstore }),
           { writer, out } = CarWriter.create([rootEntry.cid]);
 
@@ -256,14 +319,31 @@ class ObjectManager {
         const output = createWriteStream(temporaryCarFilePath);
         Readable.from(out).pipe(output);
         await carExporter.export(rootEntry.cid, writer);
+        uploadLogger.verbose("CAR_EXPORTED", {
+          root: rootEntry,
+        });
 
         // Set Uploader to Read from carFile on disk
         uploadOptions.params.Body = createReadStream(temporaryCarFilePath);
 
         // Upload carFile via S3
+        uploadLogger.verbose("CAR_UPLOADING", {
+          entry: rootEntry,
+          source: temporaryCarFilePath,
+        });
         const parallelUploads3 = new Upload(uploadOptions);
+        parallelUploads3.on("httpUploadProgress", (progress) => {
+          uploadLogger.debug("CAR_UPLOAD_PROGRESS", progress);
+        });
         await parallelUploads3.done();
+        uploadLogger.verbose("CAR_UPLOADED", {
+          entry: rootEntry,
+          source: temporaryCarFilePath,
+        });
         await temporaryBlockstore.close();
+      } catch (err) {
+        console.error(err.message);
+        throw err;
       } finally {
         if (typeof temporaryBlockstoreDir !== "undefined") {
           // Delete Temporary Blockstore
@@ -339,7 +419,7 @@ class ObjectManager {
   /**
    * @summary Downloads an object from the specified bucket using the provided key.
    * @param {string} key - The key of the object to be downloaded.
-   * @param {objectOptions} [options] - The options for downloading the object..
+   * @param {objectOptions} [options] - The options for downloading the object.
    * @returns {Promise} - A promise that resolves with the contents of the downloaded object as a Stream.
    * @example
    * // Download object with name of `download-object-example`
@@ -457,7 +537,7 @@ class ObjectManager {
    * @returns {Promise} - A Promise that resolves with the result of the copy operation.
    * @example
    * // Copy object `copy-object-test` from `copy-object-test-pass-src` to `copy-object-test-pass-dest`
-   * // TIP: Set bucket on constructor and it will be used as the default source for copying objects.
+   * // TIP: Set bucket on constructor, it will be used as the default source for copying objects.
    * await objectManager.copy(`copy-object-test`, `copy-object-dest`, {
    *   sourceBucket: `copy-object-src`
    * });
@@ -484,28 +564,9 @@ class ObjectManager {
   }
 }
 
-function splitPath(inputPath) {
-  // Split the path into its components
-  const parts = inputPath.split("/");
-
-  // Initialize an empty array to hold the incremental paths
-  let incrementalPaths = [];
-
-  // Use reduce to build each part of the path incrementally
-  parts.reduce((acc, curr, index) => {
-    // Skip the first empty element due to the leading "/"
-    if (index > 0) {
-      const newPath = `${acc}/${curr}`;
-      incrementalPaths.push(newPath);
-      return newPath;
-    }
-    return acc;
-  }, "");
-
-  // Remove the last element because it's the file name, not a folder
-  incrementalPaths = incrementalPaths.slice(0, -1);
-
-  return incrementalPaths;
+// Function to count slashes in a path
+function countSlashes(path) {
+  return (path.match(/\//g) || []).length;
 }
 
 export default ObjectManager;
diff --git a/yarn.lock b/yarn.lock
index 22a6e6d..8dbf092 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -611,6 +611,20 @@
   dependencies:
     "@chainsafe/is-ip" "^2.0.1"
 
+"@colors/colors@1.6.0", "@colors/colors@^1.6.0":
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
+  integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==
+
+"@dabh/diagnostics@^2.0.2":
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a"
+  integrity sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==
+  dependencies:
+    colorspace "1.1.x"
+    enabled "2.0.x"
+    kuler "^2.0.0"
+
 "@esbuild/aix-ppc64@0.19.12":
   version "0.19.12"
   resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz#d1bc06aedb6936b3b6d313bf809a5a40387d2b7f"
@@ -1635,6 +1649,11 @@
   resolved "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.5.tgz"
   integrity sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==
 
+"@types/triple-beam@^1.3.2":
+  version "1.3.5"
+  resolved "https://registry.yarnpkg.com/@types/triple-beam/-/triple-beam-1.3.5.tgz#74fef9ffbaa198eb8b588be029f38b00299caa2c"
+  integrity sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==
+
 "@vascosantos/moving-average@^1.1.0":
   version "1.1.0"
   resolved "https://registry.npmjs.org/@vascosantos/moving-average/-/moving-average-1.1.0.tgz"
@@ -1648,6 +1667,19 @@ abortable-iterator@^5.0.1:
     get-iterator "^2.0.0"
     it-stream-types "^2.0.1"
 
+abstract-level@^1.0.2, abstract-level@^1.0.4:
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/abstract-level/-/abstract-level-1.0.4.tgz#3ad8d684c51cc9cbc9cf9612a7100b716c414b57"
+  integrity sha512-eUP/6pbXBkMbXFdx4IH2fVgvB7M0JvR7/lIL33zcs0IBcwjdzSSl31TOJsaCzmKSSDF9h8QYSOJux4Nd4YJqFg==
+  dependencies:
+    buffer "^6.0.3"
+    catering "^2.1.0"
+    is-buffer "^2.0.5"
+    level-supports "^4.0.0"
+    level-transcoder "^1.0.1"
+    module-error "^1.0.1"
+    queue-microtask "^1.2.3"
+
 acorn@^8.8.2:
   version "8.11.2"
   resolved "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz"
@@ -1703,6 +1735,11 @@ array-union@^2.1.0:
   resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz"
   integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==
 
+async@^3.2.3:
+  version "3.2.5"
+  resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66"
+  integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==
+
 asynckit@^0.4.0:
   version "0.4.0"
   resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz"
@@ -1757,19 +1794,16 @@ blockstore-core@^4.0.0:
     multiformats "^12.0.1"
     uint8arrays "^5.0.0"
 
-blockstore-fs@1.1.8:
+blockstore-level@1.1.8:
   version "1.1.8"
-  resolved "https://registry.npmjs.org/blockstore-fs/-/blockstore-fs-1.1.8.tgz"
-  integrity sha512-1KD1+yEkxszZ3GWQdJbGgXAOs12LJC/Pit7JCPueJT/Pjt9GWtGZ4+8mgoaR3bjXVBgBIdhNlUxxw2NS787noA==
+  resolved "https://registry.yarnpkg.com/blockstore-level/-/blockstore-level-1.1.8.tgz#e8016a710ccd93c5f167ac8c746395a392422dec"
+  integrity sha512-8+NLeoyAQZzWIf9TWl0kC3x0JpJf6bts02K1fg8MCxgx/z/Leh4gVCzSGvYDuorHicOCVsyWv4+3ldlrnzRXoA==
   dependencies:
     blockstore-core "^4.0.0"
-    fast-write-atomic "^0.2.0"
     interface-blockstore "^5.0.0"
     interface-store "^5.0.0"
-    it-glob "^2.0.1"
-    it-map "^3.0.1"
-    it-parallel-batch "^3.0.0"
-    multiformats "^12.0.1"
+    level "^8.0.1"
+    multiformats "^13.0.1"
 
 bluebird@^3.7.2:
   version "3.7.2"
@@ -1795,6 +1829,16 @@ braces@^3.0.2, braces@~3.0.2:
   dependencies:
     fill-range "^7.0.1"
 
+browser-level@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/browser-level/-/browser-level-1.0.1.tgz#36e8c3183d0fe1c405239792faaab5f315871011"
+  integrity sha512-XECYKJ+Dbzw0lbydyQuJzwNXtOpbMSq737qxJN11sIRTErOMShvDpbzTlgju7orJKvx4epULolZAuJGLzCmWRQ==
+  dependencies:
+    abstract-level "^1.0.2"
+    catering "^2.1.1"
+    module-error "^1.0.2"
+    run-parallel-limit "^1.1.0"
+
 buffer-from@^1.0.0:
   version "1.1.2"
   resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz"
@@ -1836,6 +1880,11 @@ camel-case@^4.1.2:
     pascal-case "^3.1.2"
     tslib "^2.0.3"
 
+catering@^2.1.0, catering@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510"
+  integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w==
+
 catharsis@^0.9.0:
   version "0.9.0"
   resolved "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz"
@@ -1863,6 +1912,17 @@ chokidar@^3.5.1:
   optionalDependencies:
     fsevents "~2.3.2"
 
+classic-level@^1.2.0:
+  version "1.4.1"
+  resolved "https://registry.yarnpkg.com/classic-level/-/classic-level-1.4.1.tgz#169ecf9f9c6200ad42a98c8576af449c1badbaee"
+  integrity sha512-qGx/KJl3bvtOHrGau2WklEZuXhS3zme+jf+fsu6Ej7W7IP/C49v7KNlWIsT1jZu0YnfzSIYDGcEWpCa1wKGWXQ==
+  dependencies:
+    abstract-level "^1.0.2"
+    catering "^2.1.0"
+    module-error "^1.0.1"
+    napi-macros "^2.2.2"
+    node-gyp-build "^4.3.0"
+
 clean-css@~5.3.2:
   version "5.3.3"
   resolved "https://registry.npmjs.org/clean-css/-/clean-css-5.3.3.tgz"
@@ -1882,6 +1942,13 @@ clean-jsdoc-theme@4.2.17:
     lodash "^4.17.21"
     showdown "^2.1.0"
 
+color-convert@^1.9.3:
+  version "1.9.3"
+  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
+  integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
+  dependencies:
+    color-name "1.1.3"
+
 color-convert@^2.0.1:
   version "2.0.1"
   resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz"
@@ -1889,11 +1956,40 @@ color-convert@^2.0.1:
   dependencies:
     color-name "~1.1.4"
 
-color-name@~1.1.4:
+color-name@1.1.3:
+  version "1.1.3"
+  resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
+  integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
+
+color-name@^1.0.0, color-name@~1.1.4:
   version "1.1.4"
   resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz"
   integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
 
+color-string@^1.6.0:
+  version "1.9.1"
+  resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.9.1.tgz#4467f9146f036f855b764dfb5bf8582bf342c7a4"
+  integrity sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==
+  dependencies:
+    color-name "^1.0.0"
+    simple-swizzle "^0.2.2"
+
+color@^3.1.3:
+  version "3.2.1"
+  resolved "https://registry.yarnpkg.com/color/-/color-3.2.1.tgz#3544dc198caf4490c3ecc9a790b54fe9ff45e164"
+  integrity sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==
+  dependencies:
+    color-convert "^1.9.3"
+    color-string "^1.6.0"
+
+colorspace@1.1.x:
+  version "1.1.4"
+  resolved "https://registry.yarnpkg.com/colorspace/-/colorspace-1.1.4.tgz#8d442d1186152f60453bf8070cd66eb364e59243"
+  integrity sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==
+  dependencies:
+    color "^3.1.3"
+    text-hex "1.0.x"
+
 combined-stream@^1.0.8:
   version "1.0.8"
   resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz"
@@ -2006,6 +2102,11 @@ emoji-regex@^9.2.2:
   resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz"
   integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==
 
+enabled@2.0.x:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/enabled/-/enabled-2.0.0.tgz#f9dd92ec2d6f4bbc0d5d1e64e21d61cd4665e7c2"
+  integrity sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==
+
 entities@^4.4.0:
   version "4.5.0"
   resolved "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz"
@@ -2091,11 +2192,6 @@ fast-glob@^3.2.9:
     merge2 "^1.3.0"
     micromatch "^4.0.4"
 
-fast-write-atomic@^0.2.0:
-  version "0.2.1"
-  resolved "https://registry.npmjs.org/fast-write-atomic/-/fast-write-atomic-0.2.1.tgz"
-  integrity sha512-WvJe06IfNYlr+6cO3uQkdKdy3Cb1LlCJSF8zRs2eT8yuhdbSlR9nIt+TgQ92RUxiRrQm+/S7RARnMfCs5iuAjw==
-
 fast-xml-parser@4.2.5:
   version "4.2.5"
   resolved "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz"
@@ -2110,6 +2206,11 @@ fastq@^1.6.0:
   dependencies:
     reusify "^1.0.4"
 
+fecha@^4.2.0:
+  version "4.2.3"
+  resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.3.tgz#4d9ccdbc61e8629b259fdca67e65891448d569fd"
+  integrity sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==
+
 fill-range@^7.0.1:
   version "7.0.1"
   resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz"
@@ -2117,6 +2218,11 @@ fill-range@^7.0.1:
   dependencies:
     to-regex-range "^5.0.1"
 
+fn.name@1.x.x:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc"
+  integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==
+
 follow-redirects@^1.15.0:
   version "1.15.3"
   resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz"
@@ -2426,6 +2532,11 @@ ipfs-unixfs@^11.1.3:
     protons-runtime "^5.0.0"
     uint8arraylist "^2.4.3"
 
+is-arrayish@^0.3.1:
+  version "0.3.2"
+  resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03"
+  integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==
+
 is-binary-path@~2.1.0:
   version "2.1.0"
   resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz"
@@ -2433,6 +2544,11 @@ is-binary-path@~2.1.0:
   dependencies:
     binary-extensions "^2.0.0"
 
+is-buffer@^2.0.5:
+  version "2.0.5"
+  resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191"
+  integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==
+
 is-extglob@^2.1.1:
   version "2.1.1"
   resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz"
@@ -2504,9 +2620,9 @@ it-foreach@^2.0.2:
   dependencies:
     it-peekable "^3.0.0"
 
-it-glob@^2.0.1, it-glob@^2.0.4, it-glob@^2.0.6:
+it-glob@^2.0.4, it-glob@^2.0.6:
   version "2.0.6"
-  resolved "https://registry.yarnpkg.com/it-glob/-/it-glob-2.0.6.tgz#616a5b008aa456e82be9a29b8c7dd4dc8ef81acb"
+  resolved "https://registry.npmjs.org/it-glob/-/it-glob-2.0.6.tgz"
   integrity sha512-4C6ccz4nhqrq7yZMzBr3MsKhyL+rlnLXIPceyGG6ogl3Lx3eeWMv1RtlySJwFi6q+jVcPyTpeYt/xftwI2JEQQ==
   dependencies:
     minimatch "^9.0.0"
@@ -2528,7 +2644,7 @@ it-length-prefixed@^9.0.0:
     uint8arraylist "^2.0.0"
     uint8arrays "^4.0.2"
 
-it-map@^3.0.1, it-map@^3.0.2, it-map@^3.0.3, it-map@^3.0.5:
+it-map@^3.0.2, it-map@^3.0.3, it-map@^3.0.5:
   version "3.0.5"
   resolved "https://registry.npmjs.org/it-map/-/it-map-3.0.5.tgz"
   integrity sha512-hB0TDXo/h4KSJJDSRLgAPmDroiXP6Fx1ck4Bzl3US9hHfZweTKsuiP0y4gXuTMcJlS6vj0bb+f70rhkD47ZA3w==
@@ -2542,7 +2658,7 @@ it-merge@^3.0.0, it-merge@^3.0.1, it-merge@^3.0.3:
   dependencies:
     it-pushable "^3.2.0"
 
-it-parallel-batch@^3.0.0, it-parallel-batch@^3.0.1:
+it-parallel-batch@^3.0.1:
   version "3.0.4"
   resolved "https://registry.npmjs.org/it-parallel-batch/-/it-parallel-batch-3.0.4.tgz"
   integrity sha512-O1omh8ss8+UtXiMjE+8kM5C20DT0Ma4VtKVfrSHOJU0UHZ+iWBXarabzPYEp+WiuQmrv+klDPPlTZ9KaLN9xOA==
@@ -2672,6 +2788,33 @@ klaw@^3.0.0:
   dependencies:
     graceful-fs "^4.1.9"
 
+kuler@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3"
+  integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==
+
+level-supports@^4.0.0:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-4.0.1.tgz#431546f9d81f10ff0fea0e74533a0e875c08c66a"
+  integrity sha512-PbXpve8rKeNcZ9C1mUicC9auIYFyGpkV9/i6g76tLgANwWhtG2v7I4xNBUlkn3lE2/dZF3Pi0ygYGtLc4RXXdA==
+
+level-transcoder@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/level-transcoder/-/level-transcoder-1.0.1.tgz#f8cef5990c4f1283d4c86d949e73631b0bc8ba9c"
+  integrity sha512-t7bFwFtsQeD8cl8NIoQ2iwxA0CL/9IFw7/9gAjOonH0PWTTiRfY7Hq+Ejbsxh86tXobDQ6IOiddjNYIfOBs06w==
+  dependencies:
+    buffer "^6.0.3"
+    module-error "^1.0.1"
+
+level@^8.0.1:
+  version "8.0.1"
+  resolved "https://registry.yarnpkg.com/level/-/level-8.0.1.tgz#737161db1bc317193aca4e7b6f436e7e1df64379"
+  integrity sha512-oPBGkheysuw7DmzFQYyFe8NAia5jFLAgEnkgWnK3OXAuJr8qFT+xBQIwokAZPME2bhPFzS8hlYcL16m8UZrtwQ==
+  dependencies:
+    abstract-level "^1.0.4"
+    browser-level "^1.0.1"
+    classic-level "^1.2.0"
+
 lilconfig@^3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz"
@@ -2704,6 +2847,18 @@ lodash@^4.17.15, lodash@^4.17.21:
   resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
   integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
 
+logform@^2.3.2, logform@^2.4.0:
+  version "2.6.0"
+  resolved "https://registry.yarnpkg.com/logform/-/logform-2.6.0.tgz#8c82a983f05d6eaeb2d75e3decae7a768b2bf9b5"
+  integrity sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ==
+  dependencies:
+    "@colors/colors" "1.6.0"
+    "@types/triple-beam" "^1.3.2"
+    fecha "^4.2.0"
+    ms "^2.1.1"
+    safe-stable-stringify "^2.3.1"
+    triple-beam "^1.3.0"
+
 lower-case@^2.0.2:
   version "2.0.2"
   resolved "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz"
@@ -2806,6 +2961,11 @@ mkdirp@^1.0.4:
   resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz"
   integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
 
+module-error@^1.0.1, module-error@^1.0.2:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/module-error/-/module-error-1.0.2.tgz#8d1a48897ca883f47a45816d4fb3e3c6ba404d86"
+  integrity sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA==
+
 ms@2.1.2, ms@^2.1.1:
   version "2.1.2"
   resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz"
@@ -2840,6 +3000,11 @@ mz@^2.7.0:
     object-assign "^4.0.1"
     thenify-all "^1.0.0"
 
+napi-macros@^2.2.2:
+  version "2.2.2"
+  resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.2.2.tgz#817fef20c3e0e40a963fbf7b37d1600bd0201044"
+  integrity sha512-hmEVtAGYzVQpCKdbQea4skABsdXW4RUh5t5mJ2zzqowJS2OyXZTU1KhDVFhx+NlWZ4ap9mqR9TcDO3LTTttd+g==
+
 no-case@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz"
@@ -2855,6 +3020,11 @@ node-fetch@^2.6.1:
   dependencies:
     whatwg-url "^5.0.0"
 
+node-gyp-build@^4.3.0:
+  version "4.8.0"
+  resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.8.0.tgz#3fee9c1731df4581a3f9ead74664369ff00d26dd"
+  integrity sha512-u6fs2AEUljNho3EYTJNBfImO5QTo/J/1Etd+NVdCj7qWKUSN/bSLkZwhDv7I+w/MSC6qJ4cknepkAYykDdK8og==
+
 normalize-path@^3.0.0, normalize-path@~3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz"
@@ -2872,6 +3042,13 @@ object-assign@^4.0.1:
   resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz"
   integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
 
+one-time@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/one-time/-/one-time-1.0.0.tgz#e06bc174aed214ed58edede573b433bbf827cb45"
+  integrity sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==
+  dependencies:
+    fn.name "1.x.x"
+
 onetime@^5.1.2:
   version "5.1.2"
   resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz"
@@ -2990,7 +3167,7 @@ punycode@^2.1.0:
   resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz"
   integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==
 
-queue-microtask@^1.2.2:
+queue-microtask@^1.2.2, queue-microtask@^1.2.3:
   version "1.2.3"
   resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz"
   integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
@@ -3089,6 +3266,13 @@ rollup@^4.0.2:
     "@rollup/rollup-win32-x64-msvc" "4.9.6"
     fsevents "~2.3.2"
 
+run-parallel-limit@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz#be80e936f5768623a38a963262d6bef8ff11e7ba"
+  integrity sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw==
+  dependencies:
+    queue-microtask "^1.2.2"
+
 run-parallel@^1.1.9:
   version "1.2.0"
   resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz"
@@ -3101,6 +3285,11 @@ safe-buffer@~5.2.0:
   resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz"
   integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
 
+safe-stable-stringify@^2.3.1:
+  version "2.4.3"
+  resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz#138c84b6f6edb3db5f8ef3ef7115b8f55ccbf886"
+  integrity sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==
+
 shebang-command@^2.0.0:
   version "2.0.0"
   resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz"
@@ -3130,6 +3319,13 @@ signal-exit@^4.0.1:
   resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz"
   integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==
 
+simple-swizzle@^0.2.2:
+  version "0.2.2"
+  resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a"
+  integrity sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==
+  dependencies:
+    is-arrayish "^0.3.1"
+
 slash@^3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz"
@@ -3160,6 +3356,11 @@ sparse-array@^1.3.1, sparse-array@^1.3.2:
   resolved "https://registry.npmjs.org/sparse-array/-/sparse-array-1.3.2.tgz"
   integrity sha512-ZT711fePGn3+kQyLuv1fpd3rNSkNF8vd5Kv2D+qnOANeyKs3fx6bUMGWRPvgTTcYV64QMqZKZwcuaQSP3AZ0tg==
 
+stack-trace@0.0.x:
+  version "0.0.10"
+  resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0"
+  integrity sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==
+
 stream-browserify@3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/stream-browserify/-/stream-browserify-3.0.0.tgz"
@@ -3247,6 +3448,11 @@ terser@^5.15.1:
     commander "^2.20.0"
     source-map-support "~0.5.20"
 
+text-hex@1.0.x:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5"
+  integrity sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==
+
 thenify-all@^1.0.0:
   version "1.6.0"
   resolved "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz"
@@ -3292,6 +3498,11 @@ tree-kill@^1.2.2:
   resolved "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz"
   integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==
 
+triple-beam@^1.3.0:
+  version "1.4.1"
+  resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.4.1.tgz#6fde70271dc6e5d73ca0c3b24e2d92afb7441984"
+  integrity sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==
+
 ts-interface-checker@^0.1.9:
   version "0.1.13"
   resolved "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz"
@@ -3456,6 +3667,32 @@ which@^2.0.1:
   dependencies:
     isexe "^2.0.0"
 
+winston-transport@^4.7.0:
+  version "4.7.0"
+  resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.7.0.tgz#e302e6889e6ccb7f383b926df6936a5b781bd1f0"
+  integrity sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg==
+  dependencies:
+    logform "^2.3.2"
+    readable-stream "^3.6.0"
+    triple-beam "^1.3.0"
+
+winston@3.12.0:
+  version "3.12.0"
+  resolved "https://registry.yarnpkg.com/winston/-/winston-3.12.0.tgz#a5d965a41d3dc31be5408f8c66e927958846c0d0"
+  integrity sha512-OwbxKaOlESDi01mC9rkM0dQqQt2I8DAUMRLZ/HpbwvDXm85IryEHgoogy5fziQy38PntgZsLlhAYHz//UPHZ5w==
+  dependencies:
+    "@colors/colors" "^1.6.0"
+    "@dabh/diagnostics" "^2.0.2"
+    async "^3.2.3"
+    is-stream "^2.0.0"
+    logform "^2.4.0"
+    one-time "^1.0.0"
+    readable-stream "^3.4.0"
+    safe-stable-stringify "^2.3.1"
+    stack-trace "0.0.x"
+    triple-beam "^1.3.0"
+    winston-transport "^4.7.0"
+
 "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
   version "7.0.0"
   resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz"

From f9234d8f834461ac5dc686a79eb49e09f8dee9dd Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Mon, 18 Mar 2024 12:05:47 -0500
Subject: [PATCH 13/16] Fix tmp path

---
 src/objectManager.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/objectManager.js b/src/objectManager.js
index 4b87c1d..2c3e176 100644
--- a/src/objectManager.js
+++ b/src/objectManager.js
@@ -188,7 +188,7 @@ class ObjectManager {
         // Setup Blockstore
         temporaryBlockstoreDir = path.resolve(
           os.tmpdir(),
-          ".filebase-sdk",
+          "filebase-sdk",
           "uploads",
           uploadUUID,
         );

From b1080c246000362cb2ca7aeadc2862788cbe2a19 Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Mon, 18 Mar 2024 12:08:11 -0500
Subject: [PATCH 14/16] Fix package in build

---
 tsup.config.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tsup.config.js b/tsup.config.js
index a088dea..2433f50 100644
--- a/tsup.config.js
+++ b/tsup.config.js
@@ -9,7 +9,7 @@ export default defineConfig({
     "@helia/car",
     "@helia/unixfs",
     "@helia/mfs",
-    "blockstore-fs",
+    "blockstore-level",
     "datastore-core",
     "p-queue",
   ],

From 10ae3f2a9ef3778046ba95f40044e3dd4d420e2e Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Mon, 18 Mar 2024 12:11:13 -0500
Subject: [PATCH 15/16] Fix package in build

---
 package.json         |   2 +-
 src/objectManager.js |   4 +-
 tsup.config.js       |   2 +-
 yarn.lock            | 110 ++++++-------------------------------------
 4 files changed, 19 insertions(+), 99 deletions(-)

diff --git a/package.json b/package.json
index 0e131d9..7c36787 100644
--- a/package.json
+++ b/package.json
@@ -56,7 +56,7 @@
     "@helia/unixfs": "1.4.3",
     "@ipld/car": "5.2.4",
     "axios": "1.6.2",
-    "blockstore-level": "1.1.8",
+    "blockstore-fs": "1.1.10",
     "datastore-core": "9.2.9",
     "p-queue": "8.0.1",
     "uuid": "9.0.1",
diff --git a/src/objectManager.js b/src/objectManager.js
index 2c3e176..0045672 100644
--- a/src/objectManager.js
+++ b/src/objectManager.js
@@ -15,7 +15,7 @@ import { CarWriter } from "@ipld/car";
 import { car } from "@helia/car";
 import { mfs } from "@helia/mfs";
 import { unixfs } from "@helia/unixfs";
-import { LevelBlockstore } from "blockstore-level";
+import { FsBlockstore } from "blockstore-fs";
 import { MemoryDatastore } from "datastore-core";
 // Utility Imports
 import { once } from "node:events";
@@ -194,7 +194,7 @@ class ObjectManager {
         );
         temporaryCarFilePath = `${temporaryBlockstoreDir}/main.car`;
         await mkdir(temporaryBlockstoreDir, { recursive: true });
-        const temporaryBlockstore = new LevelBlockstore(temporaryBlockstoreDir),
+        const temporaryBlockstore = new FsBlockstore(temporaryBlockstoreDir),
           temporaryDatastore = new MemoryDatastore();
 
         let createdFiles = new Map();
diff --git a/tsup.config.js b/tsup.config.js
index 2433f50..a088dea 100644
--- a/tsup.config.js
+++ b/tsup.config.js
@@ -9,7 +9,7 @@ export default defineConfig({
     "@helia/car",
     "@helia/unixfs",
     "@helia/mfs",
-    "blockstore-level",
+    "blockstore-fs",
     "datastore-core",
     "p-queue",
   ],
diff --git a/yarn.lock b/yarn.lock
index 8dbf092..0fb7845 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1667,19 +1667,6 @@ abortable-iterator@^5.0.1:
     get-iterator "^2.0.0"
     it-stream-types "^2.0.1"
 
-abstract-level@^1.0.2, abstract-level@^1.0.4:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/abstract-level/-/abstract-level-1.0.4.tgz#3ad8d684c51cc9cbc9cf9612a7100b716c414b57"
-  integrity sha512-eUP/6pbXBkMbXFdx4IH2fVgvB7M0JvR7/lIL33zcs0IBcwjdzSSl31TOJsaCzmKSSDF9h8QYSOJux4Nd4YJqFg==
-  dependencies:
-    buffer "^6.0.3"
-    catering "^2.1.0"
-    is-buffer "^2.0.5"
-    level-supports "^4.0.0"
-    level-transcoder "^1.0.1"
-    module-error "^1.0.1"
-    queue-microtask "^1.2.3"
-
 acorn@^8.8.2:
   version "8.11.2"
   resolved "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz"
@@ -1794,15 +1781,18 @@ blockstore-core@^4.0.0:
     multiformats "^12.0.1"
     uint8arrays "^5.0.0"
 
-blockstore-level@1.1.8:
-  version "1.1.8"
-  resolved "https://registry.yarnpkg.com/blockstore-level/-/blockstore-level-1.1.8.tgz#e8016a710ccd93c5f167ac8c746395a392422dec"
-  integrity sha512-8+NLeoyAQZzWIf9TWl0kC3x0JpJf6bts02K1fg8MCxgx/z/Leh4gVCzSGvYDuorHicOCVsyWv4+3ldlrnzRXoA==
+blockstore-fs@1.1.10:
+  version "1.1.10"
+  resolved "https://registry.yarnpkg.com/blockstore-fs/-/blockstore-fs-1.1.10.tgz#0dc8119b0bc293e6df4e056356e33e85528c7a05"
+  integrity sha512-Dg0mbdma0OY4NEk78efcAAiG5ZrMcIVrM7s+0e2p4uavnvrcBT6vDj5ITfnRfid3idKHOoCYShGEi9ENNgJg1A==
   dependencies:
     blockstore-core "^4.0.0"
+    fast-write-atomic "^0.2.1"
     interface-blockstore "^5.0.0"
     interface-store "^5.0.0"
-    level "^8.0.1"
+    it-glob "^2.0.6"
+    it-map "^3.0.5"
+    it-parallel-batch "^3.0.4"
     multiformats "^13.0.1"
 
 bluebird@^3.7.2:
@@ -1829,16 +1819,6 @@ braces@^3.0.2, braces@~3.0.2:
   dependencies:
     fill-range "^7.0.1"
 
-browser-level@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/browser-level/-/browser-level-1.0.1.tgz#36e8c3183d0fe1c405239792faaab5f315871011"
-  integrity sha512-XECYKJ+Dbzw0lbydyQuJzwNXtOpbMSq737qxJN11sIRTErOMShvDpbzTlgju7orJKvx4epULolZAuJGLzCmWRQ==
-  dependencies:
-    abstract-level "^1.0.2"
-    catering "^2.1.1"
-    module-error "^1.0.2"
-    run-parallel-limit "^1.1.0"
-
 buffer-from@^1.0.0:
   version "1.1.2"
   resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz"
@@ -1880,11 +1860,6 @@ camel-case@^4.1.2:
     pascal-case "^3.1.2"
     tslib "^2.0.3"
 
-catering@^2.1.0, catering@^2.1.1:
-  version "2.1.1"
-  resolved "https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510"
-  integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w==
-
 catharsis@^0.9.0:
   version "0.9.0"
   resolved "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz"
@@ -1912,17 +1887,6 @@ chokidar@^3.5.1:
   optionalDependencies:
     fsevents "~2.3.2"
 
-classic-level@^1.2.0:
-  version "1.4.1"
-  resolved "https://registry.yarnpkg.com/classic-level/-/classic-level-1.4.1.tgz#169ecf9f9c6200ad42a98c8576af449c1badbaee"
-  integrity sha512-qGx/KJl3bvtOHrGau2WklEZuXhS3zme+jf+fsu6Ej7W7IP/C49v7KNlWIsT1jZu0YnfzSIYDGcEWpCa1wKGWXQ==
-  dependencies:
-    abstract-level "^1.0.2"
-    catering "^2.1.0"
-    module-error "^1.0.1"
-    napi-macros "^2.2.2"
-    node-gyp-build "^4.3.0"
-
 clean-css@~5.3.2:
   version "5.3.3"
   resolved "https://registry.npmjs.org/clean-css/-/clean-css-5.3.3.tgz"
@@ -2192,6 +2156,11 @@ fast-glob@^3.2.9:
     merge2 "^1.3.0"
     micromatch "^4.0.4"
 
+fast-write-atomic@^0.2.1:
+  version "0.2.1"
+  resolved "https://registry.yarnpkg.com/fast-write-atomic/-/fast-write-atomic-0.2.1.tgz#7ee8ef0ce3c1f531043c09ae8e5143361ab17ede"
+  integrity sha512-WvJe06IfNYlr+6cO3uQkdKdy3Cb1LlCJSF8zRs2eT8yuhdbSlR9nIt+TgQ92RUxiRrQm+/S7RARnMfCs5iuAjw==
+
 fast-xml-parser@4.2.5:
   version "4.2.5"
   resolved "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz"
@@ -2544,11 +2513,6 @@ is-binary-path@~2.1.0:
   dependencies:
     binary-extensions "^2.0.0"
 
-is-buffer@^2.0.5:
-  version "2.0.5"
-  resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191"
-  integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==
-
 is-extglob@^2.1.1:
   version "2.1.1"
   resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz"
@@ -2658,7 +2622,7 @@ it-merge@^3.0.0, it-merge@^3.0.1, it-merge@^3.0.3:
   dependencies:
     it-pushable "^3.2.0"
 
-it-parallel-batch@^3.0.1:
+it-parallel-batch@^3.0.1, it-parallel-batch@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/it-parallel-batch/-/it-parallel-batch-3.0.4.tgz"
   integrity sha512-O1omh8ss8+UtXiMjE+8kM5C20DT0Ma4VtKVfrSHOJU0UHZ+iWBXarabzPYEp+WiuQmrv+klDPPlTZ9KaLN9xOA==
@@ -2793,28 +2757,6 @@ kuler@^2.0.0:
   resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3"
   integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==
 
-level-supports@^4.0.0:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-4.0.1.tgz#431546f9d81f10ff0fea0e74533a0e875c08c66a"
-  integrity sha512-PbXpve8rKeNcZ9C1mUicC9auIYFyGpkV9/i6g76tLgANwWhtG2v7I4xNBUlkn3lE2/dZF3Pi0ygYGtLc4RXXdA==
-
-level-transcoder@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/level-transcoder/-/level-transcoder-1.0.1.tgz#f8cef5990c4f1283d4c86d949e73631b0bc8ba9c"
-  integrity sha512-t7bFwFtsQeD8cl8NIoQ2iwxA0CL/9IFw7/9gAjOonH0PWTTiRfY7Hq+Ejbsxh86tXobDQ6IOiddjNYIfOBs06w==
-  dependencies:
-    buffer "^6.0.3"
-    module-error "^1.0.1"
-
-level@^8.0.1:
-  version "8.0.1"
-  resolved "https://registry.yarnpkg.com/level/-/level-8.0.1.tgz#737161db1bc317193aca4e7b6f436e7e1df64379"
-  integrity sha512-oPBGkheysuw7DmzFQYyFe8NAia5jFLAgEnkgWnK3OXAuJr8qFT+xBQIwokAZPME2bhPFzS8hlYcL16m8UZrtwQ==
-  dependencies:
-    abstract-level "^1.0.4"
-    browser-level "^1.0.1"
-    classic-level "^1.2.0"
-
 lilconfig@^3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz"
@@ -2961,11 +2903,6 @@ mkdirp@^1.0.4:
   resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz"
   integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
 
-module-error@^1.0.1, module-error@^1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/module-error/-/module-error-1.0.2.tgz#8d1a48897ca883f47a45816d4fb3e3c6ba404d86"
-  integrity sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA==
-
 ms@2.1.2, ms@^2.1.1:
   version "2.1.2"
   resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz"
@@ -3000,11 +2937,6 @@ mz@^2.7.0:
     object-assign "^4.0.1"
     thenify-all "^1.0.0"
 
-napi-macros@^2.2.2:
-  version "2.2.2"
-  resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.2.2.tgz#817fef20c3e0e40a963fbf7b37d1600bd0201044"
-  integrity sha512-hmEVtAGYzVQpCKdbQea4skABsdXW4RUh5t5mJ2zzqowJS2OyXZTU1KhDVFhx+NlWZ4ap9mqR9TcDO3LTTttd+g==
-
 no-case@^3.0.4:
   version "3.0.4"
   resolved "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz"
@@ -3020,11 +2952,6 @@ node-fetch@^2.6.1:
   dependencies:
     whatwg-url "^5.0.0"
 
-node-gyp-build@^4.3.0:
-  version "4.8.0"
-  resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.8.0.tgz#3fee9c1731df4581a3f9ead74664369ff00d26dd"
-  integrity sha512-u6fs2AEUljNho3EYTJNBfImO5QTo/J/1Etd+NVdCj7qWKUSN/bSLkZwhDv7I+w/MSC6qJ4cknepkAYykDdK8og==
-
 normalize-path@^3.0.0, normalize-path@~3.0.0:
   version "3.0.0"
   resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz"
@@ -3167,7 +3094,7 @@ punycode@^2.1.0:
   resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz"
   integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==
 
-queue-microtask@^1.2.2, queue-microtask@^1.2.3:
+queue-microtask@^1.2.2:
   version "1.2.3"
   resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz"
   integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
@@ -3266,13 +3193,6 @@ rollup@^4.0.2:
     "@rollup/rollup-win32-x64-msvc" "4.9.6"
     fsevents "~2.3.2"
 
-run-parallel-limit@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz#be80e936f5768623a38a963262d6bef8ff11e7ba"
-  integrity sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw==
-  dependencies:
-    queue-microtask "^1.2.2"
-
 run-parallel@^1.1.9:
   version "1.2.0"
   resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz"

From 6efd06509b85daf19801f74f3bc4e01ac80d9908 Mon Sep 17 00:00:00 2001
From: jtsmedley <38006759+jtsmedley@users.noreply.github.com>
Date: Mon, 18 Mar 2024 12:25:03 -0500
Subject: [PATCH 16/16] Unneeded dev dep

---
 package.json | 1 -
 yarn.lock    | 5 -----
 2 files changed, 6 deletions(-)

diff --git a/package.json b/package.json
index 7c36787..577ef7b 100644
--- a/package.json
+++ b/package.json
@@ -44,7 +44,6 @@
     "clean-jsdoc-theme": "4.2.17",
     "jsdoc": "4.0.2",
     "prettier": "3.1.0",
-    "recursive-fs": "2.1.0",
     "tsup": "8.0.1",
     "typescript": "5.3.3"
   },
diff --git a/yarn.lock b/yarn.lock
index 0fb7845..a16ec07 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3139,11 +3139,6 @@ receptacle@^1.3.2:
   dependencies:
     ms "^2.1.1"
 
-recursive-fs@2.1.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/recursive-fs/-/recursive-fs-2.1.0.tgz#1e20cf7836b292ed81208c4817550a58ad0e15ff"
-  integrity sha512-oed3YruYsD52Mi16s/07eYblQOLi5dTtxpIJNdfCEJ7S5v8dDgVcycar0pRWf4IBuPMIkoctC8RTqGJzIKMNAQ==
-
 relateurl@^0.2.7:
   version "0.2.7"
   resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz"