From af4cd5c82137c161125a7fedde10b1c470bd8802 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Andrade?= Date: Sun, 17 Sep 2023 20:33:13 +0200 Subject: [PATCH] * Refactor types * Refactor reading excel file * Add generate command for pfps --- README.md | 206 +++++-- oclif.manifest.json | 114 +++- package.json | 4 +- src/base/BaseCommand.ts | 6 +- src/commands/assets/mint.ts | 45 +- src/commands/config/auth.ts | 2 +- src/commands/config/init.ts | 2 +- src/commands/config/set.ts | 2 +- .../{mint-metadata.ts => mint-file.ts} | 7 +- src/commands/generate/pfp-file.ts | 205 +++++++ ...{template-metadata.ts => template-file.ts} | 7 +- src/commands/pfps/generate.ts | 148 +++++ src/commands/pfps/index.ts | 9 + src/commands/templates/create.ts | 40 +- src/commands/templates/export.ts | 3 +- src/services/antelope-service.ts | 2 +- src/services/asset-service.ts | 13 +- src/services/pfp-service.ts | 542 ++++++++++++++++++ src/services/schema-service.ts | 9 +- src/services/template-service.ts | 15 +- src/types/assets.ts | 10 + src/types/commands.ts | 55 ++ src/types/index.ts | 5 + src/types/schemas.ts | 7 + src/types/templates.ts | 12 + src/utils/config-utils.ts | 2 +- src/utils/excel-utils.ts | 84 +++ yarn.lock | 193 ++++++- 28 files changed, 1601 insertions(+), 148 deletions(-) rename src/commands/generate/{mint-metadata.ts => mint-file.ts} (94%) create mode 100644 src/commands/generate/pfp-file.ts rename src/commands/generate/{template-metadata.ts => template-file.ts} (92%) create mode 100644 src/commands/pfps/generate.ts create mode 100644 src/commands/pfps/index.ts create mode 100644 src/services/pfp-service.ts create mode 100644 src/types/assets.ts create mode 100644 src/types/commands.ts create mode 100644 src/types/index.ts create mode 100644 src/types/schemas.ts create mode 100644 src/types/templates.ts create mode 100644 src/utils/excel-utils.ts diff --git a/README.md b/README.md index 8fa78d6..2bba914 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ $ npm 
install -g @nefty/cli $ nefty COMMAND running command... $ nefty (--version) -@nefty/cli/0.1.2 darwin-arm64 node-v18.12.1 +@nefty/cli/0.1.3 darwin-arm64 node-v18.12.1 $ nefty --help [COMMAND] USAGE $ nefty COMMAND @@ -98,14 +98,19 @@ The required parameters are the collection name and the path where the file will You can also filter by schema in case you just want to work based on 1 schema You can generate and download these templates by running the following commands: -## Generate XLS Template for Template Creation +## Generate XLS File for Template Creation ``` -nefty generate template-metadata ~/Downloads/template-file-path -c yourCollectionName -s yourSchemaName +nefty generate template-file ~/Downloads/template-file-path -c yourCollectionName -s yourSchemaName ``` -## Generate XLS Template for Minting Assets +## Generate XLS File for Minting Assets ``` -nefty generate mint-metadata ~/Downloads/mint-file-path -c yourCollectionName -s yourSchemaName +nefty generate mint-file ~/Downloads/mint-file-path -c yourCollectionName -s yourSchemaName +``` + +## Generate XLS File for PFP Generation +``` +nefty generate pfp-file ~/Downloads/pfp-file-path -l Body -l Head -l Hair ``` ## Create Templates @@ -124,47 +129,83 @@ You can mint NFTs by running the following command: nefty assets mint ~/path/to/xls/file -c collectionName ``` +## Generate PFPs + +You can generate PFPs by running the following command: + +``` +nefty pfps generate ~/path/to/xls/file ~/path/to/output/dir -r ~/path/to/source/layers +``` + # XLS files -The CLI will read from a XLS template that will contain the schema(s) of the template(s) that we want to create. +The CLI reads data from an XLS template, which provides the schema(s) for the desired template(s). ## Template creation file -This file will have to contain the following headers with the template information: +Each sheet within this file should represent a distinct schema name. 
The mandatory headers detailing the template information are as follows: -| Header | Description | -|--------------------------|-------------------------------------------------------------------------------------------------| -| template_max_supply | The amount of assets that will be available to mint for this template (0 means infinite supply) | -| template_is_burnable | Indicates if you will be able to burn your assets | -| template_is_transferable | Indicates if you can transfer your assets to another account | +| Header | Description | +|----------------------------|--------------------------------------------------------------------------------------| +| `template_max_supply` | Total assets available for minting under this template. (0 denotes unlimited supply) | +| `template_is_burnable` | Specifies whether assets can be burned. | +| `template_is_transferable` | Indicates if assets are transferable to other accounts. | -After that we can add the custom attributes for the templates +Following these headers, custom attributes for the templates can be added: | template_max_supply | template_is_burnable | template_is_transferable | name | image | custom attr1 | custom attr2 | ... | |---------------------|----------------------|--------------------------|-------|-----------|---------------|---------------|-----| | 2000 | TRUE/FALSE | TRUE/FALSE | nefty | ipfs_hash | custom value1 | custom value2 | ... | | 4000 | TRUE/FALSE | TRUE/FALSE | nefty | ipfs_hash | custom value1 | custom value2 | ... | -## Mint file +## Mint File -This file will have to contain the following headers with the asset information: +Similarly, each sheet in this file should represent a unique schema name. 
The required headers detailing the asset information are: -| Header | Description | -|----------|-----------------------------------------------------------------------------| -| template | The id of the template to mint the asset (-1 if no template should be used) | -| amount | The amount of NFTs to be minted | -| owner | The owner of minted NFT | +| Header | Description | +|------------|----------------------------------------------------------------------------------------------------| +| `template` | ID of the template to which the asset belongs. Use -1 if no specific template is to be associated. | +| `amount` | Quantity of NFTs set to be minted. | +| `owner` | Account that will own the minted NFT. | -After that we can add the custom attributes for the templates +After specifying these headers, you can include custom attributes for the assets: | template | amount | owner | name | image | custom attr1 | custom attr2 | ... | |----------|--------|--------------|-------|-----------|---------------|---------------|-----| | -1 | 10 | superaccount | nefty | ipfs_hash | custom value1 | custom value2 | ... | | 631839 | 20 | superaccount | nefty | ipfs_hash | custom value1 | custom value2 | ... | +## PFP Layers File + +The PFP layers file is used to define how layers are organized and interact in the given project. This file specifies the names of the layers in each sheet, wherein the order of the sheets determines the sequence in which the layers are placed on top of each other. + +### Mandatory Headers + +Each sheet must contain these required headers: + +| Header | Description | +|---------|---------------------------------------------------------------------------------------------------------------------------------| +| `id` | A unique identifier for each option. | +| `value` | Specifies the attribute's value for the given option. | +| `odds` | Indicates the likelihood of the option being selected. 
All odds within the options sum up to determine the overall probability. | + +### Optional Headers + +In addition to the mandatory headers, the following headers can be included: + +| Header | Description | +|--------------------|----------------------------------------------------------------------------------------------------------------| +| `path` | Path of the image (relative to the `rootDir`) associated with the option. | +| `skip` | Skips certain options within a layer or an entire layer. Specify a layer name to skip. For specific options, mention the layer name followed by ":" and the option ids, separated by commas. List one layer per line if skipping multiple layers. | +| `dependencies` | Sets dependencies based on a previous layer's value. Specify the layer name followed by ":" and the option ids that it depends on. For multiple layers, list one layer per line. | +| `insertFromLayers` | Includes options from previous layers in the image when the current option is chosen. Define the layer name, followed by ":" and the option ids as dependencies. For multiple layers, list one layer per line. | +| `sameIdRestrictions` | Puts a constraint on an option if another option has the same id. Define the layer name, followed by ":" and the option ids that it relies on. | +| `removeLayers` | Specifies which preceding layers should be removed if the current option is chosen. Only the layer name is required. For multiple layers, list one layer per line. 
| + + @@ -178,9 +219,12 @@ After that we can add the custom attributes for the templates * [`nefty config init`](#nefty-config-init) * [`nefty config set [PROPERTY] [VALUE]`](#nefty-config-set-property-value) * [`nefty generate`](#nefty-generate) -* [`nefty generate mint-metadata OUTPUT`](#nefty-generate-mint-metadata-output) -* [`nefty generate template-metadata OUTPUT`](#nefty-generate-template-metadata-output) +* [`nefty generate mint-file OUTPUT`](#nefty-generate-mint-file-output) +* [`nefty generate pfp-file OUTPUT`](#nefty-generate-pfp-file-output) +* [`nefty generate template-file OUTPUT`](#nefty-generate-template-file-output) * [`nefty help [COMMANDS]`](#nefty-help-commands) +* [`nefty pfps`](#nefty-pfps) +* [`nefty pfps generate INPUT OUTPUT`](#nefty-pfps-generate-input-output) * [`nefty templates`](#nefty-templates) * [`nefty templates create INPUT`](#nefty-templates-create-input) * [`nefty templates export OUTPUT`](#nefty-templates-export-output) @@ -197,7 +241,7 @@ DESCRIPTION Manages a collection's assets. ``` -_See code: [dist/commands/assets/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/assets/index.ts)_ +_See code: [dist/commands/assets/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/assets/index.ts)_ ## `nefty assets mint INPUT` @@ -222,7 +266,7 @@ EXAMPLES $ nefty assets mint test.xls -c alpacaworlds ``` -_See code: [dist/commands/assets/mint.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/assets/mint.ts)_ +_See code: [dist/commands/assets/mint.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/assets/mint.ts)_ ## `nefty config` @@ -236,7 +280,7 @@ DESCRIPTION Manages the configuration. 
``` -_See code: [dist/commands/config/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/config/index.ts)_ +_See code: [dist/commands/config/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/config/index.ts)_ ## `nefty config auth` @@ -255,7 +299,7 @@ EXAMPLES $ nefty config auth auth ``` -_See code: [dist/commands/config/auth.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/config/auth.ts)_ +_See code: [dist/commands/config/auth.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/config/auth.ts)_ ## `nefty config get` @@ -272,7 +316,7 @@ EXAMPLES $ nefty config get ``` -_See code: [dist/commands/config/get.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/config/get.ts)_ +_See code: [dist/commands/config/get.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/config/get.ts)_ ## `nefty config init` @@ -292,7 +336,7 @@ EXAMPLES $ nefty config init ``` -_See code: [dist/commands/config/init.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/config/init.ts)_ +_See code: [dist/commands/config/init.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/config/init.ts)_ ## `nefty config set [PROPERTY] [VALUE]` @@ -315,7 +359,7 @@ EXAMPLES $ nefty config set explorerUrl https://waxblock.io ``` -_See code: [dist/commands/config/set.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/config/set.ts)_ +_See code: [dist/commands/config/set.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/config/set.ts)_ ## `nefty generate` @@ -329,15 +373,15 @@ DESCRIPTION Generates files to use in other batch commands. 
``` -_See code: [dist/commands/generate/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/generate/index.ts)_ +_See code: [dist/commands/generate/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/generate/index.ts)_ -## `nefty generate mint-metadata OUTPUT` +## `nefty generate mint-file OUTPUT` Generates the file to batch mint assets in a collection. Each schema will be a different sheet. ``` USAGE - $ nefty generate mint-metadata OUTPUT -c [-s ] + $ nefty generate mint-file OUTPUT -c [-s ] ARGUMENTS OUTPUT Location where the file will be generated. @@ -353,23 +397,55 @@ EXAMPLES Generates the file for the collection alpacaworlds, schema thejourney and saves it in the current directory in a file called mints.xlsx. - $ nefty generate mint-metadata mints.xlsx -c alpacaworlds -s thejourney + $ nefty generate mint-file mints.xlsx -c alpacaworlds -s thejourney Generates the file for the collection alpacaworlds, all schemas and saves it in the current directory in a file called mints.xlsx. - $ nefty generate mint-metadata mints.xlsx -c alpacaworlds + $ nefty generate mint-file mints.xlsx -c alpacaworlds +``` + +_See code: [dist/commands/generate/mint-file.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/generate/mint-file.ts)_ + +## `nefty generate pfp-file OUTPUT` + +Generates the file to generate a pfp collection with the specified layers. + ``` +USAGE + $ nefty generate pfp-file OUTPUT [-l ] [-a] -_See code: [dist/commands/generate/mint-metadata.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/generate/mint-metadata.ts)_ +ARGUMENTS + OUTPUT Location where the file will be generated. + +FLAGS + -a, --advanced Include advanced headers. + -l, --layers=... The names of the layers to include in the file. + +DESCRIPTION + Generates the file to generate a pfp collection with the specified layers. 
+ +EXAMPLES + Generates the file to create pfps with the layers Body, Face and Hair and saves it in the current directory in a + file called pfp-layers.xlsx. -## `nefty generate template-metadata OUTPUT` + $ nefty generate pfp-file pfp-layers.xlsx -l Body -l Face -l Hair + + Generates the file to create pfps with the layers Body, Face and Hair with advanced headers and saves it in the + current directory in a file called pfp-layers.xlsx. + + $ nefty generate pfp-file pfp-layers.xlsx -l Body -l Face -l Hair -a +``` + +_See code: [dist/commands/generate/pfp-file.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/generate/pfp-file.ts)_ + +## `nefty generate template-file OUTPUT` Generates the file to batch create templates in a collection. Each schema will be a different sheet. ``` USAGE - $ nefty generate template-metadata OUTPUT -c [-s ] + $ nefty generate template-file OUTPUT -c [-s ] ARGUMENTS OUTPUT Location where the file will be generated. @@ -385,15 +461,15 @@ EXAMPLES Generates the file for the collection alpacaworlds, schema thejourney and saves it in the current directory in a file called templates.xlsx. - $ nefty generate template-metadata templates.xlsx -c alpacaworlds -s thejourney + $ nefty generate template-file templates.xlsx -c alpacaworlds -s thejourney Generates the file for the collection alpacaworlds, all schemas and saves it in the current directory in a file called templates.xlsx. 
- $ nefty generate template-metadata templates.xlsx -c alpacaworlds + $ nefty generate template-file templates.xlsx -c alpacaworlds ``` -_See code: [dist/commands/generate/template-metadata.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/generate/template-metadata.ts)_ +_See code: [dist/commands/generate/template-file.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/generate/template-file.ts)_ ## `nefty help [COMMANDS]` @@ -415,6 +491,48 @@ DESCRIPTION _See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v5.2.19/src/commands/help.ts)_ +## `nefty pfps` + +Commands to manage a PFP collection. + +``` +USAGE + $ nefty pfps + +DESCRIPTION + Commands to manage a PFP collection. +``` + +_See code: [dist/commands/pfps/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/pfps/index.ts)_ + +## `nefty pfps generate INPUT OUTPUT` + +Generates the images and attributes for a pfp collection. + +``` +USAGE + $ nefty pfps generate INPUT OUTPUT -q [-r ] [-w ] + +ARGUMENTS + INPUT Location or google sheets id of the excel file with the pfps definitions. + OUTPUT Directory where the images will be saved. + +FLAGS + -q, --quantity= (required) Number of pfps to generate. + -r, --rootDir= Directory where the assets are stored. + -w, --resizeWidth= Width to resize the images to. + +DESCRIPTION + Generates the images and attributes for a pfp collection. + +EXAMPLES + Generates all the pfps defined in the pfps-specs.xlsx file and saves them in the pfps directory. + + $ nefty pfps generate pfps-specs.xlsx pfps +``` + +_See code: [dist/commands/pfps/generate.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/pfps/generate.ts)_ + ## `nefty templates` Manages a collection's templates. @@ -427,7 +545,7 @@ DESCRIPTION Manages a collection's templates. 
``` -_See code: [dist/commands/templates/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/templates/index.ts)_ +_See code: [dist/commands/templates/index.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/templates/index.ts)_ ## `nefty templates create INPUT` @@ -451,7 +569,7 @@ EXAMPLES $ nefty templates create template.xls -c alpacaworlds ``` -_See code: [dist/commands/templates/create.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/templates/create.ts)_ +_See code: [dist/commands/templates/create.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/templates/create.ts)_ ## `nefty templates export OUTPUT` @@ -483,5 +601,5 @@ EXAMPLES $ nefty templates export templates.xlsx -c alpacaworlds ``` -_See code: [dist/commands/templates/export.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.2/src/commands/templates/export.ts)_ +_See code: [dist/commands/templates/export.ts](https://github.com/neftyblocks/neftyblocks-cli/blob/v0.1.3/src/commands/templates/export.ts)_ diff --git a/oclif.manifest.json b/oclif.manifest.json index d76a2a3..f9017bc 100644 --- a/oclif.manifest.json +++ b/oclif.manifest.json @@ -1,5 +1,5 @@ { - "version": "0.1.2", + "version": "0.1.3", "commands": { "assets": { "id": "assets", @@ -163,8 +163,8 @@ "flags": {}, "args": {} }, - "generate:mint-metadata": { - "id": "generate:mint-metadata", + "generate:mint-file": { + "id": "generate:mint-file", "description": "Generates the file to batch mint assets in a collection. 
Each schema will be a different sheet.", "strict": true, "pluginName": "@nefty/cli", @@ -206,8 +206,50 @@ } } }, - "generate:template-metadata": { - "id": "generate:template-metadata", + "generate:pfp-file": { + "id": "generate:pfp-file", + "description": "Generates the file to generate a pfp collection with the specified layers.", + "strict": true, + "pluginName": "@nefty/cli", + "pluginAlias": "@nefty/cli", + "pluginType": "core", + "aliases": [], + "examples": [ + { + "command": "<%= config.bin %> <%= command.id %> pfp-layers.xlsx -l Body -l Face -l Hair", + "description": "Generates the file to create pfps with the layers Body, Face and Hair and saves it in the current directory in a file called pfp-layers.xlsx." + }, + { + "command": "<%= config.bin %> <%= command.id %> pfp-layers.xlsx -l Body -l Face -l Hair -a", + "description": "Generates the file to create pfps with the layers Body, Face and Hair with advanced headers and saves it in the current directory in a file called pfp-layers.xlsx." + } + ], + "flags": { + "layers": { + "name": "layers", + "type": "option", + "char": "l", + "description": "The names of the layers to include in the file.", + "multiple": true + }, + "advanced": { + "name": "advanced", + "type": "boolean", + "char": "a", + "description": "Include advanced headers.", + "allowNo": false + } + }, + "args": { + "output": { + "name": "output", + "description": "Location where the file will be generated.", + "required": true + } + } + }, + "generate:template-file": { + "id": "generate:template-file", "description": "Generates the file to batch create templates in a collection. 
Each schema will be a different sheet.", "strict": true, "pluginName": "@nefty/cli", @@ -249,6 +291,68 @@ } } }, + "pfps:generate": { + "id": "pfps:generate", + "description": "Generates the images and attributes for a pfp collection.", + "strict": true, + "pluginName": "@nefty/cli", + "pluginAlias": "@nefty/cli", + "pluginType": "core", + "aliases": [], + "examples": [ + { + "command": "<%= config.bin %> <%= command.id %> pfps-specs.xlsx pfps", + "description": "Generates all the pfps defined in the pfps-specs.xlsx file and saves them in the pfps directory." + } + ], + "flags": { + "rootDir": { + "name": "rootDir", + "type": "option", + "char": "r", + "description": "Directory where the assets are stored.", + "multiple": false + }, + "resizeWidth": { + "name": "resizeWidth", + "type": "option", + "char": "w", + "description": "Width to resize the images to.", + "multiple": false + }, + "quantity": { + "name": "quantity", + "type": "option", + "char": "q", + "description": "Number of pfps to generate.", + "required": true, + "multiple": false + } + }, + "args": { + "input": { + "name": "input", + "description": "Location or google sheets id of the excel file with the pfps definitions.", + "required": true + }, + "output": { + "name": "output", + "description": "Directory where the images will be saved.", + "required": true + } + } + }, + "pfps": { + "id": "pfps", + "description": "Commands to manage a PFP collection.", + "strict": true, + "pluginName": "@nefty/cli", + "pluginAlias": "@nefty/cli", + "pluginType": "core", + "aliases": [], + "flags": {}, + "args": {} + }, "templates:create": { "id": "templates:create", "description": "Create templates in a collection by batches using a spreadsheet.", diff --git a/package.json b/package.json index 55fc49a..7de61b1 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@nefty/cli", - "version": "0.1.2", + "version": "0.1.3", "description": "The NeftyBlocks cli will help you manage your collection with 
commands to create templates, mint assets, and more.", "author": "NeftyBlocks", "bin": { @@ -25,9 +25,11 @@ "@wharfkit/wallet-plugin-anchor": "^1.0.0", "@wharfkit/wallet-plugin-privatekey": "^1.0.0", "atomicassets": "^1.5.1", + "cli-progress": "^3.12.0", "node-fetch": "2.6.7", "qrcode-terminal": "^0.12.0", "read-excel-file": "^5.6.1", + "sharp": "^0.32.5", "write-excel-file": "^1.4.27" }, "devDependencies": { diff --git a/src/base/BaseCommand.ts b/src/base/BaseCommand.ts index 0fa29ec..2e6e88e 100644 --- a/src/base/BaseCommand.ts +++ b/src/base/BaseCommand.ts @@ -1,9 +1,13 @@ import { Command } from '@oclif/core'; -import { CliConfig } from '../types/cli-config'; +import { CliConfig } from '../types'; import { readConfiguration } from '../utils/config-utils'; import { getSession } from '../services/antelope-service'; export abstract class BaseCommand extends Command { + async catch(error: any) { + this.log(error.message); + } + async getCliConfig(requireSession = true): Promise { const config = readConfiguration(this.config.configDir); if (!config) { diff --git a/src/commands/assets/mint.ts b/src/commands/assets/mint.ts index d859412..e87cbaf 100644 --- a/src/commands/assets/mint.ts +++ b/src/commands/assets/mint.ts @@ -1,28 +1,20 @@ import { ux, Flags, Args } from '@oclif/core'; -import readXlsxFile, { readSheetNames } from 'read-excel-file/node'; -import { MintData, mintAssets } from '../../services/asset-service'; +import { mintAssets } from '../../services/asset-service'; import { Cell, Row } from 'read-excel-file/types'; import { getTemplatesMap } from '../../services/template-service'; import { getBatchesFromArray } from '../../utils/array-utils'; -import { fileExists } from '../../utils/file-utils'; -import { AssetSchema, getCollectionSchemas } from '../../services/schema-service'; +import { getCollectionSchemas } from '../../services/schema-service'; import { isValidAttribute, typeAliases } from '../../utils/attributes-utils'; -import { CliConfig } from 
'../../types/cli-config'; +import { AssetSchema, CliConfig } from '../../types'; import { TransactResult } from '@wharfkit/session'; import { BaseCommand } from '../../base/BaseCommand'; +import { readExcelContents } from '../../utils/excel-utils'; +import { MintRow } from '../../types'; const templateField = 'template'; const amountField = 'amount'; const ownerField = 'owner'; -type MintRow = { - schema: AssetSchema; - templateId: string; - amount: number; - owner: string; - mintActionData: MintData; -}; - export default class MintCommand extends BaseCommand { static description = 'Mints assets in batches using a spreadsheet.'; @@ -68,22 +60,21 @@ export default class MintCommand extends BaseCommand { ux.action.stop(); // Read XLS file - if (!fileExists(mintsFile)) { - this.error('XLS file not found!'); - } - - ux.action.start('Reading mints in file'); - const sheetNames = await readSheetNames(mintsFile); - const sheets = await Promise.all(sheetNames.map((name) => readXlsxFile(mintsFile, { sheet: name }))); - const mintRows: MintRow[] = []; - for (let i = 0; i < sheetNames.length; i++) { - const sheet = sheets[i]; - const schemaName = sheetNames[i].trim(); - const schema = schemasMap[schemaName]; - mintRows.push(...(await this.getMintRows(sheet, schema, config, ignoreSupply))); + try { + ux.action.start('Reading mints in file'); + const sheets = await readExcelContents(mintsFile); + for (let i = 0; i < sheets.length; i++) { + const { name, rows } = sheets[i]; + const schemaName = name.trim(); + const schema = schemasMap[schemaName]; + mintRows.push(...(await this.getMintRows(rows, schema, config, ignoreSupply))); + } + } catch (error: any) { + this.error(`Error reading file: ${error.message}`); + } finally { + ux.action.stop(); } - ux.action.stop(); // Create table columns and print table const columns: any = { diff --git a/src/commands/config/auth.ts b/src/commands/config/auth.ts index f0f058e..1c698d7 100644 --- a/src/commands/config/auth.ts +++ 
b/src/commands/config/auth.ts @@ -1,5 +1,5 @@ import { Command } from '@oclif/core'; -import { SettingsConfig } from '../../types/cli-config'; +import { SettingsConfig } from '../../types'; import { readConfiguration } from '../../utils/config-utils'; import { getSession } from '../../services/antelope-service'; import { removeDir } from '../../utils/file-utils'; diff --git a/src/commands/config/init.ts b/src/commands/config/init.ts index e44e50e..9ebaacd 100644 --- a/src/commands/config/init.ts +++ b/src/commands/config/init.ts @@ -3,7 +3,7 @@ import { configFileExists, getSessionDir, removeConfigFile, writeConfiguration } import { getChainId, validateExplorerUrl, validateAtomicAssetsUrl } from '../../utils/config-utils'; import { getSession } from '../../services/antelope-service'; -import { SettingsConfig } from '../../types/cli-config'; +import { SettingsConfig } from '../../types'; import { input, select } from '@inquirer/prompts'; interface Preset { diff --git a/src/commands/config/set.ts b/src/commands/config/set.ts index 409971e..eac6d40 100644 --- a/src/commands/config/set.ts +++ b/src/commands/config/set.ts @@ -1,5 +1,5 @@ import { Args, Command, ux } from '@oclif/core'; -import { SettingsConfig } from '../../types/cli-config'; +import { SettingsConfig } from '../../types'; import { readConfiguration, validate, writeConfiguration } from '../../utils/config-utils'; export default class SetCommand extends Command { diff --git a/src/commands/generate/mint-metadata.ts b/src/commands/generate/mint-file.ts similarity index 94% rename from src/commands/generate/mint-metadata.ts rename to src/commands/generate/mint-file.ts index 09145e8..5f94111 100644 --- a/src/commands/generate/mint-metadata.ts +++ b/src/commands/generate/mint-file.ts @@ -2,10 +2,11 @@ import { Args, Flags, ux } from '@oclif/core'; import { getTemplatesForCollection, getTemplatesFromSchema } from '../../services/template-service'; import { BaseCommand } from '../../base/BaseCommand'; import 
writeXlsxFile from 'write-excel-file/node'; -import { AssetSchema, getCollectionSchemas, getSchema } from '../../services/schema-service'; +import { getCollectionSchemas, getSchema } from '../../services/schema-service'; import { ITemplate } from 'atomicassets/build/API/Explorer/Objects'; import { getXlsType, transformValueToType } from '../../utils/attributes-utils'; import { fileExists } from '../../utils/file-utils'; +import { AssetSchema } from '../../types'; const headers = [ { @@ -19,7 +20,7 @@ const headers = [ }, ]; -export default class GenerateMintMetadataCommand extends BaseCommand { +export default class GenerateMintFileCommand extends BaseCommand { static examples = [ { command: '<%= config.bin %> <%= command.id %> mints.xlsx -c alpacaworlds -s thejourney', @@ -55,7 +56,7 @@ export default class GenerateMintMetadataCommand extends BaseCommand { }; public async run(): Promise { - const { flags, args } = await this.parse(GenerateMintMetadataCommand); + const { flags, args } = await this.parse(GenerateMintFileCommand); const config = await this.getCliConfig(); const output = args.output; diff --git a/src/commands/generate/pfp-file.ts b/src/commands/generate/pfp-file.ts new file mode 100644 index 0000000..52b4e3a --- /dev/null +++ b/src/commands/generate/pfp-file.ts @@ -0,0 +1,205 @@ +import { Args, Flags, ux } from '@oclif/core'; +import { BaseCommand } from '../../base/BaseCommand'; +import writeXlsxFile from 'write-excel-file/node'; +import { fileExists } from '../../utils/file-utils'; +import { + dependenciesHeader, + idHeader, + insertFromLayersHeader, + oddsHeader, + pathHeader, + removeLayersHeader, + sameIdRestrictionsHeader, + skipHeader, + valueHeader, +} from '../../services/pfp-service'; + +export default class GeneratePfpFileCommand extends BaseCommand { + static examples = [ + { + command: '<%= config.bin %> <%= command.id %> pfp-layers.xlsx -l Body -l Face -l Hair', + description: + 'Generates the file to create pfps with the layers Body, 
Face and Hair and saves it in the current directory in a file called pfp-layers.xlsx.', + }, + { + command: '<%= config.bin %> <%= command.id %> pfp-layers.xlsx -l Body -l Face -l Hair -a', + description: + 'Generates the file to create pfps with the layers Body, Face and Hair with advanced headers and saves it in the current directory in a file called pfp-layers.xlsx.', + }, + ]; + static description = 'Generates the file to generate a pfp collection with the specified layers.'; + + static args = { + output: Args.file({ + description: 'Location where the file will be generated.', + required: true, + }), + }; + + static flags = { + layers: Flags.string({ + char: 'l', + description: 'The names of the layers to include in the file.', + multiple: true, + }), + advanced: Flags.boolean({ + char: 'a', + description: 'Include advanced headers.', + }), + }; + + public async run(): Promise { + const { flags, args } = await this.parse(GeneratePfpFileCommand); + + const output = args.output; + const layers = flags.layers || ['Layer1']; + const advanced = flags.advanced; + + if (fileExists(output)) { + const proceed = await ux.confirm('File already exists. Do you want to overwrite it?'); + if (!proceed) { + this.exit(); + } + } + + const headersRow = [ + { + value: idHeader, + type: String, + fontWeight: 'bold', + }, + { + value: valueHeader, + type: String, + fontWeight: 'bold', + }, + { + value: oddsHeader, + type: String, + fontWeight: 'bold', + }, + { + value: pathHeader, + type: String, + fontWeight: 'bold', + }, + ...(advanced + ? [ + { + value: dependenciesHeader, + type: String, + fontWeight: 'bold', + }, + { + value: sameIdRestrictionsHeader, + type: String, + fontWeight: 'bold', + }, + { + value: skipHeader, + type: String, + fontWeight: 'bold', + }, + { + value: insertFromLayersHeader, + type: String, + fontWeight: 'bold', + }, + { + value: removeLayersHeader, + type: String, + fontWeight: 'bold', + }, + ] + : []), + ]; + + const advancedContent = advanced + ? 
[ + { + value: '', + type: String, + }, + { + value: '', + type: String, + }, + { + value: '', + type: String, + }, + { + value: '', + type: String, + }, + { + value: '', + type: String, + }, + ] + : []; + + const data = layers.map((layer) => [ + headersRow, + [ + { + value: '1', + type: String, + }, + { + value: `${layer} option 1`, + type: String, + }, + { + value: '1', + type: String, + }, + { + value: '', + type: String, + }, + ...advancedContent, + ], + [ + { + value: 'None', + type: String, + }, + { + value: 'None', + type: String, + }, + { + value: '1', + type: String, + }, + { + value: '', + type: String, + }, + ...advancedContent, + ], + ]); + + data.push([ + layers.map((layer) => ({ + value: layer, + type: String, + fontWeight: 'bold', + })), + layers.map(() => ({ + value: 'None', + type: String, + })), + ]); + + ux.action.start('Generating file...'); + await writeXlsxFile(data, { + sheets: [...layers.map((layer) => layer), '_force_'], + filePath: output, + }); + ux.action.stop(); + + this.log(`File generated at ${output}`); + } +} diff --git a/src/commands/generate/template-metadata.ts b/src/commands/generate/template-file.ts similarity index 92% rename from src/commands/generate/template-metadata.ts rename to src/commands/generate/template-file.ts index 0917f50..6e8f6a9 100644 --- a/src/commands/generate/template-metadata.ts +++ b/src/commands/generate/template-file.ts @@ -1,9 +1,10 @@ import { Args, Flags, ux } from '@oclif/core'; import { BaseCommand } from '../../base/BaseCommand'; import writeXlsxFile from 'write-excel-file/node'; -import { AssetSchema, getCollectionSchemas, getSchema } from '../../services/schema-service'; +import { getCollectionSchemas, getSchema } from '../../services/schema-service'; import { getXlsType } from '../../utils/attributes-utils'; import { fileExists } from '../../utils/file-utils'; +import { AssetSchema } from '../../types'; const headers = [ { @@ -17,7 +18,7 @@ const headers = [ }, ]; -export default class 
GenerateTemplateMetadataCommand extends BaseCommand { +export default class GenerateTemplateFileCommand extends BaseCommand { static examples = [ { command: '<%= config.bin %> <%= command.id %> templates.xlsx -c alpacaworlds -s thejourney', @@ -53,7 +54,7 @@ export default class GenerateTemplateMetadataCommand extends BaseCommand { }; public async run(): Promise { - const { flags, args } = await this.parse(GenerateTemplateMetadataCommand); + const { flags, args } = await this.parse(GenerateTemplateFileCommand); const config = await this.getCliConfig(); const output = args.output; diff --git a/src/commands/pfps/generate.ts b/src/commands/pfps/generate.ts new file mode 100644 index 0000000..45dda2b --- /dev/null +++ b/src/commands/pfps/generate.ts @@ -0,0 +1,148 @@ +import { Args, Flags, ux } from '@oclif/core'; +import { BaseCommand } from '../../base/BaseCommand'; +import { join } from 'node:path'; +import { fileExists } from '../../utils/file-utils'; +import { SingleBar } from 'cli-progress'; +import writeXlsxFile from 'write-excel-file/node'; +import { generateImage, generatePfps, readPfpLayerSpecs } from '../../services/pfp-service'; +import { existsSync, mkdirSync, rmSync } from 'node:fs'; + +export default class GeneratePfpsCommand extends BaseCommand { + static examples = [ + { + command: '<%= config.bin %> <%= command.id %> pfps-specs.xlsx pfps', + description: 'Generates all the pfps defined in the pfps-specs.xlsx file and saves them in the pfps directory.', + }, + ]; + static description = 'Generates the images and attributes for a pfp collection.'; + + static args = { + input: Args.string({ + description: 'Location or google sheets id of the excel file with the pfps definitions.', + required: true, + }), + output: Args.directory({ + description: 'Directory where the images will be saved.', + required: true, + }), + }; + + static flags = { + rootDir: Flags.directory({ + char: 'r', + exists: true, + description: 'Directory where the assets are stored.', + 
}), + resizeWidth: Flags.integer({ + char: 'w', + description: 'Width to resize the images to.', + }), + quantity: Flags.integer({ + char: 'q', + description: 'Number of pfps to generate.', + required: true, + }), + }; + + public async run(): Promise { + const { flags, args } = await this.parse(GeneratePfpsCommand); + + const output = args.output; + const rootDir = flags.rootDir || process.cwd(); + const quantity = flags.quantity; + + ux.action.start('Reading excel file...'); + const { layerSpecs, forcedPfps } = await readPfpLayerSpecs({ + filePathOrSheetsId: args.input, + }); + ux.action.stop(); + + ux.action.start('Mixing pfps...'); + const pfps = generatePfps({ + quantity, + layerSpecs, + forcedPfps, + }); + + // Save pfps to json file in output directory + if (!fileExists(output)) { + mkdirSync(output, { recursive: true }); + } + + const headerRow = [ + { + type: String, + value: 'dna', + }, + { + type: String, + value: 'imageLayers', + }, + ...layerSpecs.flatMap((layerSpec) => [ + { + type: String, + value: layerSpec.name, + }, + { + type: String, + value: `${layerSpec.name} id`, + }, + ]), + ]; + + const excelPfps = pfps.map((pfp) => [ + { + type: String, + value: pfp.dna, + }, + { + type: String, + value: pfp.imageLayers.join('\n'), + }, + ...pfp.attributes.flatMap((attribute) => [ + { + type: String, + value: attribute.value, + }, + { + type: String, + value: attribute.id, + }, + ]), + ]); + + const excelData = [headerRow, ...excelPfps]; + await writeXlsxFile(excelData, { filePath: join(output, 'pfps.xlsx') }); + ux.action.stop(); + + // Generate images for pfps + const progressBar = new SingleBar({ + format: 'Generating images | {bar} | {percentage}% | {value}/{total} pfps | ETA: {eta_formatted}', + barCompleteChar: '\u2588', + barIncompleteChar: '\u2591', + hideCursor: true, + }); + + try { + const outputFolder = join(output, 'images'); + if (existsSync(outputFolder)) { + rmSync(outputFolder, { recursive: true }); + } + + mkdirSync(outputFolder, { recursive:
true }); + + progressBar.start(pfps.length, 0); + for (const pfp of pfps) { + await generateImage({ + pfp, + rootDir, + outputFolder, + resizeWidth: flags.resizeWidth, + }); + progressBar.increment(); + } + } finally { + progressBar.stop(); + } + } +} diff --git a/src/commands/pfps/index.ts b/src/commands/pfps/index.ts new file mode 100644 index 0000000..df3d0f2 --- /dev/null +++ b/src/commands/pfps/index.ts @@ -0,0 +1,9 @@ +import { Command, run } from '@oclif/core'; + +export default class Pfps extends Command { + static description = 'Commands to manage a PFP collection.'; + + async run(): Promise { + run([Pfps.id, '--help']); + } +} diff --git a/src/commands/templates/create.ts b/src/commands/templates/create.ts index 69fae9c..f623998 100644 --- a/src/commands/templates/create.ts +++ b/src/commands/templates/create.ts @@ -1,13 +1,13 @@ import { Args, Flags, ux } from '@oclif/core'; -import readXlsxFile, { readSheetNames } from 'read-excel-file/node'; -import { AssetSchema, getCollectionSchemas } from '../../services/schema-service'; -import { TemplateToCreate, createTemplates } from '../../services/template-service'; +import { getCollectionSchemas } from '../../services/schema-service'; +import { createTemplates } from '../../services/template-service'; import { Cell, Row } from 'read-excel-file/types'; import { getBatchesFromArray } from '../../utils/array-utils'; -import { fileExists } from '../../utils/file-utils'; import { isValidAttribute } from '../../utils/attributes-utils'; import { TransactResult } from '@wharfkit/session'; import { BaseCommand } from '../../base/BaseCommand'; +import { readExcelContents } from '../../utils/excel-utils'; +import { AssetSchema, TemplateToCreate } from '../../types'; // Required headers const maxSupplyField = 'template_max_supply'; @@ -70,26 +70,24 @@ export default class CreateCommand extends BaseCommand { ux.action.stop(); // Read XLS file - if (!fileExists(templatesFile)) { - this.error('XLS file not found!'); - } - - 
ux.action.start('Reading templates in file'); - const sheetNames = await readSheetNames(templatesFile); - const sheets = await Promise.all(sheetNames.map((name) => readXlsxFile(templatesFile, { sheet: name }))); - - // Get Templates const templates: TemplateToCreate[] = []; - for (let i = 0; i < sheetNames.length; i++) { - const schemaName = sheetNames[i].trim(); - const sheet = sheets[i]; - const schema = schemasMap[schemaName]; - if (!schema) { - this.error(`Schema ${schemaName} doesn't exist`); + try { + ux.action.start('Reading templates in file'); + const sheets = await readExcelContents(templatesFile); + for (let i = 0; i < sheets.length; i++) { + const { name, rows } = sheets[i]; + const schemaName = name.trim(); + const schema = schemasMap[schemaName]; + if (!schema) { + this.error(`Schema ${schemaName} doesn't exist`); + } + templates.push(...this.getTemplateToCreate(rows, schema)); } - templates.push(...this.getTemplateToCreate(sheet, schema)); + } catch (error: any) { + this.error(`Error reading file: ${error.message}`); + } finally { + ux.action.stop(); } - ux.action.stop(); const batches = getBatchesFromArray(templates, batchSize); batches.forEach((templatesBatch: any[]) => { diff --git a/src/commands/templates/export.ts b/src/commands/templates/export.ts index d57b51e..1fea61b 100644 --- a/src/commands/templates/export.ts +++ b/src/commands/templates/export.ts @@ -2,10 +2,11 @@ import { Args, Flags, ux } from '@oclif/core'; import { getTemplatesForCollection, getTemplatesFromSchema } from '../../services/template-service'; import { BaseCommand } from '../../base/BaseCommand'; import writeXlsxFile from 'write-excel-file/node'; -import { AssetSchema, getCollectionSchemas, getSchema } from '../../services/schema-service'; +import { getCollectionSchemas, getSchema } from '../../services/schema-service'; import { ITemplate } from 'atomicassets/build/API/Explorer/Objects'; import { getXlsType, transformValueToType } from '../../utils/attributes-utils'; 
import { fileExists } from '../../utils/file-utils'; +import { AssetSchema } from '../../types'; const headers = [ { diff --git a/src/services/antelope-service.ts b/src/services/antelope-service.ts index 755276a..71fb2a4 100644 --- a/src/services/antelope-service.ts +++ b/src/services/antelope-service.ts @@ -1,6 +1,6 @@ import fetch from 'node-fetch'; import { ExplorerApi, RpcApi } from 'atomicassets'; -import { CliConfig, SettingsConfig } from '../types/cli-config'; +import { CliConfig, SettingsConfig } from '../types'; import { Session, SessionKit, TransactArgs, TransactResult, APIClient, API, AssetType } from '@wharfkit/session'; import { WalletPluginAnchor } from '@wharfkit/wallet-plugin-anchor'; import { ConsoleUserInterface } from '../wallet/ConsoleRenderer'; diff --git a/src/services/asset-service.ts b/src/services/asset-service.ts index 5571806..a6498dc 100644 --- a/src/services/asset-service.ts +++ b/src/services/asset-service.ts @@ -9,20 +9,9 @@ import { } from 'atomicassets/build/API/Explorer/Params'; import { IAccountStats, IAsset } from 'atomicassets/build/API/Explorer/Objects'; import { getBatchesFromArray } from '../utils/array-utils'; -import { CliConfig, SettingsConfig } from '../types/cli-config'; +import { CliConfig, MintData, SettingsConfig } from '../types'; import { TransactResult, AnyAction } from '@wharfkit/session'; -export interface MintData { - authorized_minter: string; - collection_name: string; - schema_name: string; - template_id: string; - new_asset_owner: string; - immutable_data: any[]; - mutable_data: any[]; - tokens_to_back: any[]; -} - export async function getAccountTemplates( account: string, options: GreylistParams & HideOffersParams, diff --git a/src/services/pfp-service.ts b/src/services/pfp-service.ts new file mode 100644 index 0000000..8ac8dec --- /dev/null +++ b/src/services/pfp-service.ts @@ -0,0 +1,542 @@ +import { PfpAttributeMap, PfpLayerOption, PfpLayerSpec, PfpSpec } from '../types'; +import { join } from 
'node:path'; +import sharp from 'sharp'; +import crypto from 'crypto'; +import { SheetContents, getSheetHeader, readExcelContents } from '../utils/excel-utils'; +import { Row } from 'read-excel-file/types'; + +export const forceSheetName = '_force_'; +export const idHeader = 'id'; +export const valueHeader = 'value'; +export const oddsHeader = 'odds'; +export const pathHeader = 'path'; +export const dependenciesHeader = 'dependencies'; +export const sameIdRestrictionsHeader = 'sameIdRestrictions'; +export const insertFromLayersHeader = 'insertFromLayers'; +export const skipHeader = 'skip'; +export const removeLayersHeader = 'removeLayers'; + +export async function readPfpLayerSpecs({ filePathOrSheetsId }: { filePathOrSheetsId: string }): Promise<{ + layerSpecs: PfpLayerSpec[]; + forcedPfps?: PfpAttributeMap[]; +}> { + const sheets = await readExcelContents(filePathOrSheetsId); + const forceSheet = sheets.find((sheet) => sheet.name.toLowerCase() === forceSheetName); + const layerSheets = sheets.filter((sheet) => sheet.name.toLowerCase() !== forceSheetName); + const layerSpecs = layerSheets.map((sheet) => + getLayersSpecs({ + sheet, + }), + ); + + const forcedPfps = forceSheet ? 
getForcePfps({ sheet: forceSheet, layerSpecs }) : []; + + return { + layerSpecs, + forcedPfps, + }; +} + +export function getLayersSpecs({ sheet }: { sheet: SheetContents }): PfpLayerSpec { + const rows = sheet.rows; + if (rows.length < 2) { + throw new Error(`No entries in the ${sheet.name} sheet`); + } + + const { headersMap, validateHeaders } = getSheetHeader(rows); + const validationError = validateHeaders([valueHeader, oddsHeader, idHeader]); + + if (validationError) { + throw new Error(`Error in sheet ${sheet.name}: ${validationError}`); + } + + const contentRows = rows.slice(1); + const optionsMap: Record = {}; + + contentRows.forEach((row: Row) => { + const { + id, + value, + odds, + skipLayers, + insertFromLayer, + imagePath, + dependencies, + sameIdRestrictions, + layersToRemove, + } = readContentRows({ row, headersMap }); + + const option = optionsMap[id]; + if (!option) { + optionsMap[id] = { + id, + value, + odds, + imagePaths: [], + skipLayers, + insertFromLayer, + layersToRemove, + }; + } + + if (imagePath) { + optionsMap[id].odds += odds; + optionsMap[id]?.imagePaths.push({ + value: imagePath, + dependencies, + sameIdRestrictions, + }); + } + }); + + return { + name: sheet.name, + options: Object.values(optionsMap), + }; +} + +function getForcePfps({ sheet, layerSpecs }: { sheet: SheetContents; layerSpecs: PfpLayerSpec[] }): PfpAttributeMap[] { + const rows = sheet.rows; + if (rows.length < 1) { + throw new Error(`No entries in the ${sheet.name} sheet`); + } + + const { headersMap, validateHeaders } = getSheetHeader(rows); + const layerNames = layerSpecs.map((layerSpec) => layerSpec.name); + const validationError = validateHeaders(layerNames); + + if (validationError) { + throw new Error(`Error in sheet ${sheet.name}: ${validationError}`); + } + + const contentRows = rows.slice(1); + const forcePfps: PfpAttributeMap[] = []; + + contentRows.forEach((row: Row) => { + const forcePfp: PfpAttributeMap = {}; + layerNames.forEach((layerName) => { + const 
layerValue = row[headersMap[layerName]]?.toString()?.trim(); + if (layerValue) { + forcePfp[layerName] = layerValue; + } + }); + if (Object.keys(forcePfp).length > 0) { + forcePfps.push(forcePfp); + } + }); + + return forcePfps; +} + +function readContentRows({ row, headersMap }: { row: Row; headersMap: Record }): { + id: string; + value: string; + odds: number; + imagePath?: string; + skipLayers: { + [key: string]: { + values: string[]; + skipNone: boolean; + }; + }; + insertFromLayer: { + [key: string]: string; + }; + dependencies: { + [key: string]: string; + }; + sameIdRestrictions: { + [key: string]: string; + }; + layersToRemove: string[]; +} { + const id = row[headersMap[idHeader]].toString(); + const value = row[headersMap[valueHeader]].toString(); + const odds = row[headersMap[oddsHeader]] as number; + const pathString = (row[headersMap[pathHeader]] || '') as string; + const imagePath = pathString ? pathString : undefined; + const skipString = (row[headersMap[skipHeader]] || '') as string; + const removeLayersString = (row[headersMap[removeLayersHeader]] || '') as string; + const dependenciesString = (row[headersMap[dependenciesHeader]] || '') as string; + const insertFromLayersString = (row[headersMap[insertFromLayersHeader]] || '') as string; + const sameIdRestrictionString = (row[headersMap[sameIdRestrictionsHeader]] || '') as string; + + // Get layers to remove + const layersToRemove = removeLayersString + .split(',') + .map((optionId) => optionId.trim()) + .filter((optionId) => optionId.length > 0); + + // Get skipped options + const skipLayers: { + [key: string]: { + values: string[]; + skipNone: boolean; + }; + } = {}; + const skipOptions = skipString + .split('\n') + .map((option) => option.trim()) + .filter((option) => option.length > 0); + skipOptions.forEach((option) => { + const [layerName, optionsString] = option.split(':').map((value) => value.trim()); + const optionIds = (optionsString || '') + .split(',') + .map((optionId) => 
optionId.trim()) + .filter((optionId) => optionId.length > 0); + if (!skipLayers[layerName]) { + skipLayers[layerName] = { + values: [], + skipNone: false, + }; + } + if (optionIds.length > 0) { + const skipNone = optionIds.find((optionId) => isNoneOption(optionId)); + if (skipNone) { + skipLayers[layerName].skipNone = true; + } + skipLayers[layerName].values.push(...optionIds); + } + }); + + // Get dependencies + const dependencies = dependenciesString + .split('\n') + .map((option) => option.trim()) + .filter((option) => option.length > 0) + .reduce((acc: { [key: string]: string }, option: string) => { + const [layerName, dependencyValue] = option.split(':').map((value) => value.trim()); + if (dependencyValue) { + acc[layerName] = dependencyValue; + } + return acc; + }, {}); + + // Same id restrictions + const sameIdRestrictions = sameIdRestrictionString + .split('\n') + .map((option) => option.trim()) + .filter((option) => option.length > 0) + .reduce((acc: { [key: string]: string }, option: string) => { + const [restrictionName, restrictionValue] = option.split(':').map((value) => value.trim()); + if (restrictionValue) { + acc[restrictionName] = restrictionValue; + } + return acc; + }, {}); + + // Layers to insert + const insertFromLayer = insertFromLayersString + .split('\n') + .map((option) => option.trim()) + .filter((option) => option.length > 0) + .reduce((acc: { [key: string]: string }, option: string) => { + const [layerName, optionId] = option.split(':').map((value) => value.trim()); + if (optionId) { + acc[layerName] = optionId; + } else { + throw new Error(`Invalid insertFromLayers value ${layerName}:${option}`); + } + return acc; + }, {}); + + return { + id, + value, + odds, + imagePath, + skipLayers, + insertFromLayer, + dependencies, + sameIdRestrictions, + layersToRemove, + }; +} + +export function generatePfps({ + quantity, + layerSpecs, + forcedPfps, +}: { + quantity: number; + layerSpecs: PfpLayerSpec[]; + forcedPfps?: PfpAttributeMap[]; +}): 
PfpSpec[] { + const pfps: PfpSpec[] = []; + const usedDnas: string[] = []; + + // Add forced pfps first + if (forcedPfps) { + forcedPfps.forEach((forcedPfp) => { + const pfp = generatePfp({ layerSpecs, fromAttributes: forcedPfp }); + if (pfp && !usedDnas.includes(pfp.dna)) { + pfps.push(pfp); + usedDnas.push(pfp.dna); + } + }); + } + + while (pfps.length < quantity) { + const pfp = generatePfp({ layerSpecs }); + if (pfp && !usedDnas.includes(pfp.dna)) { + pfps.push(pfp); + usedDnas.push(pfp.dna); + } + } + + return pfps; +} + +export function generatePfp({ + layerSpecs, + fromAttributes, +}: { + layerSpecs: PfpLayerSpec[]; + fromAttributes?: PfpAttributeMap; +}): PfpSpec | null { + const attributes: PfpAttributeMap = {}; + const attributesById: PfpAttributeMap = {}; + const restrictions: Record< + string, + { + values: string[]; + skipNone: boolean; + } + > = {}; + const layerImages: Record = {}; + + for (let i = 0; i < layerSpecs.length; i++) { + const layerSpec = layerSpecs[i]; + const sortedOptions = getAvailableOptions({ + layerSpec, + restrictions, + attributesById, + }); + + let option: PfpLayerOption | undefined; + if (fromAttributes) { + option = sortedOptions.find((option) => option.id === fromAttributes[layerSpec.name]); + } else { + const totalOdds = sortedOptions.reduce((acc, option) => acc + option.odds, 0); + let random = Math.floor(Math.random() * totalOdds); + option = sortedOptions.find((option) => { + if (random < option.odds) { + return true; + } + random -= option.odds; + return false; + }); + } + + if (!option) { + // If no option is found the pfp is invalid + if (fromAttributes) { + throw new Error( + `Invalid pfp, no option found for ${fromAttributes[layerSpec.name]} in layer ${layerSpec.name}`, + ); + } + return null; + } + + // Add to restrictions if incompatible options are found + if (option.skipLayers) { + Object.keys(option.skipLayers).forEach((layerName) => { + const incompatibleOptions = option!.skipLayers![layerName] || { + values: 
[], + skipNone: false, + }; + if (!restrictions[layerName]) { + restrictions[layerName] = { + values: [...incompatibleOptions.values], + skipNone: incompatibleOptions.skipNone, + }; + } + + if (incompatibleOptions.skipNone) { + restrictions[layerName].skipNone = true; + } + + if (incompatibleOptions.values.length === 0) { + restrictions[layerName].values = []; + } else if (restrictions[layerName].values.length > 0) { + restrictions[layerName].values.push(...incompatibleOptions.values); + restrictions[layerName].values = [...new Set(restrictions[layerName].values)]; + } + }); + } + + // If there are multiple image paths, the pfp is invalid + if (option.imagePaths.length > 1) { + throw new Error(`Invalid pfp, multiple image paths found for ${option.id} in layer ${layerSpec.name}`); + } + + // Add image path (if present) to layer + const imagePath = option.imagePaths[0]?.value; + if (imagePath) { + layerImages[layerSpec.name] = [imagePath]; + } else { + layerImages[layerSpec.name] = []; + } + + // If there is an insertFromLayer option, we need to insert the image path at the layer index + if (option.insertFromLayer) { + Object.keys(option.insertFromLayer).forEach((layerName) => { + const optionId = option!.insertFromLayer![layerName]; + const optionToInclude = layerSpecs + .find((layerSpec) => layerSpec.name === layerName) + ?.options.find((option) => option.id === optionId); + if (optionToInclude) { + // Insert the image path at the layer index without replacing the existing one + const randomIndex = Math.floor(Math.random() * optionToInclude.imagePaths.length); + const imagePath = optionToInclude.imagePaths[randomIndex]?.value; + if (imagePath) { + layerImages[layerName].splice(0, 0, imagePath); + } + } + }); + } + + // Remove layers if needed + if (option.layersToRemove) { + option.layersToRemove.forEach((layerName) => { + layerImages[layerName] = []; + attributes[layerName] = formatOptionValue(''); + attributesById[layerName] = ''; + }); + } + + 
attributes[layerSpec.name] = formatOptionValue(option.value); + attributesById[layerSpec.name] = option.id; + } + + const dnaComponents = layerSpecs.map((layerSpec) => `${layerSpec.name}:${attributesById[layerSpec.name]}`).join(''); + const dna = crypto.createHash('sha256').update(dnaComponents).digest('hex'); + return { + imageLayers: layerSpecs.flatMap((layerSpec) => layerImages[layerSpec.name]), + dna, + attributes: layerSpecs + .map((layerSpec) => ({ + name: layerSpec.name, + value: attributes[layerSpec.name], + id: attributesById[layerSpec.name], + })) + .filter((attribute) => attribute.value), + }; +} + +export function getAvailableOptions({ + layerSpec, + restrictions, + attributesById, +}: { + layerSpec: PfpLayerSpec; + restrictions: Record< + string, + { + values: string[]; + skipNone: boolean; + } + >; + attributesById: PfpAttributeMap; +}): PfpLayerOption[] { + const sortedOptions = layerSpec.options + .map((option) => { + // Filter out options that are restricted + const layerRestrictions = restrictions[layerSpec.name]; + if (layerRestrictions) { + const restrictictedOptionIds = layerRestrictions.values; + if (restrictictedOptionIds.length === 0) { + // Edge case for none option so that it can be used as a valid option + if (!layerRestrictions.skipNone) { + if (isNoneOption(option.id)) { + return { + ...option, + imagePaths: [], + }; + } + } + + return null; + } + + if (restrictictedOptionIds.includes(option.id)) { + return null; + } + } + + if (option.imagePaths.length === 0) { + return { + ...option, + imagePaths: [], + }; + } + + // Filter out options that don't match dependencies or same id restrictions + const matchingImagePaths = option.imagePaths.filter((imagePath) => { + const matchesDependencies = Object.keys(imagePath.dependencies || {}).every((dependency) => { + return attributesById[dependency] === imagePath.dependencies?.[dependency]; + }); + + const matchesSameIdRestrictions = Object.keys(imagePath.sameIdRestrictions || 
{}).every((restriction) => { + return attributesById[restriction] !== imagePath.sameIdRestrictions?.[restriction]; + }); + + return matchesDependencies && matchesSameIdRestrictions; + }); + + if (matchingImagePaths.length === 0) { + return null; + } + + return { + ...option, + imagePaths: matchingImagePaths, + }; + }) + .filter((option) => !!option) + .sort((a, b) => a!.odds - b!.odds) as PfpLayerOption[]; + + return sortedOptions; +} + +export async function generateImage({ + pfp, + outputFolder, + rootDir, + resizeWidth, +}: { + pfp: PfpSpec; + rootDir: string; + outputFolder: string; + resizeWidth?: number; +}): Promise { + const compositeOptions = pfp.imageLayers.map((layer) => ({ + input: join(rootDir, layer), + })); + + const outputPath = join(outputFolder, `${pfp.dna}.png`); + + const composition = await sharp(compositeOptions[0].input) + .composite(compositeOptions.slice(1)) + .removeAlpha() + .png() + .toBuffer(); + + if (resizeWidth) { + await sharp(composition).resize(resizeWidth).toFile(outputPath); + } else { + await sharp(composition).toFile(outputPath); + } +} + +function isNoneOption(value: string): boolean { + const lowerCaseValue = value.toLowerCase(); + return lowerCaseValue === 'none' || lowerCaseValue === '' || lowerCaseValue === '-1'; +} + +function formatOptionValue(value: string): string { + if (isNoneOption(value)) { + return 'None'; + } + return value.trim(); +} diff --git a/src/services/schema-service.ts b/src/services/schema-service.ts index 382637e..21f4339 100644 --- a/src/services/schema-service.ts +++ b/src/services/schema-service.ts @@ -1,16 +1,9 @@ import { getAtomicRpc, transact } from './antelope-service'; import RpcSchema from 'atomicassets/build/API/Rpc/Schema'; -import { CliConfig, SettingsConfig } from '../types/cli-config'; +import { AssetSchema, CliConfig, SettingsConfig } from '../types'; import { TransactResult } from '@wharfkit/session'; -import { SchemaObject } from 'atomicassets/build/Schema'; import { ux } from '@oclif/core';
-export interface AssetSchema { - name: string; - collectionName: string; - format: SchemaObject[]; -} - export async function getCollectionSchemas(collection: string, config: SettingsConfig): Promise { const result: RpcSchema[] = await getAtomicRpc(config.rpcUrl).getCollectionsSchemas(collection); return Promise.all( diff --git a/src/services/template-service.ts b/src/services/template-service.ts index 35665b7..6c80b3a 100644 --- a/src/services/template-service.ts +++ b/src/services/template-service.ts @@ -4,23 +4,10 @@ import { ITemplate } from 'atomicassets/build/API/Explorer/Objects'; import timeUtils from '../utils/time-utils'; import { getAtomicApi, transact } from './antelope-service'; import { getBatchesFromArray } from '../utils/array-utils'; -import { CliConfig, SettingsConfig } from '../types/cli-config'; +import { CliConfig, SettingsConfig, TemplateIdentifier, TemplateToCreate } from '../types'; import { TransactResult } from '@wharfkit/session'; import { ux } from '@oclif/core'; -export interface TemplateToCreate { - schema: string; - maxSupply: number; - isBurnable: boolean; - isTransferable: boolean; - immutableAttributes: unknown; -} - -export interface TemplateIdentifier { - templateId: string | number; - collectionName: string; -} - export async function getTemplate(collection: string, templateId: string, config: SettingsConfig): Promise { return getAtomicApi(config.aaUrl).getTemplate(collection, templateId); } diff --git a/src/types/assets.ts b/src/types/assets.ts new file mode 100644 index 0000000..7e7eb6a --- /dev/null +++ b/src/types/assets.ts @@ -0,0 +1,10 @@ +export interface MintData { + authorized_minter: string; + collection_name: string; + schema_name: string; + template_id: string; + new_asset_owner: string; + immutable_data: any[]; + mutable_data: any[]; + tokens_to_back: any[]; +} diff --git a/src/types/commands.ts b/src/types/commands.ts new file mode 100644 index 0000000..4b1f0c0 --- /dev/null +++ b/src/types/commands.ts @@ -0,0 
+1,55 @@ +import { MintData, AssetSchema } from '../types'; + +export type MintRow = { + schema: AssetSchema; + templateId: string; + amount: number; + owner: string; + mintActionData: MintData; +}; + +export interface PfpLayerOption { + id: string; + value: string; + odds: number; + imagePaths: { + value: string; + dependencies?: { + [key: string]: string; + }; + sameIdRestrictions?: { + [key: string]: string; + }; + }[]; + skipLayers?: { + [key: string]: { + values: string[]; + skipNone: boolean; + }; + }; + insertFromLayer?: { + [key: string]: string; + }; + layersToRemove?: string[]; +} + +export interface PfpLayerSpec { + name: string; + options: PfpLayerOption[]; +} + +export interface PfpAttributeMap { + [key: string]: string; +} + +export interface PfpAttribute { + name: string; + value: string; + id: string; +} + +export interface PfpSpec { + imageLayers: string[]; + dna: string; + attributes: PfpAttribute[]; +} diff --git a/src/types/index.ts b/src/types/index.ts new file mode 100644 index 0000000..02a671a --- /dev/null +++ b/src/types/index.ts @@ -0,0 +1,5 @@ +export * from './commands'; +export * from './cli-config'; +export * from './templates'; +export * from './assets'; +export * from './schemas'; diff --git a/src/types/schemas.ts b/src/types/schemas.ts new file mode 100644 index 0000000..e0c3a95 --- /dev/null +++ b/src/types/schemas.ts @@ -0,0 +1,7 @@ +import { SchemaObject } from 'atomicassets/build/Schema'; + +export interface AssetSchema { + name: string; + collectionName: string; + format: SchemaObject[]; +} diff --git a/src/types/templates.ts b/src/types/templates.ts new file mode 100644 index 0000000..4e544b2 --- /dev/null +++ b/src/types/templates.ts @@ -0,0 +1,12 @@ +export interface TemplateToCreate { + schema: string; + maxSupply: number; + isBurnable: boolean; + isTransferable: boolean; + immutableAttributes: unknown; +} + +export interface TemplateIdentifier { + templateId: string | number; + collectionName: string; +} diff --git 
a/src/utils/config-utils.ts b/src/utils/config-utils.ts index a973953..3cd496e 100644 --- a/src/utils/config-utils.ts +++ b/src/utils/config-utils.ts @@ -1,6 +1,6 @@ import fetch from 'node-fetch'; import { readFile, removeDir, removeFile, writeFile } from './file-utils'; -import { SettingsConfig } from '../types/cli-config'; +import { SettingsConfig } from '../types'; import path, { join } from 'node:path'; import { existsSync, mkdirSync } from 'node:fs'; import { PrivateKey } from '@wharfkit/antelope'; diff --git a/src/utils/excel-utils.ts b/src/utils/excel-utils.ts new file mode 100644 index 0000000..531568a --- /dev/null +++ b/src/utils/excel-utils.ts @@ -0,0 +1,84 @@ +import readXlsxFile, { Row, readSheetNames } from 'read-excel-file/node'; +import { fileExists } from './file-utils'; +import fetch from 'node-fetch'; +import { Cell } from 'read-excel-file/types'; + +export interface SheetContents { + name: string; + rows: Row[]; +} + +const GOOGLE_DOCS_HOST = 'docs.google.com'; +const GOOGLE_ACCOUNTS_HOST = 'accounts.google.com'; + +export async function readExcelContents(filePathOrSheetsId: string): Promise { + let excelInput: string | Buffer; + // Check if file exists + if (!fileExists(filePathOrSheetsId)) { + let url; + try { + url = new URL(filePathOrSheetsId); + } catch (err) { + url = new URL(`https://${GOOGLE_DOCS_HOST}/spreadsheets/d/${filePathOrSheetsId}`); + } + + // Check if it's a google sheets url + if (url.host === GOOGLE_DOCS_HOST) { + // Extract the sheet id + const sheetId = url.pathname.split('/')[3]; + url = new URL(`https://${GOOGLE_DOCS_HOST}/spreadsheets/d/${sheetId}/export?format=xlsx`); + } + + const res = await fetch(url); + if (res.status !== 200) { + throw new Error(`Error downloading ${url}`); + } + + if (new URL(res.url).host === GOOGLE_ACCOUNTS_HOST && url.host === GOOGLE_DOCS_HOST) { + throw new Error(`Error downloading ${url}. 
Make sure the file is public.`); + } + + excelInput = await res.buffer(); + } else { + excelInput = filePathOrSheetsId; + } + + const sheetNames = await readSheetNames(excelInput); + const sheetsRows = await Promise.all(sheetNames.map((name) => readXlsxFile(excelInput, { sheet: name }))); + return sheetNames.map((name, index) => ({ + name, + rows: sheetsRows[index], + })); +} + +export function getSheetHeader(rows: Row[]): { + headersMap: { [key: string]: number }; + validateHeaders: (requiredHeaders: string[]) => string; +} { + const headerRow = rows[0]; + const headersMap: { [key: string]: number } = Object.fromEntries( + headerRow + .map((name: Cell, index: number) => ({ + name: name.valueOf() as string, + index, + })) + .map((entry: { name: string; index: number }) => [entry.name, entry.index]), + ); + + const isHeaderPresent = (text: string) => { + return headersMap[text] >= 0; + }; + + const validateHeaders = (requiredHeaders: string[]) => { + const missingHeaders = requiredHeaders.filter((header) => isHeaderPresent(header) === false); + if (missingHeaders.length > 0) { + return `Missing headers: ${missingHeaders.join(', ')}`; + } + return ''; + }; + + return { + headersMap, + validateHeaders, + }; +} diff --git a/yarn.lock b/yarn.lock index 93c5dc9..996b2c7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1335,6 +1335,11 @@ aws-sdk@^2.1231.0: uuid "8.0.0" xml2js "0.5.0" +b4a@^1.6.4: + version "1.6.4" + resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.6.4.tgz#ef1c1422cae5ce6535ec191baeed7567443f36c9" + integrity sha512-fpWrvyVHEKyeEvbKZTVOeZF3VSKKWtJxFIxX/jaVPf+cLbGUSitjb49pHLqPV2BUNNZ0LcoeEGfE/YCpyDYHIw== + balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" @@ -1665,6 +1670,11 @@ chokidar@3.5.3: optionalDependencies: fsevents "~2.3.2" +chownr@^1.1.1: + version "1.1.4" + resolved 
"https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + chownr@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" @@ -1818,16 +1828,32 @@ color-name@1.1.3: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== -color-name@~1.1.4: +color-name@^1.0.0, color-name@~1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +color-string@^1.9.0: + version "1.9.1" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.9.1.tgz#4467f9146f036f855b764dfb5bf8582bf342c7a4" + integrity sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg== + dependencies: + color-name "^1.0.0" + simple-swizzle "^0.2.2" + color-support@^1.1.2, color-support@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== +color@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/color/-/color-4.2.3.tgz#d781ecb5e57224ee43ea9627560107c0e0c6463a" + integrity sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A== + dependencies: + color-convert "^2.0.1" + color-string "^1.9.0" + colorette@^2.0.20: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" @@ 
-2029,6 +2055,11 @@ deprecation@^2.0.0, deprecation@^2.3.1: resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ== +detect-libc@^2.0.0, detect-libc@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.2.tgz#8ccf2ba9315350e1241b88d0ac3b0e1fbd99605d" + integrity sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw== + dezalgo@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/dezalgo/-/dezalgo-1.0.4.tgz#751235260469084c132157dfa857f386d4c33d81" @@ -2308,6 +2339,11 @@ execa@^5.0.0, execa@^5.1.1: signal-exit "^3.0.3" strip-final-newline "^2.0.0" +expand-template@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" + integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== + exponential-backoff@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/exponential-backoff/-/exponential-backoff-3.1.1.tgz#64ac7526fe341ab18a39016cd22c787d01e00bf6" @@ -2341,6 +2377,11 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== +fast-fifo@^1.1.0, fast-fifo@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.3.2.tgz#286e31de96eb96d38a97899815740ba2a4f3640c" + integrity sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ== + fast-glob@^3.2.9: version "3.3.1" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" 
@@ -2619,6 +2660,11 @@ get-stream@^6.0.0, get-stream@^6.0.1: resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== +github-from-package@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" + integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== + github-slugger@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.5.0.tgz#17891bbc73232051474d68bd867a34625c955f7d" @@ -2996,6 +3042,11 @@ inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.0, resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== +ini@~1.3.0: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + inquirer@^8.0.0: version "8.2.6" resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.6.tgz#733b74888195d8d400a67ac332011b5fae5ea562" @@ -3040,6 +3091,11 @@ is-arrayish@^0.2.1: resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== +is-arrayish@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" + integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== + is-binary-path@~2.1.0: 
version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -3700,7 +3756,7 @@ minimatch@^9.0.0, minimatch@^9.0.1: dependencies: brace-expansion "^2.0.1" -minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6: +minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== @@ -3799,6 +3855,11 @@ minizlib@^2.0.0, minizlib@^2.1.1, minizlib@^2.1.2: minipass "^3.0.0" yallist "^4.0.0" +mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" + integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== + mkdirp-infer-owner@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/mkdirp-infer-owner/-/mkdirp-infer-owner-2.0.0.tgz#55d3b368e7d89065c38f32fd38e638f0ab61d316" @@ -3891,6 +3952,11 @@ nanoid@3.3.1: resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35" integrity sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw== +napi-build-utils@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806" + integrity sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg== + natural-compare@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" @@ -3916,6 +3982,18 @@ nock@^13.3.3: lodash "^4.17.21" propagate "^2.0.0" +node-abi@^3.3.0: + version "3.47.0" + resolved 
"https://registry.yarnpkg.com/node-abi/-/node-abi-3.47.0.tgz#6cbfa2916805ae25c2b7156ca640131632eb05e8" + integrity sha512-2s6B2CWZM//kPgwnuI0KrYwNjfdByE25zvAaEpq9IH4zcNsarH8Ihu/UuX6XMPEogDAxkuUFeZn60pXNHAqn3A== + dependencies: + semver "^7.3.5" + +node-addon-api@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" + integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== + node-fetch@2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -4507,6 +4585,24 @@ pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" +prebuild-install@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.1.tgz#de97d5b34a70a0c81334fd24641f2a1702352e45" + integrity sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw== + dependencies: + detect-libc "^2.0.0" + expand-template "^2.0.3" + github-from-package "0.0.0" + minimist "^1.2.3" + mkdirp-classic "^0.5.3" + napi-build-utils "^1.0.1" + node-abi "^3.3.0" + pump "^3.0.0" + rc "^1.2.7" + simple-get "^4.0.0" + tar-fs "^2.0.0" + tunnel-agent "^0.6.0" + preferred-pm@^3.0.3: version "3.1.2" resolved "https://registry.yarnpkg.com/preferred-pm/-/preferred-pm-3.1.2.tgz#aedb70550734a574dffcbf2ce82642bd1753bdd6" @@ -4613,6 +4709,11 @@ queue-microtask@^1.2.2: resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== +queue-tick@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/queue-tick/-/queue-tick-1.0.1.tgz#f6f07ac82c1fd60f82e098b417a80e52f1f4c142" + integrity 
sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag== + quick-lru@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" @@ -4625,6 +4726,16 @@ randombytes@^2.1.0: dependencies: safe-buffer "^5.1.0" +rc@^1.2.7: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + read-cmd-shim@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/read-cmd-shim/-/read-cmd-shim-3.0.1.tgz#868c235ec59d1de2db69e11aec885bc095aea087" @@ -4934,6 +5045,20 @@ setimmediate@^1.0.5, setimmediate@~1.0.4: resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== +sharp@^0.32.5: + version "0.32.5" + resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.32.5.tgz#9ddc78ead6446094f51e50355a2d4ec6e7220cd4" + integrity sha512-0dap3iysgDkNaPOaOL4X/0akdu0ma62GcdC2NBQ+93eqpePdDdr2/LM0sFdDSMmN7yS+odyZtPsb7tx/cYBKnQ== + dependencies: + color "^4.2.3" + detect-libc "^2.0.2" + node-addon-api "^6.1.0" + prebuild-install "^7.1.1" + semver "^7.5.4" + simple-get "^4.0.1" + tar-fs "^3.0.4" + tunnel-agent "^0.6.0" + shebang-command@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" @@ -4989,6 +5114,27 @@ sigstore@^1.3.0: "@sigstore/tuf" "^1.0.3" make-fetch-happen "^11.0.1" +simple-concat@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" + integrity 
sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== + +simple-get@^4.0.0, simple-get@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" + integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== + dependencies: + decompress-response "^6.0.0" + once "^1.3.1" + simple-concat "^1.0.0" + +simple-swizzle@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" + integrity sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg== + dependencies: + is-arrayish "^0.3.1" + slash@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" @@ -5114,6 +5260,14 @@ stdout-stderr@^0.1.9: debug "^4.1.1" strip-ansi "^6.0.0" +streamx@^2.15.0: + version "2.15.1" + resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.15.1.tgz#396ad286d8bc3eeef8f5cea3f029e81237c024c6" + integrity sha512-fQMzy2O/Q47rgwErk/eGeLu/roaFWV0jVsogDmrszM9uIw8L5OA+t+V93MgYlufNptfjmYR1tOMWhei/Eh7TQA== + dependencies: + fast-fifo "^1.1.0" + queue-tick "^1.0.1" + string-argv@0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.2.tgz#2b6d0ef24b656274d957d54e0a4bbf6153dc02b6" @@ -5207,6 +5361,11 @@ strip-json-comments@3.1.1, strip-json-comments@^3.1.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity 
sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== + supports-color@8.1.1, supports-color@^8.1.0, supports-color@^8.1.1: version "8.1.1" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" @@ -5241,7 +5400,26 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== -tar-stream@^2.2.0: +tar-fs@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" + integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^2.1.4" + +tar-fs@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.4.tgz#a21dc60a2d5d9f55e0089ccd78124f1d3771dbbf" + integrity sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w== + dependencies: + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^3.1.5" + +tar-stream@^2.1.4, tar-stream@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== @@ -5252,6 +5430,15 @@ tar-stream@^2.2.0: inherits "^2.0.3" readable-stream "^3.1.1" +tar-stream@^3.1.5: + version "3.1.6" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.6.tgz#6520607b55a06f4a2e2e04db360ba7d338cc5bab" + integrity sha512-B/UyjYwPpMBv+PaFSWAmtYjwdrlEaZQEhMIBFNC5oEG8lpiW8XjcSdmEaClj28ArfKScKHs2nshz3k2le6crsg== + dependencies: + b4a "^1.6.4" + 
fast-fifo "^1.2.0" + streamx "^2.15.0" + tar@^6.0.2, tar@^6.1.0, tar@^6.1.11, tar@^6.1.2: version "6.2.0" resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.0.tgz#b14ce49a79cb1cd23bc9b016302dea5474493f73"