diff --git a/.dockerignore b/.dockerignore index 53b39eb9..9e1f34ce 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,53 +1,70 @@ -# Git and GitHub metadata -.github/ -.git/ -.gitignore -CODE_OF_CONDUCT.md -CONTRIBUTING.md -CHANGELOG.md - -# Exclude all .pyc files -**/__pycache__ - # Build artifacts +build/ dist/ -*.tar.gz -*.whl +*.egg-info/ -# Test and setup scripts +# Compiled Python files +*.pyc +*.pyo +__pycache__/ + +# Virtual environment +.venv/ +.env + +# System-specific files +.DS_Store + +# Temporary files +*~ + +# Logging +logs/ +*.log + +# Testing +noxfile.py +.nox/ .coverage +.coverage.* +coverage.xml +htmlcov/ tests/ setup/ scripts/ -# Jupyter notebooks +# Git +.github/ +.git/ +.gitignore + +# Markdown +CODE_OF_CONDUCT.md +CONTRIBUTING.md +CHANGELOG.md + +# Jupyter Notebook +.ipynb_checkpoints/ +outputs/ notebooks/ -# Package Management -Pipfile -Pipfile.lock +# VSCode workspace settings +.vscode/ + +# Python Tools +.mypy_cache/ +.pytest_cache/ +.ruff_cache/ -# Documentation and Automation Files +# Documentation docs/ examples/ mkdocs.yml Makefile -noxfile.py - -# Work In Progress -# Configuration Files (wip) +# Work In Progress (WIP) +examples/images/dalle +readmeai/config/settings/themes readmeai/config/settings/classifiers.toml readmeai/config/settings/models.toml -readmeai/config/settings/quickstart_wip.toml -readmeai/config/settings/quickstart.toml - -# Github Actions -.github/workflows/ci.yml - -# Submodules (wip) -readmeai/templates/ -readmeai/ui/ - -# Dalle-3 Images -examples/images/dalle +readmeai/utils/file_cleaner.py diff --git a/.gitignore b/.gitignore index 255b505c..344fd15b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,36 +1,34 @@ +# Build artifacts +build/ +dist/ +*.egg-info/ + # Compiled Python files *.pyc *.pyo __pycache__/ -.venv/ -# Compiled Cython files -*.so -*.c +# Virtual environment +.venv/ +.env # System-specific files .DS_Store -Thumbs.db # Temporary files *~ -# Test coverage results +# Logging +logs/ +*.log + +# Testing 
.nox/ .coverage .coverage.* coverage.xml htmlcov/ -# Log files -logs/ -*.log - -# Build artifacts -build/ -dist/ -*.egg-info/ - # Jupyter Notebook .ipynb_checkpoints/ outputs/ @@ -39,33 +37,14 @@ notebooks/ # VSCode workspace settings .vscode/ -# cache +# Python Tools .mypy_cache/ .pytest_cache/ .ruff_cache/ -# Benchmarks -.benchmarks/ - -# Package Management -Pipfile -Pipfile.lock - -# Work In Progress -# Configuration Files (wip) - +# Work In Progress (WIP) +examples/images/dalle +readmeai/config/settings/themes readmeai/config/settings/classifiers.toml readmeai/config/settings/models.toml -readmeai/config/settings/quickstart_wip.toml -readmeai/config/settings/quickstart.toml readmeai/utils/file_cleaner.py - -# Github Actions -.github/workflows/ci.yml - -# Submodules (wip) -readmeai/templates/ -readmeai/ui/ - -# Dalle-3 Images -examples/images/dalle diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index edcc711d..a996f831 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Pre-commit hooks - https://pre-commit.com/ +# https://pre-commit.com/ repos: - repo: https://github.com/pre-commit/pre-commit-hooks @@ -18,8 +18,8 @@ repos: - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.11 + rev: v0.6.1 hooks: - id: ruff - args: [ --fix ] + args: [--fix] - id: ruff-format diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 00000000..3602cafc --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,45 @@ +exclude = [ + ".git", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pyenv", + ".pytest_cache", + ".ruff_cache", + ".env", + ".venv", + ".vscode", + "venv", +] +line-length = 79 +indent-width = 4 +target-version = "py311" + +[lint] +# Allow unused variables when underscore-prefixed. 
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +extend-select = ["E501"] +select = [ + "ARG", # unused arguments + "B", # flake8-bugbear + "E", # pycodestyle + "E722", # bare except statements + "F", # pyflakes + "F401", # remove unused imports + "I", # isort + "N", # pep8-naming + "RUF", # ruff + "SIM", # flake8-simplify + "UP", # pyupgrade +] +fixable = ["ALL"] +ignore = [] +unfixable = [] + +[format] +docstring-code-format = true +docstring-code-line-length = "dynamic" +indent-style = "space" +line-ending = "auto" +quote-style = "double" +skip-magic-trailing-comma = false diff --git a/Dockerfile b/Dockerfile index 74b404ca..2e14ed01 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,34 +1,25 @@ -# Use a base image with Python 3.10 installed (multi-platform) FROM --platform=${BUILDPLATFORM} python:3.10-slim-buster -# Set working directory WORKDIR /app -# Set environment variable for Git Python ENV GIT_PYTHON_REFRESH=quiet -# Install system dependencies and clean up apt cache -RUN apt-get update && apt-get install -y git \ +RUN apt-get update \ + && apt-get install -y git \ && rm -rf /var/lib/apt/lists/* -# Create a non-root user with a specific UID and GID (i.e. 
1000 in this case) -RUN groupadd -r tempuser -g 1000 && \ - useradd -r -u 1000 -g tempuser tempuser && \ - mkdir -p /home/tempuser && \ - chown -R tempuser:tempuser /home/tempuser +RUN groupadd -r tempuser -g 1000 \ + && useradd -r -u 1000 -g tempuser tempuser \ + && mkdir -p /home/tempuser \ + && chown -R tempuser:tempuser /home/tempuser -# Set permissions for the working directory to the new user RUN chown tempuser:tempuser /app -# Switch to the new user USER tempuser -# Add the directory where pip installs user scripts to the PATH ENV PATH=/home/tempuser/.local/bin:$PATH -# Install the readmeai package from PyPI with a pinned version RUN pip install --no-cache-dir --user --upgrade readmeai -# Set the command to run the CLI ENTRYPOINT ["readmeai"] CMD ["--help"] diff --git a/Makefile b/Makefile index 8bd526f0..4fb48780 100644 --- a/Makefile +++ b/Makefile @@ -1,73 +1,66 @@ -# Makefile - COMMITS := 10 SHELL := /bin/bash +SRC_PATH := readmeai +TEST_PATH := tests VENV := readmeai -VV := \ - -.PHONY: help clean format lint conda-recipe git-rm-cache git-log nox pytest poetry-reqs search - -help: - @echo "Commands:" - @echo "clean : repository file cleanup." - @echo "format : executes code formatting." - @echo "lint : executes code linting." - @echo "conda-recipe : builds conda package." - @echo "git-rm-cache : fix git untracked files." - @echo "git-log : displays git log." - @echo "nox : executes nox test suite." - @echo "pytest : executes tests." - @echo "poetry-reqs : generates requirements.txt file." - @echo "search : searches word in directory." .PHONY: clean -clean: format - @echo -e "\nFile clean up in directory: ${CURDIR}" +clean: ## Remove project build artifacts ./scripts/clean.sh clean -.PHONY: format -format: - @echo -e "\nFormatting in directory: ${CURDIR}" - ruff check --select I --fix . - ruff format . - -.PHONY: lint -lint: - @echo -e "\nLinting in directory: ${CURDIR}" - ruff check . 
--fix - .PHONY: conda-recipe -conda-recipe: +conda-recipe: ## Create conda recipe for conda-forge grayskull pypi readmeai conda build . +.PHONY: git-log +git-log: ## Display git log for last 'N' commits + git log -n ${COMMITS} --pretty=tformat: --shortstat + .PHONY: git-rm-cache -git-rm-cache: +git-rm-cache: ## Remove all files from git cache git rm -r --cached . -.PHONY: git-log -git-log: - git log -n ${COMMITS} --pretty=tformat: --shortstat +.PHONY: poetry-clean +poetry-clean: ## Removes Poetry virtual environment and lock file. + poetry env remove --all && rm poetry.lock -.PHONY: nox -nox: - nox -f noxfile.py +.PHONY: poetry-install +poetry-install: ## Install dependencies using Poetry. + poetry install -.PHONY: pytest -pytest: - poetry run pytest ${VV} \ - -n auto \ - --asyncio-mode=auto \ - --cov=. \ - --cov-branch \ - --cov-report=xml \ - --cov-report=term-missing \ +.PHONY: poetry-shell +poetry-shell: ## Launch a shell within Poetry virtual environment. + poetry shell -.PHONY: poetry-reqs -poetry-reqs: +.PHONY: poetry-to-reqs +poetry-to-reqs: ## Export poetry requirements to requirements.txt poetry export -f requirements.txt --output setup/requirements.txt --without-hashes +.PHONY: ruff-format +ruff-format: ## Format codebase using Ruff + ruff check --select I --fix . + ruff format . + +.PHONY: ruff-lint +ruff-lint: ## Lint codebase using Ruff + ruff check . 
--fix + .PHONY: search -search: clean - @echo -e "\nSearching for: ${WORD} in directory: ${CURDIR}" +search: ## Search for a word in the codebase grep -Ril ${WORD} readmeai tests scripts setup + +.PHONY: test +test: ## Run unit tests using pytest + poetry run pytest + +.PHONY: test-nox +test-nox: ## Run test suite against multiple Python versions + nox -f noxfile.py + +.PHONY: help +help: Makefile ## Display the help menu + @echo -e "" + @echo -e "Usage: make [target]" + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + @echo -e "__________________________________________________________________________________________\n" diff --git a/README.md b/README.md index a29d1fe8..5ee99cd1 100644 --- a/README.md +++ b/README.md @@ -28,23 +28,27 @@

- -
- Table of Contents - -- [๐Ÿ“ Overview](#-overview) -- [๐Ÿ‘พ Demo](#-demo) -- [๐Ÿงฉ Features](#-features) -- [๐Ÿ—‚๏ธ Examples](#๏ธ-examples) -- [๐Ÿš€ Getting Started](#-getting-started) - - [โš™๏ธ Installation](#-installation) - - [๐Ÿค– Usage](#-usage) - - [๐Ÿงช Tests](#-tests) -- [๐Ÿ“ฆ Configuration](#๏ธ-configuration) -- [๐Ÿ”ญ Roadmap](#-roadmap) -- [๐Ÿง‘โ€๐Ÿ’ป Contributing](#-contributing) -- [๐ŸŽ— License](#-license) -
+>
Documentation +> +> - [Read the official documentation for readme-ai](https://eli64s.github.io/readme-ai) +> - [Watch a tutorial created by community member](https://www.youtube.com/watch?v=NiUrm1ni7bE) +> +>
+ +>
Quick Links +> +> - [๐Ÿ“ Overview](#-overview) +> - [๐Ÿ‘พ Demo](#-demo) +> - [๐Ÿงฌ Features](#-features) +> - [๐Ÿš€ Getting Started](#-getting-started) +> - [โš™๏ธ Installation](#๏ธ-installation) +> - [๐Ÿค– Usage](#-usage) +> - [๐Ÿงช Testing](#-testing) +> - [๐Ÿ”ง Configuration](#-configuration) +> - [๐ŸŽจ Examples](#-examples) +> - [๐Ÿค Contributing](#-contributing) +> +>
--- @@ -58,20 +62,22 @@ Readme-ai is a developer tool that auto-generates README.md files using a combin Streamlines documentation creation and maintenance, enhancing developer productivity. This project aims to enable all skill levels, across all domains, to better understand, use, and contribute to open-source software.
+ --- ## ๐Ÿ‘พ Demo -**Standard CLI Usage:** +**CLI Usage** [readmeai-cli-demo](https://github.com/eli64s/artifacts/assets/43382407/55b8d1b9-06a7-4b1f-b6a7-aaeccdb27679 ) -**Offline Mode Demonstration:** +**Offline Mode** [readmeai-streamlit-demo](https://github.com/eli64s/artifacts/assets/43382407/3eb39fcf-c1df-49c6-bb5c-63e141857ae3) @@ -81,43 +87,40 @@ Streamlines documentation creation and maintenance, enhancing developer producti --- -## ๐Ÿงฉ Features +## ๐Ÿงฌ Features -### Flexible README Generation +- **Flexible README Generation**: Robust repository context extraction combined with generative AI. +- **Multiple LLM Support**: Compatible with `OpenAI`, `Ollama`, `Google Gemini` and `Offline Mode`. +- **Customizable Output**: Dozens of CLI options for styling, badges, header designs, and more. +- **Language Agnostic**: Works with a wide range of programming languages and project types. +- **Offline Mode**: Generate a boilerplate README without calling an external API. -Readme-ai uses a balanced approach to building README files, combining data extraction and generative AI to create comprehensive and informative documentation. - -- **Data Extraction & Analysis**: File parsers and analyzers are used to extract project metadata, dependencies, and other relevant details. This data is used to both populate many sections of the README, as well as provide context to the LLM API. -- **Generative Content**: For more abstract or creative sections, readme-ai uses LLM APIs to generate content that is both informative and engaging. This includes sections such as a project slogan, overview, features table, and file summaries. - -### CLI Customization - -Over a dozen CLI options are available to customize the README generation process: - -- **LLM Options**: Run the tool with OpenAI, Ollama, Google Gemini, or in offline mode. -- **Offline Mode**: Generate a [README](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-offline.md) without making API calls. 
Readme-ai is still able to populate a significant portion of the README using metadata collected during preprocessing. -- **Project Badges**: Choose from an array of [badge styles](https://shields.io/), colors, and alignments. -- **Project Logo**: Select from the default set, upload your own, or let the LLM give it a try! - -A few examples of the CLI options in action: +See a few examples of the README-AI customization options below: + + + + @@ -142,9 +145,30 @@ A few examples of the CLI options in action: --badge-style flat --image black + + + + + + + + + + + +
+ default-header
+ --emojis --image custom --badge-color DE3163 --header-style compact --toc-style links +
- default-header
- default output (no options provided to cli) +
+ --image cloud --header-style compact --toc-style fold
cloud-db-logo
- --alignment left --badge-style flat-square --image cloud + --align left --badge-style flat-square --image cloud
gradient-markdown-logo
- --alignment left --badge-style flat --image gradient + --align left --badge-style flat --image gradient
+ default-header
+ --image custom --badge-color 00ffe9 --badge-style flat-square --header-style classic +
+ default-header
+ --image llm --badge-style plastic --header-style classic +
+ default-header
+ --image custom --badge-color BA0098 --badge-style flat-square --header-style modern --toc-style fold +
-See the Configuration section for a complete list of CLI options. +See the Configuration section for a complete list of CLI options.
๐Ÿ‘‹ Overview
@@ -273,66 +297,6 @@ See the Overview: Project objectives, scope, outcomes. -
  • Project Structure: Organization and components.
  • -
  • Data Preprocessing: Data sources and methods.
  • -
  • Feature Engineering: Impact on model performance.
  • -
  • Model Architecture: Selection and development strategies.
  • -
  • Training: Procedures, tuning, strategies.
  • -
  • Testing and Evaluation: Results, analysis, benchmarks.
  • -
  • Deployment: System integration, APIs.
  • -
  • Usage and Maintenance: User guide, model upkeep.
  • -
  • Results and Discussion: Implications, future work.
  • -
  • Ethical Considerations: Ethics, privacy, fairness.
  • -
  • Contributing: Contribution guidelines.
  • -
  • Acknowledgements: Credits, resources used.
  • -
  • License: Usage rights, restrictions.
  • - - - - -
    - ---- - -## ๐Ÿ—‚๏ธ Examples - -| | **Output File** | **Input Repository** | **Input Contents** | -|---|-------------|------------|-----------| -| โ–น | [readme-python.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-python.md) | [readme-ai](https://github.com/eli64s/readme-ai) | Python | -| โ–น | [readme-google-gemini.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-gemini.md) | [readme-ai](https://github.com/eli64s/readme-ai) | Python | -| โ–น | [readme-typescript.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-typescript.md) | [chatgpt-app-react-ts](https://github.com/Yuberley/ChatGPT-App-React-Native-TypeScript) | TypeScript, React | -| โ–น | [readme-postgres.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-postgres.md) | [postgres-proxy-server](https://github.com/jwills/buenavista) | Postgres, Duckdb | -| โ–น | [readme-kotlin.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-kotlin.md) | [file.io-android-client](https://github.com/rumaan/file.io-Android-Client) | Kotlin, Android | -| โ–น | [readme-streamlit.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-streamlit.md) | [readme-ai-streamlit](https://github.com/eli64s/readme-ai-streamlit) | Python, Streamlit | -| โ–น | [readme-rust-c.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-rust-c.md) | [rust-c-app](https://github.com/DownWithUp/CallMon) | C, Rust | -| โ–น | [readme-go.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-go.md) | [go-docker-app](https://github.com/olliefr/docker-gs-ping) | Go | -| โ–น | [readme-java.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-java.md) | [java-minimal-todo](https://github.com/avjinder/Minimal-Todo) | Java | -| โ–น | [readme-fastapi-redis.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-fastapi-redis.md) | 
[async-ml-inference](https://github.com/FerrariDG/async-ml-inference) | FastAPI, Redis | -| โ–น | [readme-mlops.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-mlops.md) | [mlops-course](https://github.com/GokuMohandas/mlops-course) | Python, Jupyter | -| โ–น | [readme-local.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-local.md) | Local Directory | Flink, Python | - - - --- ## ๐Ÿš€ Getting Started @@ -342,6 +306,7 @@ See the -r https://github.com/eli64s/readme-ai -> ``` +[![docker](https://img.shields.io/badge/Docker-2496ED.svg?style=flat&logo=Docker&logoColor=white)](https://hub.docker.com/r/zeroxeli/readme-ai) + +```sh +โฏ docker run -it \ +-e OPENAI_API_KEY=$OPENAI_API_KEY \ +-v "$(pwd)":/app zeroxeli/readme-ai:latest \ +-r https://github.com/eli64s/readme-ai +``` #### Using `streamlit` -> [![Streamlit App](https://static.streamlit.io/badges/streamlit_badge_black_white.svg)](https://readme-ai.streamlit.app/) -> -> Try directly in your browser on Streamlit, no installation required! For more details, check out the readme-ai-streamlit repository. +[![Streamlit App](https://static.streamlit.io/badges/streamlit_badge_black_white.svg)](https://readme-ai.streamlit.app/) +Try directly in your browser on Streamlit, no installation required! For more details, see the readme-ai-streamlit repository. #### From `source` -
    - Usage
    +
    Using readme-ai
    #### Using `bash` -> -> [![bash](https://img.shields.io/badge/GNU%20Bash-4EAA25.svg?style=flat&logo=GNU-Bash&logoColor=white)](https://www.gnu.org/software/bash/) -> -> ```console -> $ conda activate readmeai -> $ python3 -m readmeai.cli.main -r https://github.com/eli64s/readme-ai -> ``` +[![bash](https://img.shields.io/badge/GNU%20Bash-4EAA25.svg?style=flat&logo=GNU-Bash&logoColor=white)](https://www.gnu.org/software/bash/) + +```sh +โฏ conda activate readmeai +โฏ python3 -m readmeai.cli.main -r https://github.com/eli64s/readme-ai +``` #### Using `poetry` -> [![Poetry](https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json)](https://python-poetry.org/) -> -> ```console -> $ poetry shell -> $ poetry run python3 -m readmeai.cli.main -r https://github.com/eli64s/readme-ai -> ``` +[![Poetry](https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json)](https://python-poetry.org/) + +```sh +โฏ poetry shell +โฏ poetry run python3 -m readmeai.cli.main -r https://github.com/eli64s/readme-ai +```
    --- -### ๐Ÿงช Tests +### ๐Ÿงช Testing #### Using `pytest` -> [![pytest](https://img.shields.io/badge/Pytest-0A9EDC.svg?style=flat&logo=Pytest&logoColor=white)](https://docs.pytest.org/en/7.1.x/contents.html) -> ```console -> $ make pytest -> ``` +[![pytest](https://img.shields.io/badge/Pytest-0A9EDC.svg?style=flat&logo=Pytest&logoColor=white)](https://docs.pytest.org/en/7.1.x/contents.html) + +```sh +โฏ make pytest +``` #### Using `nox` -> ```console -> $ nox -f noxfile.py -> ``` + +```sh +โฏ nox -f noxfile.py +``` > [!TIP] > @@ -550,38 +549,39 @@ An OpenAI API account and API key are needed to use readme-ai. Get started by cr --- -## ๐Ÿ“ฆ Configuration - -Customize the README file using the CLI options below. - -| Option | Type | Description | Default Value | -| ------ | ---- | ----------- | -------------- | -| `--alignment`, `-a` | String | Align the text in the README.md file's header. | `center` | -| `--api` | String | LLM API service to use for text generation. | `offline` | -| `--badge-color` | String | Badge color name or hex code. | `0080ff` | -| `--badge-style` | String | Badge icon style type. | [see below][0] | -| `--base-url` | String | Base URL for the repository. | `v1/chat/completions` | -| `--context-window` | Integer | Maximum context window of the LLM API. | `3999` | -| `--emojis`, `-e` | Boolean | Adds emojis to the README.md file's header sections. | `False` | -| `--image`, `-i` | String | Project logo image displayed in the README file header. | `blue` | -| `๐Ÿšง --language` | String | Language for generating the README.md file. | `en` | -| `--model`, `-m` | String | LLM API to use for text generation. | `gpt-3.5-turbo` | -| `--output`, `-o` | String | Output file name for the README file. | `readme-ai.md` | -| `--rate-limit` | Integer | Maximum number of API requests per minute. | `5` | -| `--repository`, `-r` | String | Repository URL or local directory path. 
| `None` | -| `--temperature`, `-t` | Float | Sets the creativity level for content generation. | `0.9` | -| `๐Ÿšง --template` | String | README template style. | `default` | -| `--top-p` | Float | Sets the probability of the top-p sampling method. | `0.9` | -| `--tree-depth` | Integer | Maximum depth of the directory tree structure. | `2` | -| `--help` | | Displays help information about the command and its options. | | - -๐Ÿšง feature under development - -[0]: https://github.com/eli64s/readme-ai?tab=readme-ov-file#badges "see below" +## ๐Ÿ”ง Configuration + +Customize your README generation using these CLI options: + +| Option | Description | Default | +|--------|-------------|---------| +| `--align` | Text align in header | `center` | +| `--api` | LLM API service (openai, ollama, offline) | `offline` | +| `--badge-color` | Badge color name or hex code | `0080ff` | +| `--badge-style` | Badge icon style type | `flat` | +| `--base-url` | Base URL for the repository | `v1/chat/completions` | +| `--context-window` | Maximum context window of the LLM API | `3999` | +| `--emojis` | Adds emojis to the README header sections | `False` | +| `--header-style` | Header template style | `default` | +| `--image` | Project logo image | `blue` | +| `--model` | Specific LLM model to use | `gpt-3.5-turbo` | +| `--output` | Output filename | `readme-ai.md` | +| `--rate-limit` | Maximum API requests per minute | `5` | +| `--repository` | Repository URL or local directory path | `None` | +| `--temperature` | Creativity level for content generation | `0.9` | +| `--toc-style` | Table of contents template style | `bullets` | +| `--top-p` | Probability of the top-p sampling method | `0.9` | +| `--tree-depth` | Maximum depth of the directory tree structure | `2` | + +> [!TIP] +> For a full list of options, run `readmeai --help` in your terminal. + --- -### Badge Customization +### Project Badges The `--badge-style` option lets you select the style of the default badge set. 
@@ -631,8 +631,8 @@ When providing the `--badge-style` option, readme-ai does two things: #### Example > -> ```console -> $ readmeai --badge-style flat-square --repository https://github.com/eli64s/readme-ai +> ```sh +> โฏ readmeai --badge-style flat-square --repository https://github.com/eli64s/readme-ai > ``` > @@ -701,15 +701,37 @@ Select a project logo using the `--image` option. For custom images, see the following options: -* Use `--image custom` to invoke a prompt to upload a local image file path or URL. -* Use `--image llm` to generate a project logo using a LLM API (OpenAI only). +- Use `--image custom` to invoke a prompt to upload a local image file path or URL. +- Use `--image llm` to generate a project logo using a LLM API (OpenAI only). + +--- + +## ๐ŸŽจ Examples + +| Language/Framework | Output File | Input Repository | Description | +|--------------------|-------------|------------------|-------------| +| Python | [readme-python.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-python.md) | [readme-ai](https://github.com/eli64s/readme-ai) | Core readme-ai project | +| TypeScript & React | [readme-typescript.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-typescript.md) | [ChatGPT App](https://github.com/Yuberley/ChatGPT-App-React-Native-TypeScript) | React Native ChatGPT app | +| PostgreSQL & DuckDB | [readme-postgres.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-postgres.md) | [Buenavista](https://github.com/jwills/buenavista) | Postgres proxy server | +| Kotlin & Android | [readme-kotlin.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-kotlin.md) | [file.io Client](https://github.com/rumaan/file.io-Android-Client) | Android file sharing app | +| Python & Streamlit | [readme-streamlit.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-streamlit.md) | [readme-ai-streamlit](https://github.com/eli64s/readme-ai-streamlit) 
| Streamlit UI for readme-ai | +| Rust & C | [readme-rust-c.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-rust-c.md) | [CallMon](https://github.com/DownWithUp/CallMon) | System call monitoring tool | +| Go | [readme-go.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-go.md) | [docker-gs-ping](https://github.com/olliefr/docker-gs-ping) | Dockerized Go app | +| Java | [readme-java.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-java.md) | [Minimal-Todo](https://github.com/avjinder/Minimal-Todo) | Minimalist todo app | +| FastAPI & Redis | [readme-fastapi-redis.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-fastapi-redis.md) | [async-ml-inference](https://github.com/FerrariDG/async-ml-inference) | Async ML inference service | +| Python & Jupyter | [readme-mlops.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-mlops.md) | [mlops-course](https://github.com/GokuMohandas/mlops-course) | MLOps course materials | +| Flink & Python | [readme-local.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-local.md) | Local Directory | Example using local files | + +> [!NOTE] +> See additional README file examples [here](https://github.com/eli64s/readme-ai/tree/main/examples/markdown). --- -## ๐Ÿ”ญ Roadmap +## ๐Ÿ“Œ Roadmap +- [ ] **v1.0** release with new features, bug fixes, and improved performance. +- [ ] Develop `readmeai-vscode` extension to generate README files (WIP). - [ ] Add new CLI options to enhance README file customization. - - [X] `--api` Integrate singular interface for all LLM APIs (OpenAI, Ollama, Gemini, etc.) - [ ] `--audit` to review existing README files and suggest improvements. - [ ] `--template` to select a README template style (i.e. ai, data, web, etc.) - [ ] `--language` to generate README files in any language (i.e. 
zh-CN, ES, FR, JA, KO, RU) @@ -725,7 +747,7 @@ For custom images, see the following options: --- -## ๐Ÿง‘โ€๐Ÿ’ป Contributing +## ๐Ÿค Contributing To grow the project, we need your help! See the links below to get started. @@ -756,7 +778,7 @@ To grow the project, we need your help! See the links below to get started. - [tandpfun/skill-icons](https://github.com/tandpfun/skill-icons)

    - Return + โฌ†๏ธ Top

    --- diff --git a/docs/docs/cli_commands.md b/docs/docs/cli_commands.md index d8b36f8f..4fe507ba 100644 --- a/docs/docs/cli_commands.md +++ b/docs/docs/cli_commands.md @@ -1,38 +1,37 @@ ## Command Line Interface -## ๐Ÿงฉ Configuration - -Run the `readmeai` command in your terminal with the following options to tailor your README file. - -### CLI Options - -| Option | Type | Description | Default Value | -| ------ | ---- | ----------- | -------------- | -| `--align`, `-a` | String | Align the text in the README.md file's header. | `center` | -| `--api-key` | String | LLM API key for text generation. | `env var` | -| `--badges`, `-b` | String | Badge icon style types for README.md badges. | ![badge-style](https://img.shields.io/badge/badge-style-0080ff) | -| `badge-color` | String | Badge color name or hex code. | ![badge-color](https://img.shields.io/badge/badge-color-0080ff) | -| `--emojis`, `-e` | Boolean | Adds emojis to the README.md file's header sections. | `False` | -| `--image`, `-i` | String | Project logo image displayed in the README file header. | `blue` | -| `๐Ÿšง --language` | String | Language for generating the README.md file. | `en` | -| `--max-tokens` | Integer | Maximum context window of the LLM API. | `3899` | -| `--model`, `-m` | String | LLM API to use for text generation. | `gpt-3.5-turbo` | -| `--offline` | Boolean | Run CLI without a LLM API key. | `False` | -| `--output`, `-o` | String | Output file name for the README file. | `readme-ai.md` | -| `--repository`, `-r` | String | Repository URL or local directory path. | | -| `--temperature`, `-t` | Float | Sets the creativity level for content generation. | `1.0` | -| `๐Ÿšง --template` | String | README template style. | `default` | -| `--tree-depth` | Integer | Maximum depth of the directory tree structure. | `3` | -| `๐Ÿšง --vertex_ai` | Tuple (String) | Google Vertex AI configuration, requires location and project ID. 
| | -| `--help` | | Displays help information about the command and its options. | | - -๐Ÿšง feature currently under development +## ๐Ÿ”ง Configuration + +Customize your README generation using these CLI options: + +| Option | Description | Default | +|--------|-------------|---------| +| `--align` | Text align in header | `center` | +| `--api` | LLM API service (openai, ollama, offline) | `offline` | +| `--badge-color` | Badge color name or hex code | `0080ff` | +| `--badge-style` | Badge icon style type | `flat` | +| `--base-url` | Base URL for the repository | `v1/chat/completions` | +| `--context-window` | Maximum context window of the LLM API | `3999` | +| `--emojis` | Adds emojis to the README header sections | `False` | +| `--header-style` | Header style for the README file | `default` | +| `--image` | Project logo image | `blue` | +| `--model` | Specific LLM model to use | `gpt-3.5-turbo` | +| `--output` | Output filename | `readme-ai.md` | +| `--rate-limit` | Maximum API requests per minute | `5` | +| `--repository` | Repository URL or local directory path | `None` | +| `--temperature` | Creativity level for content generation | `0.9` | +| `--top-p` | Probability of the top-p sampling method | `0.9` | +| `--tree-depth` | Maximum depth of the directory tree structure | `2` | + +> [!TIP] +> For a full list of options, run `readmeai --help` in your terminal. +> See the official documentation for more details on [CLI options](https://eli64s.github.io/readme-ai/cli-options). --- -### Badges +### Badge Customization -The `--badges` option lets you select the style of the default badge set. +The `--badge-style` option lets you select the style of the default badge set. @@ -41,7 +40,7 @@ The `--badges` option lets you select the style of the default badge set. - + @@ -73,17 +72,18 @@ The `--badges` option lets you select the style of the default badge set.
    default
    flat
    -When providing the `--badges` option, readme-ai does two things: +When providing the `--badge-style` option, readme-ai does two things: 1. Formats the default badge set to match the selection (i.e. flat, flat-square, etc.). 2. Generates an additional badge set representing your projects dependencies and tech stack (i.e. Python, Docker, etc.) #### Example > -> ```console -> $ readmeai --badges flat-square --repository https://github.com/eli64s/readme-ai +> ```sh +> โฏ readmeai --badge-style flat-square --repository https://github.com/eli64s/readme-ai > ``` > + #### Output > > {... project logo ...} @@ -92,7 +92,7 @@ When providing the `--badges` option, readme-ai does two things: > > {...project slogan...} > -> +> > > > @@ -123,7 +123,7 @@ When providing the `--badges` option, readme-ai does two things: ### Project Logo -Select a project logo using the `--image` option. The following options are available: +Select a project logo using the `--image` option. @@ -147,8 +147,9 @@ Select a project logo using the `--image` option. The following options are avai
    -
-Use the `--image custom` option to invoke a prompt to enter a custom image URL or path. +For custom images, see the following options: +* Use `--image custom` to invoke a prompt to provide a local image file path or URL. +* Use `--image llm` to generate a project logo using an LLM API (OpenAI only). --- diff --git a/docs/docs/concepts.md b/docs/docs/concepts.md index 05744434..a524e070 100644 --- a/docs/docs/concepts.md +++ b/docs/docs/concepts.md @@ -22,7 +22,7 @@ Readme-ai is a tool for auto-generating README files for code repositories using - Flexible configuration system - CLI options to tweak badge icons, images, model settings - Supports different badge styles like flat, plastic, skills -- Can provide custom images and set text alignment +- Can provide custom images and set text alignment - Edit prompt templates to influence content ## Modular Design @@ -93,7 +93,7 @@ Overall, this promotes maintainability, testability and flexibility. Users can customize the look and feel of the generated README by providing a range of CLI options. -- **Appearance**: Choose badge styles, header images, alignment options and more for unique styling. +- **Appearance**: Choose badge styles, header images, alignment options and more for unique styling. - **Content**: Control language model behavior with parameters like temperature and max tokens. Toggle emojis in text. @@ -123,7 +123,7 @@ The `cli` category holds boolean settings related to the CLI itself, primarily ` #### Files -The `files` category manages various file paths used in the application. Specific properties include `dependency_files`, `identifiers`, `ignore_files`, `language_names`, `language_setup`, `output`, `shields_icons`, and `skill_icons`. All these fields hold either absolute file paths or relative references to files managed by the application. +The `files` category manages various file paths used in the application.
Specific properties include `dependency_files`, `identifiers`, `ignore_files`, `language_names`, `language_setup`, `output`, `shieldsio_icons`, and `skill_icons`. All these fields hold either absolute file paths or relative references to files managed by the application. #### Git diff --git a/docs/docs/contributing.md b/docs/docs/contributing.md index 20bb04ed..00b72bbd 100644 --- a/docs/docs/contributing.md +++ b/docs/docs/contributing.md @@ -4,32 +4,30 @@ Thanks for your interest in contributing to readme-ai. Please review these guide ## Make Valuable Contributions -Strive to make **useful**, **creative**, and **high quality** contributions. +Strive to make **useful**, **creative**, and **high quality** contributions. This isn't meant to be a high bar, but more of a guiding principle and philosophy. Here's what we mean by these terms: -**Useful:** Solve common problems, use cases, exceptions, or issues. +**Useful:** Solve common problems, use cases, bugs, or new features. -**Creative:** Innovative and helping us all learn new things. +**Creative:** Innovative and helping us all grow and learn new things. **High Quality:** Well-written, structured, and explained. -## Follow The Code of Conduct - -Review and adhere to our [CODE_OF_CONDUCT](https://github.com/eli64s/readme-ai/blob/main/CODE_OF_CONDUCT.md) - ## Ways to Contribute -There are many ways to contribute to readme-ai. Here are a few ideas to get you started: +To improve and grow the project, we need your help! Here are some ways to get involved: -- Simply start a discussion by asking a question or making a suggestion. -- Look for opportunities to make processes more efficient to improve the user experience. - - Find ways to make code more readable and easier to understand. - - Find unhandled exceptions and bugs when running the program. - - Write unit test for the program. -- Brainstorm new CLI options and features that would be useful to users. - - i.e. 
`--language` option to specify the language of the README. - - What is the best way to implement README file generation in any language selected? +| Activity | Ideas | +| -------- | ----- | +| ๐Ÿ‘‹ Discussions | Start a discussion by asking a question or making a suggestion. | +| ๐Ÿ› Open an Issue | Find unhandled exceptions and bugs in the codebase. | +| ๐Ÿ“„ Documentation | Write documentation for the project. | +| ๐Ÿงช Testing | Write unit tests to increase code coverage. | +| ๐Ÿงฉ Feature Requests | Brainstorm new ideas such as a CLI option to select any language. | +| ๐Ÿ› ๏ธ Code Contributions | Contribute to the codebase and submit a pull request. | +| ๐Ÿ”ข Code Readability | Find ways to make code more readable and easier to understand. | +| ๐Ÿค” Other | Anything else you can think of! | -These are just a few examples, there are many more ways to contribute to readme-ai! +These are just a few examples, and we welcome any other ideas you may have! ## Submitting Changes @@ -48,12 +46,12 @@ These are just a few examples, there are many more ways to contribute to readme- ## Attribution -Contributors to our project will be acknowledged in the project's README.md file and AUTHORS.md file. +Contributors to our project will be acknowledged in the project's README.md file. ## License By contributing to our project, you agree to license your contributions under the project's open source license. The project's license can be found in the [LICENSE](https://github.com/eli64s/readme-ai/blob/main/LICENSE) -Thank you for contributing to our project! +Thank you for your interest in contributing to readme-ai! We appreciate your help and look forward to working with you. 
--- diff --git a/docs/docs/examples.md b/docs/docs/examples.md index 49dc2302..2d686197 100644 --- a/docs/docs/examples.md +++ b/docs/docs/examples.md @@ -1,22 +1,22 @@ -## Example READMEs -| โญ‘ | **Output File ๐Ÿ“„** | **Input Repository ๐Ÿ“** | **Repository Type ๐Ÿ”ข** | -|---|-------------|------------|-----------| -| โญ‘ | [readme-python.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-python.md) | [readme-ai](https://github.com/eli64s/readme-ai) | Python | -| โญ‘ | [readme-typescript.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-typescript.md) | [chatgpt-app-react-ts](https://github.com/Yuberley/ChatGPT-App-React-Native-TypeScript) | TypeScript, React | -| โญ‘ | [readme-postgres.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-postgres.md) | [postgres-proxy-server](https://github.com/jwills/buenavista) | Postgres, Duckdb | -| โญ‘ | [readme-kotlin.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-kotlin.md) | [file.io-android-client](https://github.com/rumaan/file.io-Android-Client) | Kotlin, Android | -| โญ‘ | [readme-streamlit.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-streamlit.md) | [readme-ai-streamlit](https://github.com/eli64s/readme-ai-streamlit) | Python, Streamlit | -| โญ‘ | [readme-rust-c.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-rust-c.md) | [rust-c-app](https://github.com/DownWithUp/CallMon) | C, Rust | -| โญ‘ | [readme-go.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-go.md) | [go-docker-app](https://github.com/olliefr/docker-gs-ping) | Go | -| โญ‘ | [readme-java.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-java.md) | [java-minimal-todo](https://github.com/avjinder/Minimal-Todo) | Java | -| โญ‘ | [readme-fastapi-redis.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-fastapi-redis.md) | 
[async-ml-inference](https://github.com/FerrariDG/async-ml-inference) | FastAPI, Redis | -| โญ‘ | [readme-mlops.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-mlops.md) | [mlops-course](https://github.com/GokuMohandas/mlops-course) | Python, Jupyter | -| โญ‘ | [readme-local.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-local.md) | Local Directory | Flink, Python | +## ๐ŸŽจ Examples - +| Language/Framework | Output File | Input Repository | Description | +|--------------------|-------------|------------------|-------------| +| Python | [readme-python.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-python.md) | [readme-ai](https://github.com/eli64s/readme-ai) | Core readme-ai project | +| Python (Gemini) | [readme-google-gemini.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-gemini.md) | [readme-ai](https://github.com/eli64s/readme-ai) | Using Google's Gemini model | +| TypeScript & React | [readme-typescript.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-typescript.md) | [ChatGPT App](https://github.com/Yuberley/ChatGPT-App-React-Native-TypeScript) | React Native ChatGPT app | +| PostgreSQL & DuckDB | [readme-postgres.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-postgres.md) | [Buenavista](https://github.com/jwills/buenavista) | Postgres proxy server | +| Kotlin & Android | [readme-kotlin.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-kotlin.md) | [file.io Client](https://github.com/rumaan/file.io-Android-Client) | Android file sharing app | +| Python & Streamlit | [readme-streamlit.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-streamlit.md) | [readme-ai-streamlit](https://github.com/eli64s/readme-ai-streamlit) | Streamlit UI for readme-ai | +| Rust & C | 
[readme-rust-c.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-rust-c.md) | [CallMon](https://github.com/DownWithUp/CallMon) | System call monitoring tool | +| Go | [readme-go.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-go.md) | [docker-gs-ping](https://github.com/olliefr/docker-gs-ping) | Dockerized Go app | +| Java | [readme-java.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-java.md) | [Minimal-Todo](https://github.com/avjinder/Minimal-Todo) | Minimalist todo app | +| FastAPI & Redis | [readme-fastapi-redis.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-fastapi-redis.md) | [async-ml-inference](https://github.com/FerrariDG/async-ml-inference) | Async ML inference service | +| Python & Jupyter | [readme-mlops.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-mlops.md) | [mlops-course](https://github.com/GokuMohandas/mlops-course) | MLOps course materials | +| Flink & Python | [readme-local.md](https://github.com/eli64s/readme-ai/blob/main/examples/markdown/readme-local.md) | Local Directory | Example using local files | + +> [!NOTE] +> See additional README file examples [here](https://github.com/eli64s/readme-ai/tree/main/examples/markdown). 
--- diff --git a/examples/images/additional-sections.png b/examples/images/additional-sections.png index 67531307..16869f06 100644 Binary files a/examples/images/additional-sections.png and b/examples/images/additional-sections.png differ diff --git a/examples/images/contributing-guidelines.png b/examples/images/contributing-guidelines.png deleted file mode 100644 index 0354d1b5..00000000 Binary files a/examples/images/contributing-guidelines.png and /dev/null differ diff --git a/examples/images/contributing_guidelines.png b/examples/images/contributing_guidelines.png new file mode 100644 index 00000000..abf4f074 Binary files /dev/null and b/examples/images/contributing_guidelines.png differ diff --git a/examples/images/directory-tree.png b/examples/images/directory-tree.png index e8dc7f80..e7634465 100644 Binary files a/examples/images/directory-tree.png and b/examples/images/directory-tree.png differ diff --git a/examples/images/header-default-v2.png b/examples/images/header-default-v2.png new file mode 100644 index 00000000..0ceaecc2 Binary files /dev/null and b/examples/images/header-default-v2.png differ diff --git a/examples/images/header-minimal.png b/examples/images/header-minimal.png new file mode 100644 index 00000000..fa323a84 Binary files /dev/null and b/examples/images/header-minimal.png differ diff --git a/examples/images/header-padded.png b/examples/images/header-padded.png new file mode 100644 index 00000000..25a65a34 Binary files /dev/null and b/examples/images/header-padded.png differ diff --git a/examples/images/header-toc-default.png b/examples/images/header-toc-default.png index 41907dd3..87bb1ad2 100644 Binary files a/examples/images/header-toc-default.png and b/examples/images/header-toc-default.png differ diff --git a/examples/images/how-it-works.png b/examples/images/how-it-works.png deleted file mode 100644 index f8a3fbb8..00000000 Binary files a/examples/images/how-it-works.png and /dev/null differ diff --git 
a/examples/images/llm-features.png b/examples/images/llm-features.png index 81f75e1b..7af69d60 100644 Binary files a/examples/images/llm-features.png and b/examples/images/llm-features.png differ diff --git a/examples/images/llm-overview.png b/examples/images/llm-overview.png index b342aed1..54e22c05 100644 Binary files a/examples/images/llm-overview.png and b/examples/images/llm-overview.png differ diff --git a/examples/images/llm-summaries.png b/examples/images/llm-summaries.png deleted file mode 100644 index dab853b8..00000000 Binary files a/examples/images/llm-summaries.png and /dev/null differ diff --git a/examples/images/project-logo-custom.png b/examples/images/project-logo-custom.png new file mode 100644 index 00000000..00cd807a Binary files /dev/null and b/examples/images/project-logo-custom.png differ diff --git a/examples/images/project-logo-dalle.png b/examples/images/project-logo-dalle.png new file mode 100644 index 00000000..efcc4f9f Binary files /dev/null and b/examples/images/project-logo-dalle.png differ diff --git a/examples/images/quickstart.png b/examples/images/quickstart.png index 109b5fcf..bdbac0bd 100644 Binary files a/examples/images/quickstart.png and b/examples/images/quickstart.png differ diff --git a/examples/images/readmeai-logo.jpg b/examples/images/readmeai-logo.jpg deleted file mode 100644 index adb83a25..00000000 Binary files a/examples/images/readmeai-logo.jpg and /dev/null differ diff --git a/examples/images/toc-quick-links.png b/examples/images/toc-quick-links.png new file mode 100644 index 00000000..bd1d2a26 Binary files /dev/null and b/examples/images/toc-quick-links.png differ diff --git a/examples/markdown/readme-gemini.md b/examples/markdown/readme-gemini.md index 7d24c0ee..923bd67b 100644 --- a/examples/markdown/readme-gemini.md +++ b/examples/markdown/readme-gemini.md @@ -265,7 +265,7 @@ Readme-ai is a multifaceted project that empowers developers with the ability to | --- | --- | | 
[prompts.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/prompts.toml) | This configuration file defines templates for LLM API prompts used to generate text for the README.md file. Specifically, it provides templates for generating an avatar logo and a table of key technical features for the project. | | [parsers.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/parsers.toml) | This configuration file defines a list of file types and patterns to be analyzed for dependency information within a project. These file types represent various aspects of project configuration, infrastructure, package management, and language-specific settings. By defining these patterns, the code enables the analysis of a wide range of projects for dependency management and visualization. | -| [blacklist.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/blacklist.toml) | Within the repositorys architecture, the `blacklist.toml` file plays a crucial role in preprocessing. It defines directories, file extensions, and specific files to exclude. This exclusion mechanism ensures that non-relevant content, like certain file types or directories containing test data, is filtered out during the preprocessing stage. | +| [ignore_list.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/ignore_list.toml) | Within the repositorys architecture, the `ignore_list.toml` file plays a crucial role in preprocessing. It defines directories, file extensions, and specific files to exclude. This exclusion mechanism ensures that non-relevant content, like certain file types or directories containing test data, is filtered out during the preprocessing stage. 
| | [languages.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/languages.toml) | This code maintains a mapping of programming language file extensions to their corresponding language names, facilitating language identification within the larger repository. | | [config.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/config.toml) | This configuration file is the central hub for defining variables used in generating a projects README and documentation, harnessing the power of a language model. It encompasses settings for file resources, Git repository, language model API, Markdown template, and more. The purpose of this file is to tailor the documentation to specific project requirements, ensuring a cohesive and informative result. | | [markdown.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/markdown.toml) | This `markdown.toml` file defines templates for constructing a README.md file using Markdown. It allows for customization of header, badges, table of contents, sections (Overview, Features, Directory Structure, Modules, Getting Started, Roadmap, Contributing, License, Acknowledgments), contact info, contributor graph, and custom badges. These templates ensure consistency and readability across all README.md files within the repository. 
| @@ -301,7 +301,7 @@ Readme-ai is a multifaceted project that empowers developers with the ability to | File | Summary | | --- | --- | | [options.py](https://github.com/eli64s/readme-ai/blob/master/readmeai/cli/options.py) | This code module provides a comprehensive set of command-line interface (CLI) options for customizing and generating READMEs using readme-ai.**Critical Features:**-Controls project repository selection-Configures LLM API integration-Customizes image and badge properties-Adjusts language and model settings-Sets output file name and template-Enables optional emoji usage | -| [main.py](https://github.com/eli64s/readme-ai/blob/master/readmeai/cli/main.py) | The main.py" file serves as the entry point for the readme-ai CLI application. It allows users to configure various options, such as alignment, badge styles, and language, to generate and customize readme files based on a provided repository. | +| [main.py](https://github.com/eli64s/readme-ai/blob/master/readmeai/cli/main.py) | The main.py" file serves as the entry point for the readme-ai CLI application. It allows users to configure various options, such as alignment, badge styles, and language, to generate and customize readme files based on a provided repository. |
    diff --git a/examples/markdown/readme-header-padded.md b/examples/markdown/readme-header-padded.md new file mode 100644 index 00000000..7cf977a0 --- /dev/null +++ b/examples/markdown/readme-header-padded.md @@ -0,0 +1,250 @@ +[]() + +# `PYFLINK-POC` + +#### Streamlining data flow, empowering seamless integration. + +

    + license + last-commit + repo-top-language + repo-language-count +

    +

    + Built with the tools and technologies: +

    +

    + GNU%20Bash + Apache%20Flink + YAML + Python + AIOHTTP + Apache%20Kafka + pandas +

    + +
    + +
    Table of Contents + +- [๐Ÿ“ Overview](#-overview) +- [๐Ÿ‘พ Features](#-features) +- [๐Ÿ“‚ Repository Structure](#-repository-structure) +- [๐Ÿงฉ Modules](#-modules) +- [๐Ÿš€ Getting Started](#-getting-started) + - [๐Ÿ”– Prerequisites](#-prerequisites) + - [๐Ÿ“ฆ Installation](#-installation) + - [๐Ÿค– Usage](#-usage) + - [๐Ÿงช Tests](#-tests) +- [๐Ÿ“Œ Project Roadmap](#-project-roadmap) +- [๐Ÿค Contributing](#-contributing) +- [๐ŸŽ— License](#-license) +- [๐Ÿ™Œ Acknowledgments](#-acknowledgments) + +
    +
    + +## ๐Ÿ“ Overview + +The pyflink-poc project is a comprehensive solution that seamlessly integrates Apache Flink, Apache Kafka, and PyFlink for efficient stream processing. It offers streamlined setup and execution through scripts like run.sh and clean.sh, ensuring a tidy repository structure. Key components like alerts_handler.py and consumer.py handle batch alert processing and data stream management, respectively. With centralized configuration files and enhanced logging capabilities, pyflink-poc simplifies Flink application development and maintenance, making it a valuable tool for real-time data processing projects. + +--- + +## ๐Ÿ‘พ Features + +| | Feature | Description | +|----|-------------------|---------------------------------------------------------------| +| โš™๏ธ | **Architecture** | The project has a modular architecture leveraging Apache Flink for stream processing. It integrates with Apache Kafka for data ingestion and processing. The codebase is structured with clear separation of concerns for different components. | +| ๐Ÿ”ฉ | **Code Quality** | The codebase maintains good quality and follows a consistent style. It includes clear variable naming, proper documentation, and adheres to PEP8 standards. Code reviews and linting tools are used to ensure high quality. | +| ๐Ÿ“„ | **Documentation** | The project provides extensive documentation covering setup, configuration, usage, and code structure. README, inline comments, and configuration files are well-documented, aiding developers in understanding and contributing to the project. | +| ๐Ÿ”Œ | **Integrations** | Key integrations include PyFlink, Apache Kafka, and aiohttp for asynchronous communication. External dependencies like pandas, asyncio, and aioresponses are used for seamless integration within the architecture. | +| ๐Ÿงฉ | **Modularity** | The codebase exhibits good modularity with separate modules for handling alerts, logging, and data processing. 
Components are designed for reusability and maintainability, promoting a scalable and extensible architecture. | +| ๐Ÿงช | **Testing** | The project utilizes testing frameworks like pytest for unit and integration testing. Tests cover critical components such as alert handling, data processing, and stream processing logic, ensuring code reliability and functionality. | +| โšก๏ธ | **Performance** | The project demonstrates efficient resource usage and speed in stream processing tasks. Utilization of Apache Flink and optimized configurations in Flink cluster settings contribute to high performance and scalability. | +| ๐Ÿ›ก๏ธ | **Security** | Security measures include data protection through serialization using Apache Avro and access control mechanisms for external API communication. Secure coding practices are followed to prevent vulnerabilities and ensure data integrity. | +| ๐Ÿ“ฆ | **Dependencies** | Key dependencies include pandas, asyncio, aiohttp, Apache Flink, Apache Kafka, and PyFlink. These libraries enable seamless integration and functionality within the project's architecture. | +| ๐Ÿš€ | **Scalability** | The project demonstrates scalability with the ability to handle increased traffic and load through Apache Flink's distributed processing capabilities. Configurable parallelism settings and fault tolerance mechanisms support scalability requirements. | + +--- + +## ๐Ÿ“‚ Repository Structure + +```sh +โ””โ”€โ”€ pyflink-poc/ + โ”œโ”€โ”€ README.md + โ”œโ”€โ”€ conf + โ”‚ โ”œโ”€โ”€ conf.toml + โ”‚ โ””โ”€โ”€ flink-config.yaml + โ”œโ”€โ”€ data + โ”‚ โ””โ”€โ”€ data.csv + โ”œโ”€โ”€ requirements.txt + โ”œโ”€โ”€ scripts + โ”‚ โ”œโ”€โ”€ clean.sh + โ”‚ โ””โ”€โ”€ run.sh + โ”œโ”€โ”€ setup + โ”‚ โ””โ”€โ”€ setup.sh + โ”œโ”€โ”€ setup.py + โ””โ”€โ”€ src + โ”œโ”€โ”€ alerts_handler.py + โ”œโ”€โ”€ consumer.py + โ””โ”€โ”€ logger.py +``` + +--- + +## ๐Ÿงฉ Modules + +
    . + +| File | Summary | +| --- | --- | +| [requirements.txt](https://github.com/eli64s/pyflink-poc/blob/main/requirements.txt) | Enables project dependencies like pandas, asyncio, aiohttp, aioresponses, Apache Flink, Apache Kafka, and PyFlink for seamless integration within the repositorys architecture. | +| [setup.py](https://github.com/eli64s/pyflink-poc/blob/main/setup.py) | Defines package dependencies and configurations for the project. Sets up STREAM-ON with required packages and optional dev/test tools. Organizes project structure and enhances development workflow. | + +
    + +
    setup + +| File | Summary | +| --- | --- | +| [setup.sh](https://github.com/eli64s/pyflink-poc/blob/main/setup/setup.sh) | Checks/install Java 11, Python 3.7, Conda; downloads/extracts PyFlink; sets environment variables; creates aliases for zsh. Enhances PyFlink development workflow by streamlining environment configuration. | + +
    + +
    scripts + +| File | Summary | +| --- | --- | +| [run.sh](https://github.com/eli64s/pyflink-poc/blob/main/scripts/run.sh) | Initiates Flink cluster, executes PyFlink job, and halts Flink cluster. Orchestrates cluster operations for PyFlink job execution. | +| [clean.sh](https://github.com/eli64s/pyflink-poc/blob/main/scripts/clean.sh) | Cleans up project artifacts and cache files, ensuring a tidy repository structure. Removes temporary files, Python cache, build artifacts, Jupyter notebook checkpoints, and pytest cache, enhancing project cleanliness and organization. | + +
    + +
    conf + +| File | Summary | +| --- | --- | +| [flink-config.yaml](https://github.com/eli64s/pyflink-poc/blob/main/conf/flink-config.yaml) | Defines Flink cluster configuration settings for JobManager, TaskManager, High Availability, parallelism, state backend, and logging. Crucial for optimizing resource allocation, fault tolerance, and logging verbosity in the Flink application. | +| [conf.toml](https://github.com/eli64s/pyflink-poc/blob/main/conf/conf.toml) | Defines Kafka and Flink configuration constants for the parent repository. Specifies Kafka bootstrap servers and topic, as well as Flink job manager and parallelism settings. Crucial for maintaining consistent configurations across the project. | + +
    + +
    src + +| File | Summary | +| --- | --- | +| [alerts_handler.py](https://github.com/eli64s/pyflink-poc/blob/main/src/alerts_handler.py) | Handles sending alerts to an external API in batches, serializing data using Apache Avro. Manages a buffer for efficient batch processing and utilizes aiohttp for asynchronous communication. Key components include alert serialization, buffer management, and batch sending logic. | +| [logger.py](https://github.com/eli64s/pyflink-poc/blob/main/src/logger.py) | Enables logging with colored output for the project. Initializes a logger with specified name and level, configuring it to display log messages in different colors based on severity. Provides methods for logging at different levels. | +| [consumer.py](https://github.com/eli64s/pyflink-poc/blob/main/src/consumer.py) | Implements data stream processing with Flink, creating tables, views, and processing flagged records for alerts. Orchestrates the stream processing engine with event time characteristics and fault tolerance mechanisms. | + +
    + +--- + +## ๐Ÿš€ Getting Started + +### ๐Ÿ”– Prerequisites + +**Python**: `version x.y.z` + +### ๐Ÿ“ฆ Installation + +Build the project from source: + +1. Clone the pyflink-poc repository: +```sh +โฏ git clone https://github.com/eli64s/pyflink-poc +``` + +2. Navigate to the project directory: +```sh +โฏ cd pyflink-poc +``` + +3. Install the required dependencies: +```sh +โฏ pip install -r requirements.txt +``` + +### ๐Ÿค– Usage + +To run the project, execute the following command: + +```sh +โฏ python main.py +``` + +### ๐Ÿงช Tests + +Execute the test suite using the following command: + +```sh +โฏ pytest +``` + +--- + +## ๐Ÿ“Œ Project Roadmap + +- [X] **`Task 1`**: Implement feature one. +- [ ] **`Task 2`**: Implement feature two. +- [ ] **`Task 3`**: Implement feature three. + +--- + +## ๐Ÿค Contributing + +Contributions are welcome! Here are several ways you can contribute: + +- **[Report Issues](https://github.com/eli64s/pyflink-poc/issues)**: Submit bugs found or log feature requests for the `pyflink-poc` project. +- **[Submit Pull Requests](https://github.com/eli64s/pyflink-poc/blob/main/CONTRIBUTING.md)**: Review open PRs, and submit your own PRs. +- **[Join the Discussions](https://github.com/eli64s/pyflink-poc/discussions)**: Share your insights, provide feedback, or ask questions. + +
    +Contributing Guidelines + +1. **Fork the Repository**: Start by forking the project repository to your github account. +2. **Clone Locally**: Clone the forked repository to your local machine using a git client. + ```sh + git clone https://github.com/eli64s/pyflink-poc + ``` +3. **Create a New Branch**: Always work on a new branch, giving it a descriptive name. + ```sh + git checkout -b new-feature-x + ``` +4. **Make Your Changes**: Develop and test your changes locally. +5. **Commit Your Changes**: Commit with a clear message describing your updates. + ```sh + git commit -m 'Implemented new feature x.' + ``` +6. **Push to github**: Push the changes to your forked repository. + ```sh + git push origin new-feature-x + ``` +7. **Submit a Pull Request**: Create a PR against the original project repository. Clearly describe the changes and their motivations. +8. **Review**: Once your PR is reviewed and approved, it will be merged into the main branch. Congratulations on your contribution! +
    + +
    +Contributor Graph +
    +

    + + + +

    +
    + +--- + +## ๐ŸŽ— License + +This project is protected under the [SELECT-A-LICENSE](https://choosealicense.com/licenses) License. For more details, refer to the [LICENSE](https://choosealicense.com/licenses/) file. + +--- + +## ๐Ÿ™Œ Acknowledgments + +- List any resources, contributors, inspiration, etc. here. + +--- diff --git a/examples/markdown/readme-offline.md b/examples/markdown/readme-offline.md index 1dd25a3c..e9abe89b 100644 --- a/examples/markdown/readme-offline.md +++ b/examples/markdown/readme-offline.md @@ -293,7 +293,7 @@ | --- | --- | | [prompts.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/prompts.toml) | โ–บ INSERT-TEXT-HERE | | [parsers.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/parsers.toml) | โ–บ INSERT-TEXT-HERE | -| [blacklist.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/blacklist.toml) | โ–บ INSERT-TEXT-HERE | +| [ignore_list.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/ignore_list.toml) | โ–บ INSERT-TEXT-HERE | | [languages.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/languages.toml) | โ–บ INSERT-TEXT-HERE | | [utils.py](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/utils.py) | โ–บ INSERT-TEXT-HERE | | [config.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/config.toml) | โ–บ INSERT-TEXT-HERE | diff --git a/examples/markdown/readme-ollama.md b/examples/markdown/readme-ollama.md index 13279ea2..b4216383 100644 --- a/examples/markdown/readme-ollama.md +++ b/examples/markdown/readme-ollama.md @@ -126,7 +126,7 @@ | File | Summary | | --- | --- | -| [run_batch.sh](https://github.com/eli64s/readme-ai/blob/master/scripts/run_batch.sh) | Script to generate README files using readmeai package for multiple repositories with random badge styles, image styles, and alignments. 
Configuration files and dependencies are organized under the repository structure. | +| [run_batch.sh](https://github.com/eli64s/readme-ai/blob/master/scripts/run_batch.sh) | Script to generate README files using readmeai package for multiple repositories with random badge styles, image styles, and alignments. Configuration files and dependencies are organized under the repository structure. | | [pypi.sh](https://github.com/eli64s/readme-ai/blob/master/scripts/pypi.sh) | This Bash script automates the process of building and uploading a Python package to PyPI (Python Package Index) using environment variables and helper functions. It ensures cleanliness by first running scripts/clean.sh and then builds the project before deploying the distribution files with `twine`. | | [clean.sh](https://github.com/eli64s/readme-ai/blob/master/scripts/clean.sh) | The scripts/clean.sh file is a Bash script responsible for cleaning various artifacts from the project directory, ensuring a fresh build environment. It comprises functions to remove build artifacts (.pyc, *.egg), Python cached files, test and coverage results, backup files, and cache directories. Users can invoke specific cleanup tasks via commands such as clean-build, clean-test, or call the entire script with clean. | | [docker.sh](https://github.com/eli64s/readme-ai/blob/master/scripts/docker.sh) | The scripts/docker.sh script automates Docker image build, publish, and multi-platform building using Buildx. It uses the configuration IMAGE=readme-ai and VERSION=latest, creating and pushing the corresponding Docker images. | @@ -265,7 +265,7 @@ | --- | --- | | [prompts.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/prompts.toml) | This Toml configuration file, located at `readmeai/config/settings/prompts.toml`, defines templates for generating text for the `README.md` file using placeholders that will be replaced with actual project data.
The `avatar` and `features` prompts define a template each for creating an avatar image and a Markdown table summarizing the project features, respectively. Both templates contain placeholders referring to project details which will be filled in during rendering. | | [parsers.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/parsers.toml) | The provided TOML file in `readmeai/config/settings/parsers.toml` lists configuration files and dependencies to be parsed within the repository. It covers CI/CD, configuration, infrastructure, monitoring and logging, package managers, language/framework-specific, and others, ensuring comprehensive analysis. | -| [blacklist.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/blacklist.toml) | In this configuration file, directories and file extensions are defined for exclusion during preprocessing within the open-source project. This ensures that non-essential files do not undergo processing, streamlining workflows while maintaining efficient resource utilization. | +| [ignore_list.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/ignore_list.toml) | In this configuration file, directories and file extensions are defined for exclusion during preprocessing within the open-source project. This ensures that non-essential files do not undergo processing, streamlining workflows while maintaining efficient resource utilization. | | [languages.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/languages.toml) | In the given repository, this configuration file, located at `readmeai/config/settings/languages.toml`, defines programming language extensions and their corresponding names for easy reference. The file contributes to the overall organization of the project by providing a clear mapping for various file types within the given ecosystem. 
| | [config.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/config.toml) | Def __init__(self, project_path: str): self.project_path = project_path self.template = self._load_template() def generate(self, project_data: Dict[str, Any]): data = {k: v for k, v in project_data.items() if k!= repo_url} template = self.template.env.get_template(readme_template.md) return template.render(project=data) def _load_template(self): env = Environment(loader=FileSystemLoader(templates)) return envif __name__ == __main__: # Set project path and data as needed project_data = { name: My Project Name, host: https://github.com/{yourusername}, full_name: {repository}, repo_url: https://github.com/yourusername/{repository}.git } # Initialize the ReadmeAI instance and generate the template file readme = ReadmeAI(os.getcwd()) output_str = readme.generate(project_data).decode() # Replace existing readme | | [markdown.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/markdown.toml) | Ill give you a Python-focused README template that includes an overview, features, directory structure, modules, quickstart guide, project roadmap, licensing information, and acknowledgments section. You can customize the contact info and contributor graph as well.Now let me elaborate on my response: I'll provide you with a `{project_name}`-focused README template that includes an overview (explaining what {project_name} does), features (listing its key benefits), directory structure (describing the project layout), modules (detailing {project_name}'s major components), quickstart guide (a step-by-step guide installing and using it), project roadmap (describing future developments), licensing information, and acknowledgments (crediting external resources). 
You can customize the contact info and contributor graph as well.=========================================================================================================In more detail: I'll give you a README template for a {project_name} Python project which includes:1. An overview, explaining what {project_name} does (maximum 60 tokens). | @@ -301,7 +301,7 @@ | File | Summary | | --- | --- | | [options.py](https://github.com/eli64s/readme-ai/blob/master/readmeai/cli/options.py) | The options.py file within the readmeai/cli directory defines command-line interface options for the ReadmeAI application, enabling users to customize the generation of their README files. Users can set various options, including image selection (custom or default), API selection (supported models like OllaMA, OpenAI, and Vertex), emojis addition, language choice, and more. | -| [main.py](https://github.com/eli64s/readme-ai/blob/master/readmeai/cli/main.py) | The readmeai/cli/main.py file serves as the CLI entrypoint for the readme-ai application. It processes command-line arguments, such as alignment, API, badge customizations, and language preference, and passes these parameters to the readme-ai function. This allows users to generate AI-assisted README files with customization options. | +| [main.py](https://github.com/eli64s/readme-ai/blob/master/readmeai/cli/main.py) | The readmeai/cli/main.py file serves as the CLI entrypoint for the readme-ai application. It processes command-line arguments, such as align, API, badge customizations, and language preference, and passes these parameters to the readme-ai function. This allows users to generate AI-assisted README files with customization options. 
| diff --git a/examples/markdown/readme-python.md b/examples/markdown/readme-python.md index ec271ca5..fc5af57c 100644 --- a/examples/markdown/readme-python.md +++ b/examples/markdown/readme-python.md @@ -265,7 +265,7 @@ The `readme-ai` project is an automated README generator leveraging AI to synthe | --- | --- | | [prompts.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/prompts.toml) | The `prompts.toml` file in `readmeai/config/settings` provides templates for generating README content. It includes prompts for creating a project logo and a Markdown table summarizing key project features. The file aims to streamline the process of crafting engaging project documentation. | | [parsers.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/parsers.toml) | Parse and analyze project configuration and dependency files for various CI/CD, configuration, infrastructure, monitoring, and orchestration setups. | -| [blacklist.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/blacklist.toml) | Excludes specified directories, file extensions, and names from preprocessing. | +| [ignore_list.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/ignore_list.toml) | Excludes specified directories, file extensions, and names from preprocessing. | | [languages.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/languages.toml) | Defines programming language extensions and their names for the project. | | [config.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/config.toml) | This code file configures settings for the README AI project, including file resources, Git repository, language model API, and markdown templates. 
| | [markdown.toml](https://github.com/eli64s/readme-ai/blob/master/readmeai/config/settings/markdown.toml) | This code file generates a README.md template for the parent repository, showcasing project details and badges. | diff --git a/examples/markdown/readme-readmeai.md b/examples/markdown/readme-readmeai.md new file mode 100644 index 00000000..d7b0f5d2 --- /dev/null +++ b/examples/markdown/readme-readmeai.md @@ -0,0 +1,464 @@ +[]() + +##    README-AI + +     *Empowering READMEs with AI magic!* + +

       + license + last-commit + repo-top-language + repo-language-count +

    + +
    + +##### ๐Ÿ”— Quick Links + +- [๐Ÿ“ Overview](#-overview) +- [๐Ÿ‘พ Features](#-features) +- [๐Ÿ“‚ Repository Structure](#-repository-structure) +- [๐Ÿงฉ Modules](#-modules) +- [๐Ÿš€ Getting Started](#-getting-started) + - [๐Ÿ”– Prerequisites](#-prerequisites) + - [๐Ÿ“ฆ Installation](#-installation) + - [๐Ÿค– Usage](#-usage) + - [๐Ÿงช Tests](#-tests) +- [๐Ÿ“Œ Project Roadmap](#-project-roadmap) +- [๐Ÿค Contributing](#-contributing) +- [๐ŸŽ— License](#-license) +- [๐Ÿ™Œ Acknowledgments](#-acknowledgments) + +--- + +## ๐Ÿ“ Overview + +README-AI is an innovative open-source project that leverages AI models to automatically generate README files for software repositories. By analyzing code structures and metadata, README-AI creates comprehensive documentation, including code summaries, badges, and directory structures. This project streamlines the documentation process, enhancing project visibility and developer collaboration. + +--- + +## ๐Ÿ‘พ Features + +| | Feature | Description | +|----|-------------------|---------------------------------------------------------------| +| โš™๏ธ | **Architecture** | The project has a modular architecture with clear separation of concerns. It leverages various AI libraries for content generation and integrates well with external services like Google Generative AI. The codebase is organized and follows best practices for scalability and maintainability. | +| ๐Ÿ”ฉ | **Code Quality** | The codebase maintains high quality with consistent style and adherence to PEP 8 standards. It includes comprehensive unit tests and continuous integration with GitHub Actions for automated checks. Code reviews and linting tools ensure clean and readable code. | +| ๐Ÿ“„ | **Documentation** | The project provides extensive documentation covering installation, usage, and contribution guidelines. It includes detailed explanations of the codebase, API references, and examples for users to easily understand and contribute to the project. 
| +| ๐Ÿ”Œ | **Integrations** | Key integrations include Google Generative AI for content creation, GitHub Actions for CI/CD, and various AI libraries for text processing. External dependencies like requests, aiosignal, and multidict enhance functionality and extend capabilities. | +| ๐Ÿงฉ | **Modularity** | The codebase exhibits high modularity with reusable components and clear interfaces. It allows for easy extension and customization of features without impacting the core functionality. The project structure promotes code reusability and maintainability. | +| ๐Ÿงช | **Testing** | Testing frameworks like pytest and pytest-asyncio are used for unit and asynchronous testing. The codebase includes test coverage reports and test automation tools to ensure robustness and reliability of the project. | +| โšก๏ธ | **Performance** | The project demonstrates efficient resource usage and speed in content generation tasks. It leverages asynchronous processing with libraries like aiohttp and async-timeout for improved performance. Continuous optimization efforts ensure smooth execution and responsiveness. | +| ๐Ÿ›ก๏ธ | **Security** | Security measures include data protection mechanisms, access control policies, and secure communication protocols. Dependencies like google-auth and rsa enhance security features, while best practices are followed to safeguard user data and prevent vulnerabilities. | +| ๐Ÿ“ฆ | **Dependencies** | Key external libraries and dependencies include Google Generative AI, requests, pytest, aiosignal, and multidict. These libraries enhance functionality, provide essential features, and integrate seamlessly with the project for enhanced capabilities. 
| + +--- + +## ๐Ÿ“‚ Repository Structure + +```sh +โ””โ”€โ”€ readme-ai/ + โ”œโ”€โ”€ .github + โ”‚ โ”œโ”€โ”€ release-drafter.yml + โ”‚ โ””โ”€โ”€ workflows + โ”œโ”€โ”€ CHANGELOG.md + โ”œโ”€โ”€ CODE_OF_CONDUCT.md + โ”œโ”€โ”€ CONTRIBUTING.md + โ”œโ”€โ”€ Dockerfile + โ”œโ”€โ”€ LICENSE + โ”œโ”€โ”€ Makefile + โ”œโ”€โ”€ README.md + โ”œโ”€โ”€ docs + โ”‚ โ”œโ”€โ”€ css + โ”‚ โ”œโ”€โ”€ docs + โ”‚ โ”œโ”€โ”€ js + โ”‚ โ””โ”€โ”€ overrides + โ”œโ”€โ”€ examples + โ”‚ โ”œโ”€โ”€ images + โ”‚ โ””โ”€โ”€ markdown + โ”œโ”€โ”€ mkdocs.yml + โ”œโ”€โ”€ noxfile.py + โ”œโ”€โ”€ poetry.lock + โ”œโ”€โ”€ pyproject.toml + โ”œโ”€โ”€ readmeai + โ”‚ โ”œโ”€โ”€ __init__.py + โ”‚ โ”œโ”€โ”€ _agent.py + โ”‚ โ”œโ”€โ”€ _exceptions.py + โ”‚ โ”œโ”€โ”€ cli + โ”‚ โ”œโ”€โ”€ config + โ”‚ โ”œโ”€โ”€ core + โ”‚ โ”œโ”€โ”€ generators + โ”‚ โ”œโ”€โ”€ models + โ”‚ โ”œโ”€โ”€ parsers + โ”‚ โ”œโ”€โ”€ services + โ”‚ โ””โ”€โ”€ utils + โ”œโ”€โ”€ scripts + โ”‚ โ”œโ”€โ”€ clean.sh + โ”‚ โ”œโ”€โ”€ docker.sh + โ”‚ โ”œโ”€โ”€ pypi.sh + โ”‚ โ””โ”€โ”€ run_batch.sh + โ”œโ”€โ”€ setup + โ”‚ โ”œโ”€โ”€ environment.yaml + โ”‚ โ”œโ”€โ”€ requirements.txt + โ”‚ โ””โ”€โ”€ setup.sh + โ””โ”€โ”€ tests + โ”œโ”€โ”€ __init__.py + โ”œโ”€โ”€ cli + โ”œโ”€โ”€ config + โ”œโ”€โ”€ conftest.py + โ”œโ”€โ”€ core + โ”œโ”€โ”€ generators + โ”œโ”€โ”€ models + โ”œโ”€โ”€ parsers + โ”œโ”€โ”€ services + โ”œโ”€โ”€ test_agent.py + โ”œโ”€โ”€ test_exceptions.py + โ””โ”€โ”€ utils +``` + +--- + +## ๐Ÿงฉ Modules + +
    . + +| File | Summary | +| --- | --- | +| [Dockerfile](https://github.com/eli64s/readme-ai/blob/main/Dockerfile) | Builds a Docker image for the readmeai package, setting up a non-root user, installing dependencies, and configuring the environment. The image runs the readmeai CLI by default. | +| [Makefile](https://github.com/eli64s/readme-ai/blob/main/Makefile) | Manages repository cleanup, formatting, linting, and testing tasks.-Builds Conda package, generates requirements file, and searches for a word in the directory.-Executes various commands for maintaining code quality and project organization. | +| [pyproject.toml](https://github.com/eli64s/readme-ai/blob/main/pyproject.toml) | Generates README files using AI models. Key features include markdown generation, badge integration, and AI-powered content creation. Supports Python, markdown, and various AI libraries. | +| [noxfile.py](https://github.com/eli64s/readme-ai/blob/main/noxfile.py) | Executes tests across multiple Python versions by installing the package and running the test suite with coverage reports. The code ensures seamless testing workflow for the repositorys Python versions. | + +
    + +
    setup + +| File | Summary | +| --- | --- | +| [setup.sh](https://github.com/eli64s/readme-ai/blob/main/setup/setup.sh) | Facilitates environment setup for README-AI project. Checks for and installs dependencies like tree, Git, Conda, and Python 3.8+. Creates readmeai Conda environment, activates it, adds Python path to PATH, and installs required packages. | +| [requirements.txt](https://github.com/eli64s/readme-ai/blob/main/setup/requirements.txt) | Specifies dependencies for the project, ensuring compatibility with Python versions. Key libraries include aiohttp, pydantic, and google-ai-generativelanguage. Enhances functionality and performance through external packages. | +| [environment.yaml](https://github.com/eli64s/readme-ai/blob/main/setup/environment.yaml) | Defines project dependencies and environment settings for the readmeai package. Specifies Python version, required packages, and channels for package installation. | + +
    + +
    tests + +| File | Summary | +| --- | --- | +| [parsers](https://github.com/eli64s/readme-ai/blob/main/tests/parsers) | Validates data parsing functionality ensuring accurate extraction and transformation. Enhances data integrity and reliability within the repositorys architecture. | + +
    + +
    scripts + +| File | Summary | +| --- | --- | +| [run_batch.sh](https://github.com/eli64s/readme-ai/blob/main/scripts/run_batch.sh) | Generates dynamic markdown files for multiple repositories, customizing badges, alignment, and images. Executes commands based on repository index, incorporating various API options and styling choices. | +| [pypi.sh](https://github.com/eli64s/readme-ai/blob/main/scripts/pypi.sh) | Deploys the readmeai package to PyPI, ensuring a clean build and successful upload. Utilizes twine for secure distribution, enhancing the project's accessibility and visibility within the Python community. | +| [clean.sh](https://github.com/eli64s/readme-ai/blob/main/scripts/clean.sh) | Cleans build, test, coverage, and Python artifacts by removing various artifact directories and files. Provides commands for cleaning specific artifact types. | +| [docker.sh](https://github.com/eli64s/readme-ai/blob/main/scripts/docker.sh) | Builds and publishes a Docker image for the readme-ai project, supporting multiple platforms. Executes Docker Buildx commands to create, build, and push the image. | + +
    + +
    .github + +| File | Summary | +| --- | --- | +| [release-drafter.yml](https://github.com/eli64s/readme-ai/blob/main/.github/release-drafter.yml) | Defines release categories and templates based on conventional changelog standards. Categorizes changes into features, bug fixes, chores, deprecations, removals, security, documentation, and dependency updates. Resolves version increments and generates release notes. | + +
    + +
    .github.workflows + +| File | Summary | +| --- | --- | +| [coverage.yml](https://github.com/eli64s/readme-ai/blob/main/.github/workflows/coverage.yml) | Generates test coverage reports for the README AI project. Integrates with GitHub Actions to ensure code quality and maintain high test coverage levels. | +| [release-pipeline.yml](https://github.com/eli64s/readme-ai/blob/main/.github/workflows/release-pipeline.yml) | Automates release process, ensuring smooth deployment. Orchestrates versioning, changelog updates, and GitHub releases. Enhances project management and collaboration. | +| [release-drafter.yml](https://github.com/eli64s/readme-ai/blob/main/.github/workflows/release-drafter.yml) | Automates release notes generation based on pull requests, enhancing project transparency and communication. Integrates with GitHub Actions to streamline the release process and foster community engagement. | + +
    + +
    readmeai + +| File | Summary | +| --- | --- | +| [_agent.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/_agent.py) | Generates README.md file using AI models, handles API settings, and orchestrates file generation process. Clones repository, preprocesses files, requests AI model responses, and builds README.md with features. Handles image generation based on API availability. | +| [_exceptions.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/_exceptions.py) | CLIError, GitCloneError, GitValidationError, FileSystemError, FileReadError, FileWriteError, ReadmeGeneratorError, UnsupportedServiceError. Each exception handles specific errors related to CLI, Git operations, file system, readme generation, and service handling. | + +
    + +
    readmeai.parsers + +| File | Summary | +| --- | --- | +| [factory.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/factory.py) | Registers various file parsers for different programming languages and package managers. Provides a dictionary of callable parser methods for project file parsing. | + +
    + +
    readmeai.parsers.configuration + +| File | Summary | +| --- | --- | +| [ansible.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/configuration/ansible.py) | Extracts Ansible configuration details from playbook.yml and site.yml files. | +| [properties.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/configuration/properties.py) | Extracts configuration properties from.properties files using regex patterns for JDBC connection strings and other packages. | +| [apache.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/configuration/apache.py) | Parses Apache configuration files for the README AI repository, extracting key settings and directives. | +| [docker.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/configuration/docker.py) | Parses Docker configuration files to extract package names and services. Handles Dockerfile and docker-compose.yaml parsing errors gracefully. | +| [nginx.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/configuration/nginx.py) | Parses Nginx configuration files in the readme-ai repository, extracting key settings and directives. | + +
    + +
    readmeai.parsers.language + +| File | Summary | +| --- | --- | +| [cpp.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/language/cpp.py) | CMakeParser for CMakeLists.txt, ConfigureAcParser for configure.ac, and MakefileAmParser for Makefile.am. Each parser handles specific file types to identify dependencies, libs, and software. | +| [swift.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/language/swift.py) | Extracts Swift package names from Package.swift files by parsing dependencies. | +| [python.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/language/python.py) | Parses Python dependency files to extract package names without version specifiers.-Handles requirements.txt, TOML (Pipenv, Poetry, Flit), and YAML (environment.yml) formats.-Ensures robust error handling for parsing exceptions. | +| [go.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/language/go.py) | Extracts Go package dependencies from go.mod files using regex pattern matching. Inherits from BaseFileParser to parse content and handle parsing errors. Contributes to the repositorys parsers module for language-specific file parsing. | +| [rust.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/language/rust.py) | Extracts Rust package names from cargo.toml files using toml parsing library. | + +
    + +
    readmeai.parsers.cicd + +| File | Summary | +| --- | --- | +| [bitbucket.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/cicd/bitbucket.py) | Extracts Bitbucket Pipelines configuration details for CI/CD workflows. | +| [travis.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/cicd/travis.py) | Extracts CI/CD configurations from.travis.yml files. | +| [gitlab.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/cicd/gitlab.py) | Extracts GitLab CI configuration details from.gitlab-ci.yml files. | +| [jenkins.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/cicd/jenkins.py) | Extracts Jenkinsfile configurations for CI/CD pipelines. Identifies and parses Jenkinsfile settings for automation and deployment processes within the repositorys architecture. | +| [github.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/cicd/github.py) | Extracts GitHub Actions configurations for CI/CD pipelines from.github/workflows/ directory. | +| [circleci.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/cicd/circleci.py) | Parses CircleCI configuration files in the readme-ai repository. | + +
    + +
    readmeai.parsers.orchestration + +| File | Summary | +| --- | --- | +| [kubernetes.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/orchestration/kubernetes.py) | Parses Kubernetes configuration files for the README AI repository. | + +
    + +
    readmeai.parsers.infrastructure + +| File | Summary | +| --- | --- | +| [terraform.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/infrastructure/terraform.py) | Extracts Terraform configurations from main.tf files for parsing. | +| [cloudformation.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/infrastructure/cloudformation.py) | Extracts AWS CloudFormation configuration details from cloudformation.yaml files. | + +
    + +
    readmeai.parsers.package + +| File | Summary | +| --- | --- | +| [composer.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/composer.py) | Extracts PHP Composer configuration details from composer.json files. | +| [npm.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/npm.py) | Extracts dependencies from package.json and yarn.lock files for the parent repositorys architecture. Parses JSON dependency files and yarn.lock files to retrieve package names for different sections. | +| [gradle.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/gradle.py) | Parses Gradle dependency files to extract package names. Handles both build.gradle and build.gradle.kts formats, utilizing regex patterns for parsing. Implements error handling for parsing exceptions. | +| [nuget.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/nuget.py) | Parses NuGet.Config files for.NET configuration settings. | +| [yarn.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/yarn.py) | Extracts package names from a yarn.lock file using regex pattern matching. | +| [pip.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/pip.py) | Extracts and interprets Pip configuration files for the parent repositorys AI documentation tool. | +| [maven.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/maven.py) | Extracts Maven package names from pom.xml files, handling parsing errors. Parses groupId, artifactId, and version using regex. Appends spring if found in dependencies. Returns a set of unique dependencies. | +| [gem.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/parsers/package/gem.py) | Parses Gemfile.lock (Ruby) configuration files in the readme-ai repository. | + +
    + +
    readmeai.core + +| File | Summary | +| --- | --- | +| [models.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/core/models.py) | Orchestrates batch processing of prompts for Large Language Model API, handling dependencies and file contexts.-Generates text responses based on prompts, including code summaries for project files.-Manages HTTP client session lifecycle for API requests. | +| [preprocess.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/core/preprocess.py) | Generates FileContext instances for repository files, extracts metadata, and processes dependencies using a factory pattern. Returns a list of dependencies and raw file data. | +| [parsers.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/core/parsers.py) | Defines an abstract base class for dependency file parsers in the core module. Implements methods for parsing file content and handling parsing errors. Centralizes error logging for consistent exception handling. | +| [logger.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/core/logger.py) | Implements a custom logger with color and emoji support for the readme-ai package. Provides logging methods for different levels. | +| [utils.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/core/utils.py) | Defines utility methods for configuring LLM API environments. Enumerates keys for service environment variables. Sets service to offline mode if necessary. Retrieves and validates LLM environment variables based on specified service, handling offline mode and missing keys. | + +
    + +
    readmeai.config + +| File | Summary | +| --- | --- | +| [validators.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/validators.py) | Validates Git repository URLs and paths, extracting repository names and setting Git service hosts based on input. | +| [settings.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings.py) | Defines configuration settings for readme-ai package, including API, file paths, Git repository, Markdown templates, and model settings. Loads base and additional configuration files for CLI. | + +
    + +
    readmeai.config.settings + +| File | Summary | +| --- | --- | +| [prompts.toml](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings/prompts.toml) | Summarize** the purpose and features of the `prompts.toml` file in the `readmeai` repository. Describe large language model prompt templates for generative tasks, focusing on architecture, code quality, documentation, integrations, modularity, testing, performance, security, dependencies, and scalability. | +| [parsers.toml](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings/parsers.toml) | Defines project configuration files to parse CI/CD, configuration, infrastructure, monitoring, orchestration, package managers, properties, and language-specific files. | +| [blacklist.toml](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings/blacklist.toml) | Filters out specified directories, file extensions, and file names from preprocessing in the repository. Helps maintain a clean codebase by excluding common unwanted files and folders during development and deployment processes. | +| [languages.toml](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings/languages.toml) | Defines programming language extensions and their names for the project. Centralizes language configuration for consistency across the codebase. Facilitates language-specific operations and enhances code readability. | +| [config.toml](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings/config.toml) | Defines default API, file resources, Git repo, language model, Markdown template settings, badges, TOC, project structure, modules, installation, usage, tests, roadmap, contributing, license, acknowledgments, and contact details for the parent repository. 
| +| [markdown.toml](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings/markdown.toml) | Defines Markdown templates for README.md, including badges, quick links, project structure, and contributing guidelines. | +| [commands.toml](https://github.com/eli64s/readme-ai/blob/main/readmeai/config/settings/commands.toml) | Defines language-specific commands for installation, running, and testing in the project. Organized by programming language, it provides standardized instructions for developers to set up, execute, and test code across various languages. | + +
    + +
    readmeai.utils + +| File | Summary | +| --- | --- | +| [file_handler.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/utils/file_handler.py) | Handles file I/O operations for various file formats, including JSON, Markdown, TOML, TXT, and YAML. Provides methods to read and write content to files, with error handling. Implements a caching mechanism for efficient file reading. | +| [text_cleaner.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/utils/text_cleaner.py) | Cleans and formats LLM API responses by post-processing text. Removes unwanted characters, formats Markdown tables, and ensures proper capitalization. Enhances readability and structure of generated text. | +| [file_resources.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/utils/file_resources.py) | Retrieves absolute path to package resource file, prioritizing `importlib.resources` over `pkg_resources`. Handles resource access errors gracefully. | + +
    + +
    readmeai.models + +| File | Summary | +| --- | --- | +| [offline.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/models/offline.py) | Defines an OfflineHandler model for CLI operation without an LLM API service. Sets default values for offline mode and returns placeholder text instead of LLM API responses. | +| [gemini.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/models/gemini.py) | Implements Google Clouds Gemini API handler with retry logic for generating text responses. Handles API requests, processes responses, and logs output. Inherits from a base model handler and initializes API settings. | +| [tokens.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/models/tokens.py) | Handles tokenization and truncation of text based on specified settings. Counts tokens in a text string, truncates text to a maximum token count, and adjusts the maximum token count based on a specific prompt. Caches encoding for efficiency. | +| [dalle.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/models/dalle.py) | Generates and downloads images using OpenAIs DALL-E model. Initializes model settings, formats prompt string, and handles image generation and download. Handles errors and logs events. | +| [factory.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/models/factory.py) | Generates appropriate LLM handler based on CLI input using a model factory. Handles different LLM services like Offline, OpenAI, and Gemini. Ensures compatibility with CLI configurations. | +| [prompts.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/models/prompts.py) | Generates and formats prompts for LLM API requests based on provided context. Retrieves prompt templates and injects context into them. Async functions create additional and summary prompts for LLM API, incorporating various data points. 
| +| [openai.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/models/openai.py) | Implements OpenAI API LLM handler with Ollama support. Initializes model settings, builds payload for API requests, and processes responses. Handles retries for network errors. Logs responses and cleans generated text. | + +
    + +
    readmeai.cli + +| File | Summary | +| --- | --- | +| [options.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/cli/options.py) | Defines CLI options for badge icons, header images, and LLM API key selection. Enables user input for custom image URLs and badge styles. Facilitates setting alignment, language, model, and output file for README generation. | +| [main.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/cli/main.py) | Orchestrates CLI commands for readme-ai package. Parses user inputs for AI model generation. Integrates with readme_agent for generating READMEs. Facilitates customization of output through various options. | + +
    + +
    readmeai.generators + +| File | Summary | +| --- | --- | +| [tree.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/generators/tree.py) | Generates directory tree structure for a code repository, enhancing visualization and organization. Builds a formatted tree with specified depth, improving repository navigation and understanding. | +| [builder.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/generators/builder.py) | Header, code summaries, directory tree, Getting Started, and Contributing. Builds the README file with badges, tables, tree structure, setup data, and contribution guidelines. Handles customization based on configuration settings. | +| [utils.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/generators/utils.py) | Removes default emojis from markdown content. Splits markdown by level 2 headings. Updates heading names by removing emojis, underscores, and spaces | +| [badges.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/generators/badges.py) | Generates and formats SVG badges for README using shields.io and skill icons. Builds metadata badges, HTML badges for project dependencies, and skill icons. Handles badge alignment and styles based on configuration settings. | +| [tables.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/generators/tables.py) | Generates markdown tables for code summaries, grouping them by sub-directory. Formats data into readable tables for README files, enhancing project documentation. | +| [quickstart.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/generators/quickstart.py) | Generates the Quickstart section for the README by dynamically determining top language, setup commands, and prerequisites based on code summaries and configuration settings. | + +
    + +
    readmeai.services + +| File | Summary | +| --- | --- | +| [git.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/services/git.py) | Implements Git operations for cloning and validating repositories. Enumerates Git service providers with API and file URL templates. Functions for cloning, removing hidden files, fetching API URLs, and finding Git executable paths. | +| [metadata.py](https://github.com/eli64s/readme-ai/blob/main/readmeai/services/metadata.py) | Retrieves GitHub repository metadata via the host provider's API. Parses raw data into a structured dataclass containing repository details, statistics, URLs, programming languages, topics, and license information. Handles errors gracefully. | + +
    + +--- + +## ๐Ÿš€ Getting Started + +### ๐Ÿ”– Prerequisites + +**Python**: `version x.y.z` + +### ๐Ÿ“ฆ Installation + +Build the project from source: + +1. Clone the readme-ai repository: +```sh +โฏ git clone https://github.com/eli64s/readme-ai +``` + +2. Navigate to the project directory: +```sh +โฏ cd readme-ai +``` + +3. Install the required dependencies: +```sh +โฏ pip install -r requirements.txt +``` + +### ๐Ÿค– Usage + +To run the project, execute the following command: + +```sh +โฏ python main.py +``` + +### ๐Ÿงช Tests + +Execute the test suite using the following command: + +```sh +โฏ pytest +``` + +--- + +## ๐Ÿ“Œ Project Roadmap + +- [X] **`Task 1`**: Implement feature one. +- [ ] **`Task 2`**: Implement feature two. +- [ ] **`Task 3`**: Implement feature three. + +--- + +## ๐Ÿค Contributing + +Contributions are welcome! Here are several ways you can contribute: + +- **[Report Issues](https://github.com/eli64s/readme-ai/issues)**: Submit bugs found or log feature requests for the `readme-ai` project. +- **[Submit Pull Requests](https://github.com/eli64s/readme-ai/blob/main/CONTRIBUTING.md)**: Review open PRs, and submit your own PRs. +- **[Join the Discussions](https://github.com/eli64s/readme-ai/discussions)**: Share your insights, provide feedback, or ask questions. + +
    +Contributing Guidelines + +1. **Fork the Repository**: Start by forking the project repository to your github account. +2. **Clone Locally**: Clone the forked repository to your local machine using a git client. + ```sh + git clone https://github.com/eli64s/readme-ai + ``` +3. **Create a New Branch**: Always work on a new branch, giving it a descriptive name. + ```sh + git checkout -b new-feature-x + ``` +4. **Make Your Changes**: Develop and test your changes locally. +5. **Commit Your Changes**: Commit with a clear message describing your updates. + ```sh + git commit -m 'Implemented new feature x.' + ``` +6. **Push to github**: Push the changes to your forked repository. + ```sh + git push origin new-feature-x + ``` +7. **Submit a Pull Request**: Create a PR against the original project repository. Clearly describe the changes and their motivations. +8. **Review**: Once your PR is reviewed and approved, it will be merged into the main branch. Congratulations on your contribution! +
    + +
    +Contributor Graph +
    +

    + + + +

    +
    + +--- + +## ๐ŸŽ— License + +This project is protected under the [SELECT-A-LICENSE](https://choosealicense.com/licenses) License. For more details, refer to the [LICENSE](https://choosealicense.com/licenses/) file. + +--- + +## ๐Ÿ™Œ Acknowledgments + +- List any resources, contributors, inspiration, etc. here. + +--- diff --git a/examples/markdown/readme-sqlmesh.md b/examples/markdown/readme-sqlmesh.md new file mode 100644 index 00000000..214181c8 --- /dev/null +++ b/examples/markdown/readme-sqlmesh.md @@ -0,0 +1,242 @@ +

    + SQLMESH-TEST-TOOLS-logo +

    +

    +

    SQLMESH-TEST-TOOLS

    +

    +

    + Empower Your SQL, Automate with Confidence! +

    +

    + license + last-commit + repo-top-language + repo-language-count +

    +

    + Built with the tools and technologies: +

    +

    + DuckDB + Jupyter + Poetry + Python + Pytest +

    + +
    + +##### ๐Ÿ”— Table of Contents + +- [๐Ÿ“ Overview](#-overview) +- [๐Ÿ‘พ Features](#-features) +- [๐Ÿ“‚ Repository Structure](#-repository-structure) +- [๐Ÿงฉ Modules](#-modules) +- [๐Ÿš€ Getting Started](#-getting-started) + - [๐Ÿ”– Prerequisites](#-prerequisites) + - [๐Ÿ“ฆ Installation](#-installation) + - [๐Ÿค– Usage](#-usage) + - [๐Ÿงช Tests](#-tests) +- [๐Ÿ“Œ Project Roadmap](#-project-roadmap) +- [๐Ÿค Contributing](#-contributing) +- [๐ŸŽ— License](#-license) +- [๐Ÿ™Œ Acknowledgments](#-acknowledgments) + +--- + +## ๐Ÿ“ Overview + +The sqlmesh-test-tools project is designed to enhance the reliability and efficiency of SQL data models within the SQLMesh framework. It features tools for generating both synthetic datasets and YAML configuration files, which are crucial for automating SQL unit tests. By parsing and executing SQL queries, and then validating these against generated datasets, the project supports comprehensive testing workflows. This setup not only streamlines the development process but also ensures the accuracy and robustness of SQL queries and data models, making it an invaluable asset for developers working with complex data-driven applications. + +--- + +## ๐Ÿ‘พ Features + +| | Feature | Description | +|----|--------------------|---------------------------------------------------------------| +| โš™๏ธ | **Architecture** | Modular design with separate components for dataset generation, YAML configuration, and SQL testing. Utilizes Jupyter notebooks for configuration and data generation scripts. | +| ๐Ÿ”ฉ | **Code Quality** | Code is structured with clear separation of concerns. Uses Python best practices and adheres to PEP8 standards, facilitated by tools like `flake8` and `black`. | +| ๐Ÿ“„ | **Documentation** | Documentation is embedded within code and Jupyter notebooks, explaining functionalities and usage. Lacks a comprehensive README or external documentation. 
| +| ๐Ÿ”Œ | **Integrations** | Integrates with SQLMesh for SQL testing, DuckDB for database interactions, and Faker for data generation. | +| ๐Ÿงฉ | **Modularity** | High modularity with distinct components for data generation, test file creation, and SQL execution. Facilitates reuse and maintenance. | +| ๐Ÿงช | **Testing** | Uses `pytest` for testing. Specific tests for components are not detailed but likely integrated within the development workflow. | +| โšก๏ธ | **Performance** | Performance specifics not detailed, but use of DuckDB suggests efficient handling of SQL queries and datasets. | +| ๐Ÿ›ก๏ธ | **Security** | No explicit security measures detailed. Focus is on testing and data generation rather than secure deployment or data handling. | +| ๐Ÿ“ฆ | **Dependencies** | Key dependencies include `duckdb`, `pytest`, `Faker`, `typer`, `pyarrow`, `PyYAML`, `sqlglot`, and `black`. | +| ๐Ÿš€ | **Scalability** | Scalability is indirectly supported through the use of DuckDB and modular design, allowing for expansion in data size and complexity of SQL queries. | + +--- + +## ๐Ÿ“‚ Repository Structure + +```sh +โ””โ”€โ”€ sqlmesh-test-tools/ + โ”œโ”€โ”€ README.md + โ”œโ”€โ”€ data + โ”‚ โ””โ”€โ”€ seed_metric_loans.csv + โ”œโ”€โ”€ docs + โ”‚ โ”œโ”€โ”€ .gitkeep + โ”‚ โ””โ”€โ”€ images + โ”‚ โ””โ”€โ”€ project-logo.png + โ”œโ”€โ”€ notebooks + โ”‚ โ”œโ”€โ”€ faker_dataset_generator.ipynb + โ”‚ โ””โ”€โ”€ sqlmesh_yml_generator.ipynb + โ”œโ”€โ”€ poetry.lock + โ”œโ”€โ”€ pyproject.toml + โ”œโ”€โ”€ sql + โ”‚ โ””โ”€โ”€ test_metric_loans_model.sql + โ”œโ”€โ”€ src + โ”‚ โ”œโ”€โ”€ __init__.py + โ”‚ โ”œโ”€โ”€ data_generator.py + โ”‚ โ””โ”€โ”€ test_generator.py + โ””โ”€โ”€ tests + โ””โ”€โ”€ test_metric_loans_model.yaml +``` + +--- + +## ๐Ÿงฉ Modules + +
    . + +| File | Summary | +| --- | --- | +| [pyproject.toml](https://github.com/eli64s/sqlmesh-test-tools/blob/main/pyproject.toml) | Defines the configuration for the SQL Unit Test Generator project, specifying dependencies essential for generating and testing SQL queries within the SQLMesh framework. It sets up the project environment and tooling for development, ensuring compatibility and streamlined project setup. | + +
    + +
    notebooks + +| File | Summary | +| --- | --- | +| [sqlmesh_yml_generator.ipynb](https://github.com/eli64s/sqlmesh-test-tools/blob/main/notebooks/sqlmesh_yml_generator.ipynb) | Generates YAML configuration files for SQL unit tests by parsing SQL queries, executing them against a dataset, and outputting the results in a structured format to facilitate automated testing within the SQLMesh testing framework. This enhances the reliability of SQL data models by automating test validations. | +| [faker_dataset_generator.ipynb](https://github.com/eli64s/sqlmesh-test-tools/blob/main/notebooks/faker_dataset_generator.ipynb) | Generates synthetic datasets for testing SQL queries by leveraging the Faker library to produce realistic loan and restaurant service data, which are then saved as CSV files for integration into automated testing workflows within the repository's SQL testing framework. | + +
    + +
    src + +| File | Summary | +| --- | --- | +| [test_generator.py](https://github.com/eli64s/sqlmesh-test-tools/blob/main/src/test_generator.py) | Generates YAML test files for SQL models by extracting and testing SQL queries and Common Table Expressions (CTEs) against provided datasets, facilitating automated testing and validation within the SQLMesh test tools ecosystem. Integrates with a CLI for streamlined operations. | +| [data_generator.py](https://github.com/eli64s/sqlmesh-test-tools/blob/main/src/data_generator.py) | Generates synthetic datasets for testing within the sqlmesh-test-tools repository, supporting the validation of SQL queries and data models by providing customizable and scalable data inputs, crucial for ensuring the robustness and accuracy of data operations across different testing scenarios. | + +
    + +
    sql + +| File | Summary | +| --- | --- | +| [test_metric_loans_model.sql](https://github.com/eli64s/sqlmesh-test-tools/blob/main/sql/test_metric_loans_model.sql) | Analyzes loan data by computing metrics such as average loan amount, age demographics of applicants, and loan frequency within the year, integrating these insights through a series of joined SQL queries to facilitate comprehensive data-driven decision-making within the repository's testing framework. | + +
    + +--- + +## ๐Ÿš€ Getting Started + +### ๐Ÿ”– Prerequisites + +**JupyterNotebook**: `version x.y.z` + +### ๐Ÿ“ฆ Installation + +Build the project from source: + +1. Clone the sqlmesh-test-tools repository: +```sh +โฏ git clone https://github.com/eli64s/sqlmesh-test-tools +``` + +2. Navigate to the project directory: +```sh +โฏ cd sqlmesh-test-tools +``` + +3. Install the required dependencies: +```sh +โฏ pip install -r requirements.txt +``` + +### ๐Ÿค– Usage + +To run the project, execute the following command: + +```sh +โฏ jupyter nbconvert --execute notebook.ipynb +``` + +### ๐Ÿงช Tests + +Execute the test suite using the following command: + +```sh +โฏ pytest notebook_test.py +``` + +--- + +## ๐Ÿ“Œ Project Roadmap + +- [X] **`Task 1`**: Implement feature one. +- [ ] **`Task 2`**: Implement feature two. +- [ ] **`Task 3`**: Implement feature three. + +--- + +## ๐Ÿค Contributing + +Contributions are welcome! Here are several ways you can contribute: + +- **[Report Issues](https://github.com/eli64s/sqlmesh-test-tools/issues)**: Submit bugs found or log feature requests for the `sqlmesh-test-tools` project. +- **[Submit Pull Requests](https://github.com/eli64s/sqlmesh-test-tools/blob/main/CONTRIBUTING.md)**: Review open PRs, and submit your own PRs. +- **[Join the Discussions](https://github.com/eli64s/sqlmesh-test-tools/discussions)**: Share your insights, provide feedback, or ask questions. + +
    +Contributing Guidelines + +1. **Fork the Repository**: Start by forking the project repository to your github account. +2. **Clone Locally**: Clone the forked repository to your local machine using a git client. + ```sh + git clone https://github.com/eli64s/sqlmesh-test-tools + ``` +3. **Create a New Branch**: Always work on a new branch, giving it a descriptive name. + ```sh + git checkout -b new-feature-x + ``` +4. **Make Your Changes**: Develop and test your changes locally. +5. **Commit Your Changes**: Commit with a clear message describing your updates. + ```sh + git commit -m 'Implemented new feature x.' + ``` +6. **Push to github**: Push the changes to your forked repository. + ```sh + git push origin new-feature-x + ``` +7. **Submit a Pull Request**: Create a PR against the original project repository. Clearly describe the changes and their motivations. +8. **Review**: Once your PR is reviewed and approved, it will be merged into the main branch. Congratulations on your contribution! +
    + +
    +Contributor Graph +
    +

    + + + +

    +
    + +--- + +## ๐ŸŽ— License + +This project is protected under the [SELECT-A-LICENSE](https://choosealicense.com/licenses) License. For more details, refer to the [LICENSE](https://choosealicense.com/licenses/) file. + +--- + +## ๐Ÿ™Œ Acknowledgments + +- List any resources, contributors, inspiration, etc. here. + +--- diff --git a/examples/markdown/readme-streamlit-dalle.md b/examples/markdown/readme-streamlit-dalle.md new file mode 100644 index 00000000..1f22f144 --- /dev/null +++ b/examples/markdown/readme-streamlit-dalle.md @@ -0,0 +1,229 @@ +

    + README-AI-STREAMLIT-logo +

    +

    +

    README-AI-STREAMLIT

    +

    +

    + Empower READMEs with AI magic, effortlessly. +

    +

    + license + last-commit + repo-top-language + repo-language-count +

    +

    + Built with the tools and technologies: +

    +

    + GNU%20Bash + Streamlit + Poetry + Python +

    + +
    + +##### ๐Ÿ”— Table of Contents + +- [๐Ÿ“ Overview](#-overview) +- [๐Ÿ‘พ Features](#-features) +- [๐Ÿ“‚ Repository Structure](#-repository-structure) +- [๐Ÿงฉ Modules](#-modules) +- [๐Ÿš€ Getting Started](#-getting-started) + - [๐Ÿ”– Prerequisites](#-prerequisites) + - [๐Ÿ“ฆ Installation](#-installation) + - [๐Ÿค– Usage](#-usage) + - [๐Ÿงช Tests](#-tests) +- [๐Ÿ“Œ Project Roadmap](#-project-roadmap) +- [๐Ÿค Contributing](#-contributing) +- [๐ŸŽ— License](#-license) +- [๐Ÿ™Œ Acknowledgments](#-acknowledgments) + +--- + +## ๐Ÿ“ Overview + +The readme-ai-streamlit project automates README generation for Streamlit apps using AI. It streamlines the process by collecting user inputs, enhancing customization with badges and emojis, and providing a web app interface for generating README files. The project's core functionalities include cleaning artifacts, managing dependencies with Poetry, and facilitating efficient development workflows. By leveraging AI in a Streamlit web app, readme-ai-streamlit adds value by simplifying README creation, improving user experience, and enhancing project documentation. + +--- + +## ๐Ÿ‘พ Features + +| | Feature | Description | +|----|-------------------|---------------------------------------------------------------| +| โš™๏ธ | **Architecture** | The project follows a modular architecture with a CLI tool for README generation and a Streamlit web app for AI-based README generation. It uses Python for backend logic and Streamlit for the frontend. | +| ๐Ÿ”ฉ | **Code Quality** | The codebase maintains high code quality standards with consistent formatting and linting using tools like Ruff. It follows best practices for Python development and includes a Makefile for automation tasks. | +| ๐Ÿ“„ | **Documentation** | The project has detailed documentation in the form of comments in the codebase and a README file. It explains the project structure, setup instructions, and usage guidelines for developers. 
| +| ๐Ÿ”Œ | **Integrations** | Key integrations include README AI library for AI-based README generation, Streamlit for web app development, and Poetry for dependency management. External dependencies like Ruff are used for code formatting and linting. | +| ๐Ÿงฉ | **Modularity** | The codebase is modular and reusable, with separate modules for CLI functionality, utility functions, and the Streamlit web app. This design allows for easy maintenance and extension of the project. | +| ๐Ÿงช | **Testing** | Testing frameworks and tools are not explicitly mentioned in the repository contents. However, the project can benefit from incorporating testing frameworks like pytest for automated testing. | +| โšก๏ธ | **Performance** | The project's efficiency is enhanced by using AI for README generation and Streamlit for interactive web app development. Resource usage is optimized for generating README files based on user inputs. | +| ๐Ÿ›ก๏ธ | **Security** | Security measures for data protection and access control are not explicitly mentioned in the repository contents. Implementing secure coding practices and data encryption can enhance the project's security. | +| ๐Ÿ“ฆ | **Dependencies** | Key external libraries and dependencies include README AI, Streamlit, Poetry for dependency management, and Ruff for code formatting and linting. These dependencies streamline development and enhance project functionality. | +| ๐Ÿš€ | **Scalability** | The project's architecture and design support scalability for handling increased traffic and load. The use of Streamlit for web app development allows for easy scaling of the AI-based README generation functionality. 
| + +--- + +## ๐Ÿ“‚ Repository Structure + +```sh +โ””โ”€โ”€ readme-ai-streamlit/ + โ”œโ”€โ”€ LICENSE + โ”œโ”€โ”€ Makefile + โ”œโ”€โ”€ README.md + โ”œโ”€โ”€ poetry.lock + โ”œโ”€โ”€ pyproject.toml + โ”œโ”€โ”€ scripts + โ”‚ โ””โ”€โ”€ clean.sh + โ”œโ”€โ”€ src + โ”‚ โ”œโ”€โ”€ __init__.py + โ”‚ โ”œโ”€โ”€ app.py + โ”‚ โ”œโ”€โ”€ cli.py + โ”‚ โ””โ”€โ”€ utils.py + โ””โ”€โ”€ tests + โ”œโ”€โ”€ __init__.py + โ””โ”€โ”€ conftest.py +``` + +--- + +## ๐Ÿงฉ Modules + +
    . + +| File | Summary | +| --- | --- | +| [Makefile](https://github.com/eli64s/readme-ai-streamlit/blob/main/Makefile) | Orchestrates code formatting, linting, testing, and application execution. Facilitates repository cleanup, Conda package building, and word search functionality. Enhances development workflow efficiency and maintenance. | +| [pyproject.toml](https://github.com/eli64s/readme-ai-streamlit/blob/main/pyproject.toml) | Generates READMEs automatically on Streamlit using the README AI library. Manages dependencies and project metadata with Poetry. Maintains code formatting and linting standards with Ruff. Supports development with additional dependencies like Ruff. | + +
    + +
    scripts + +| File | Summary | +| --- | --- | +| [clean.sh](https://github.com/eli64s/readme-ai-streamlit/blob/main/scripts/clean.sh) | Cleans build, test, and Python artifacts. Removes files and directories related to builds, tests, coverage, backups, and caches. Provides commands for specific cleaning tasks. | + +
    + +
    src + +| File | Summary | +| --- | --- | +| [cli.py](https://github.com/eli64s/readme-ai-streamlit/blob/main/src/cli.py) | Collects user inputs for configuring a README generation web app. Builds a command to execute a CLI tool for generating README files based on user settings. Enhances README customization with badges, emojis, and project logo options. | +| [utils.py](https://github.com/eli64s/readme-ai-streamlit/blob/main/src/utils.py) | Provides utility functions for the Streamlit app, enhancing functionality and improving user experience. | +| [app.py](https://github.com/eli64s/readme-ai-streamlit/blob/main/src/app.py) | Generates README files using AI in a Streamlit web app. Initializes session state, executes commands, and displays output. Handles README generation settings, previews, downloads, and markdown copying. Logs errors if generation fails. | + +
    + +--- + +## ๐Ÿš€ Getting Started + +### ๐Ÿ”– Prerequisites + +**Python**: `version x.y.z` + +### ๐Ÿ“ฆ Installation + +Build the project from source: + +1. Clone the readme-ai-streamlit repository: +```sh +โฏ git clone https://github.com/eli64s/readme-ai-streamlit +``` + +2. Navigate to the project directory: +```sh +โฏ cd readme-ai-streamlit +``` + +3. Install the required dependencies: +```sh +โฏ pip install -r requirements.txt +``` + +### ๐Ÿค– Usage + +To run the project, execute the following command: + +```sh +โฏ python main.py +``` + +### ๐Ÿงช Tests + +Execute the test suite using the following command: + +```sh +โฏ pytest +``` + +--- + +## ๐Ÿ“Œ Project Roadmap + +- [X] **`Task 1`**: Implement feature one. +- [ ] **`Task 2`**: Implement feature two. +- [ ] **`Task 3`**: Implement feature three. + +--- + +## ๐Ÿค Contributing + +Contributions are welcome! Here are several ways you can contribute: + +- **[Report Issues](https://github.com/eli64s/readme-ai-streamlit/issues)**: Submit bugs found or log feature requests for the `readme-ai-streamlit` project. +- **[Submit Pull Requests](https://github.com/eli64s/readme-ai-streamlit/blob/main/CONTRIBUTING.md)**: Review open PRs, and submit your own PRs. +- **[Join the Discussions](https://github.com/eli64s/readme-ai-streamlit/discussions)**: Share your insights, provide feedback, or ask questions. + +
    +Contributing Guidelines + +1. **Fork the Repository**: Start by forking the project repository to your github account. +2. **Clone Locally**: Clone the forked repository to your local machine using a git client. + ```sh + git clone https://github.com/eli64s/readme-ai-streamlit + ``` +3. **Create a New Branch**: Always work on a new branch, giving it a descriptive name. + ```sh + git checkout -b new-feature-x + ``` +4. **Make Your Changes**: Develop and test your changes locally. +5. **Commit Your Changes**: Commit with a clear message describing your updates. + ```sh + git commit -m 'Implemented new feature x.' + ``` +6. **Push to github**: Push the changes to your forked repository. + ```sh + git push origin new-feature-x + ``` +7. **Submit a Pull Request**: Create a PR against the original project repository. Clearly describe the changes and their motivations. +8. **Review**: Once your PR is reviewed and approved, it will be merged into the main branch. Congratulations on your contribution! +
    + +
    +Contributor Graph +
    +

    + + + +

    +
    + +--- + +## ๐ŸŽ— License + +This project is protected under the [SELECT-A-LICENSE](https://choosealicense.com/licenses) License. For more details, refer to the [LICENSE](https://choosealicense.com/licenses/) file. + +--- + +## ๐Ÿ™Œ Acknowledgments + +- List any resources, contributors, inspiration, etc. here. + +--- diff --git a/examples/markdown/readme-streamlit-dalle.png b/examples/markdown/readme-streamlit-dalle.png new file mode 100644 index 00000000..e41ae4d6 Binary files /dev/null and b/examples/markdown/readme-streamlit-dalle.png differ diff --git a/examples/markdown/readme-streamlit-minimal.md b/examples/markdown/readme-streamlit-minimal.md new file mode 100644 index 00000000..8a1c28f5 --- /dev/null +++ b/examples/markdown/readme-streamlit-minimal.md @@ -0,0 +1,210 @@ +[]() + +##    README-AI-STREAMLIT + +     *Code clarity, README magic, effortlessly unleashed.* + +

       + license + last-commit + repo-top-language + repo-language-count +

    + +
    + +##### Quick Links + +- [ Overview](#-overview) +- [ Features](#-features) +- [ Repository Structure](#-repository-structure) +- [ Modules](#-modules) +- [ Getting Started](#-getting-started) + - [ Prerequisites](#-prerequisites) + - [ Installation](#-installation) + - [ Usage](#-usage) + - [ Tests](#-tests) +- [ Project Roadmap](#-project-roadmap) +- [ Contributing](#-contributing) +- [ License](#-license) +- [ Acknowledgments](#-acknowledgments) + +--- + +## Overview + +The readme-ai-streamlit project is a Streamlit-based tool that simplifies the generation of README files for software projects. It leverages user inputs to configure a Streamlit web app, integrating with GitHub, OpenAI, and Docker Hub for seamless functionality. Key files like Makefile and pyproject.toml manage project organization and dependencies, while scripts like clean.sh ensure repository cleanliness. The app.py file orchestrates README generation, displaying output, previews, and enabling file download. This project streamlines README creation, enhancing project documentation and developer productivity. + +--- + +## Features + +| | Feature | Description | +|----|-------------------|---------------------------------------------------------------| +| โš™๏ธ | **Architecture** | The project follows a modular architecture with clear separation of concerns. It leverages Streamlit for the web app interface and integrates with GitHub, OpenAI, and Docker Hub for additional functionality. | +| ๐Ÿ”ฉ | **Code Quality** | The codebase maintains high quality with a Makefile for code formatting, linting, testing, and application running. It enforces linting rules through pyproject.toml and ensures project organization and cleanliness. | +| ๐Ÿ“„ | **Documentation** | The project provides detailed documentation in the form of inline comments and docstrings. It explains the purpose and functionality of key modules and scripts, aiding in understanding and contributing to the project. 
| +| ๐Ÿ”Œ | **Integrations** | Key integrations include Streamlit for the web app, GitHub for version control, OpenAI for AI capabilities, and Docker Hub for containerization. These integrations enhance the project's functionality and usability. | +| ๐Ÿงฉ | **Modularity** | The codebase exhibits high modularity with separate modules for CLI, utility functions, and app orchestration. This modularity promotes code reusability and maintainability. | +| ๐Ÿงช | **Testing** | The project uses testing frameworks and tools for ensuring code quality and functionality. Specific testing details are not provided in the repository contents. | +| โšก๏ธ | **Performance** | The project demonstrates efficient performance in generating README files through the Streamlit web app. Resource usage is optimized for smooth user experience. | +| ๐Ÿ›ก๏ธ | **Security** | Security measures for data protection and access control are not explicitly mentioned in the repository contents. Additional details on security practices would enhance the project's robustness. | +| ๐Ÿ“ฆ | **Dependencies** | Key external libraries and dependencies include Streamlit, toml, and other packages for web app development and functionality. These dependencies contribute to the project's feature set. | +| ๐Ÿš€ | **Scalability** | The project shows potential for scalability with its modular architecture and integration capabilities. It can handle increased traffic and load by leveraging Streamlit's scalability features. | + +--- + +## Repository Structure + +```sh +โ””โ”€โ”€ readme-ai-streamlit/ + โ”œโ”€โ”€ LICENSE + โ”œโ”€โ”€ Makefile + โ”œโ”€โ”€ README.md + โ”œโ”€โ”€ poetry.lock + โ”œโ”€โ”€ pyproject.toml + โ”œโ”€โ”€ scripts + โ”œโ”€โ”€ src + โ””โ”€โ”€ tests +``` + +--- + +## Modules + +
    . + +| File | Summary | +| --- | --- | +| [Makefile](https://github.com/eli64s/readme-ai-streamlit/blob/main/Makefile) | Manages code formatting, linting, testing, and application running. Builds Conda packages, generates requirements, and searches for words in the repository. Facilitates cleanup and fixes untracked files. Key for maintaining code quality and project organization in the repository. | +| [pyproject.toml](https://github.com/eli64s/readme-ai-streamlit/blob/main/pyproject.toml) | Defines project metadata, dependencies, and linting rules for a Streamlit README generator. Specifies Python version, required packages, and development tools. Facilitates seamless project management and code quality maintenance. | + +
    + +
    scripts + +| File | Summary | +| --- | --- | +| [clean.sh](https://github.com/eli64s/readme-ai-streamlit/blob/main/scripts/clean.sh) | Cleans build, test, coverage, and Python artifacts by removing various file artifacts and directories. Provides commands to clean specific artifact types. Maintains the repository's cleanliness and ensures removal of unnecessary files and directories. | + +
    + +
    src + +| File | Summary | +| --- | --- | +| [cli.py](https://github.com/eli64s/readme-ai-streamlit/blob/main/src/cli.py) | Collects user inputs for configuring a Streamlit web app. Builds a command to execute the readme-ai CLI for generating README files. Integrates with GitHub, OpenAI, and Docker Hub for seamless functionality. | +| [utils.py](https://github.com/eli64s/readme-ai-streamlit/blob/main/src/utils.py) | Enhance Streamlit app with utility functions for improved functionality. | +| [app.py](https://github.com/eli64s/readme-ai-streamlit/blob/main/src/app.py) | Orchestrates README generation in a Streamlit web app. Initializes session state and executes CLI commands. Displays output, previews, and enables file download. Handles errors gracefully. | + +
    + +--- + +## Getting Started + +### Prerequisites + +**Python**: `version x.y.z` + +### Installation + +Build the project from source: + +1. Clone the readme-ai-streamlit repository: +```sh +โฏ git clone https://github.com/eli64s/readme-ai-streamlit +``` + +2. Navigate to the project directory: +```sh +โฏ cd readme-ai-streamlit +``` + +3. Install the required dependencies: +```sh +โฏ pip install -r requirements.txt +``` + +### Usage + +To run the project, execute the following command: + +```sh +โฏ python main.py +``` + +### Tests + +Execute the test suite using the following command: + +```sh +โฏ pytest +``` + +--- + +## Project Roadmap + +- [X] **`Task 1`**: Implement feature one. +- [ ] **`Task 2`**: Implement feature two. +- [ ] **`Task 3`**: Implement feature three. + +--- + +## Contributing + +Contributions are welcome! Here are several ways you can contribute: + +- **[Report Issues](https://github.com/eli64s/readme-ai-streamlit/issues)**: Submit bugs found or log feature requests for the `readme-ai-streamlit` project. +- **[Submit Pull Requests](https://github.com/eli64s/readme-ai-streamlit/blob/main/CONTRIBUTING.md)**: Review open PRs, and submit your own PRs. +- **[Join the Discussions](https://github.com/eli64s/readme-ai-streamlit/discussions)**: Share your insights, provide feedback, or ask questions. + +
    +Contributing Guidelines + +1. **Fork the Repository**: Start by forking the project repository to your github account. +2. **Clone Locally**: Clone the forked repository to your local machine using a git client. + ```sh + git clone https://github.com/eli64s/readme-ai-streamlit + ``` +3. **Create a New Branch**: Always work on a new branch, giving it a descriptive name. + ```sh + git checkout -b new-feature-x + ``` +4. **Make Your Changes**: Develop and test your changes locally. +5. **Commit Your Changes**: Commit with a clear message describing your updates. + ```sh + git commit -m 'Implemented new feature x.' + ``` +6. **Push to github**: Push the changes to your forked repository. + ```sh + git push origin new-feature-x + ``` +7. **Submit a Pull Request**: Create a PR against the original project repository. Clearly describe the changes and their motivations. +8. **Review**: Once your PR is reviewed and approved, it will be merged into the main branch. Congratulations on your contribution! +
    + +
    +Contributor Graph +
    +

    + + + +

    +
    + +--- + +## License + +This project is protected under the [SELECT-A-LICENSE](https://choosealicense.com/licenses) License. For more details, refer to the [LICENSE](https://choosealicense.com/licenses/) file. + +--- + +## Acknowledgments + +- List any resources, contributors, inspiration, etc. here. + +--- diff --git a/noxfile.py b/noxfile.py index 7d9571ae..86e215b3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,5 +1,5 @@ """ -Nox file for running tests across multiple Python versions. +Nox configuration for running tests against multiple Python versions. """ import nox @@ -24,16 +24,27 @@ def install(session, groups, root=True): @nox.session(python=["3.9", "3.10", "3.11", "3.12"]) def tests(session): - """Run the test suite across Python versions""" + """Run test suite against Python versions 3.9, 3.10, 3.11, and 3.12.""" session.install(".") - session.install(".[test]") + session.install( + "pytest", + "pytest-asyncio", + "pytest-cov", + "pytest-randomly", + "pytest-sugar", + "pytest-xdist", + ) session.run( + "poetry", + "run", "pytest", - "-vv", - "-n auto", - "--asyncio-mode=auto", - "--cov=./", + "--cov=readmeai", "--cov-branch", "--cov-report=xml", "--cov-report=term-missing", + "--cov-fail-under=80", + "--asyncio-mode=auto", + "--numprocesses=auto", + "--durations=10", + external=True, ) diff --git a/poetry.lock b/poetry.lock index 35fd6409..bf820056 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,91 +1,118 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
+ +[[package]] +name = "aiohappyeyeballs" +version = "2.4.0" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, + {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, +] [[package]] name = "aiohttp" -version = "3.9.4" +version = "3.10.5" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, - {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, - {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, - {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, - {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, - {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, - {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, - {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, - {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", 
hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, - {file = 
"aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, - {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, - {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"}, + {file = 
"aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"}, + {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"}, + {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"}, + {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"}, + {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"}, + {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"}, + {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"}, + {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"}, + {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"}, + {file = 
"aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"}, + {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = 
"sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"}, + {file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"}, + {file = 
"aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"}, + {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"}, + {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"}, + {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"}, ] [package.dependencies] +aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" @@ -94,7 +121,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiosignal" @@ -110,15 +137,51 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anthropic" +version = "0.34.1" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anthropic-0.34.1-py3-none-any.whl", hash = "sha256:2fa26710809d0960d970f26cd0be3686437250a481edb95c33d837aa5fa24158"}, + {file = "anthropic-0.34.1.tar.gz", hash = 
"sha256:69e822bd7a31ec11c2edb85f2147e8f0ee0cfd3288fea70b0ca8808b2f9bf91d"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tokenizers = ">=0.13.0" +typing-extensions = ">=4.7,<5" + +[package.extras] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth (>=2,<3)"] + [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -145,32 +208,32 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", 
"sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" -version = "2.14.0" +version = "2.16.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -178,13 +241,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional 
= false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] @@ -198,6 +261,17 @@ files = [ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -324,63 +398,83 @@ files = [ [[package]] name = "coverage" -version = "7.4.3" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = 
"coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = 
"coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = 
"coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = 
"coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", 
hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = 
"coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -389,6 +483,17 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + [[package]] name = "distro" version = "1.9.0" @@ -402,13 +507,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception 
groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -416,18 +521,34 @@ test = ["pytest (>=6)"] [[package]] name = "execnet" -version = "2.0.2" +version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, - {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, ] [package.extras] testing = ["hatch", "pre-commit", "pytest", "tox"] +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "frozenlist" version = "1.4.1" @@ -514,6 +635,45 @@ files = [ {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] +[[package]] +name = "fsspec" +version = "2024.6.1" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, + {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dev = ["pre-commit", "ruff"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = 
["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] +tqdm = ["tqdm"] + [[package]] name = "ghp-import" version = "2.1.0" @@ -547,60 +707,63 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.42" +version = "3.1.43" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.42-py3-none-any.whl", hash = "sha256:1bf9cd7c9e7255f77778ea54359e54ac22a72a5b51288c457c881057b7bb9ecd"}, - {file = "GitPython-3.1.42.tar.gz", hash = "sha256:2d99869e0fef71a73cbd242528105af1d6c1b108c60dfabd994bf292f76c3ceb"}, + {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", 
"pytest-instafail", "pytest-mock", "pytest-sugar"] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] [[package]] name = "google-ai-generativelanguage" -version = "0.4.0" +version = "0.6.6" description = "Google Ai Generativelanguage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-ai-generativelanguage-0.4.0.tar.gz", hash = "sha256:c8199066c08f74c4e91290778329bb9f357ba1ea5d6f82de2bc0d10552bf4f8c"}, - {file = "google_ai_generativelanguage-0.4.0-py3-none-any.whl", hash = "sha256:e4c425376c1ee26c78acbc49a24f735f90ebfa81bf1a06495fae509a2433232c"}, + {file = "google-ai-generativelanguage-0.6.6.tar.gz", hash = "sha256:1739f035caeeeca5c28f887405eec8690f3372daf79fecf26454a97a4f1733a8"}, + {file = "google_ai_generativelanguage-0.6.6-py3-none-any.whl", hash = "sha256:59297737931f073d55ce1268dcc6d95111ee62850349d2b6cde942b16a4fca5c"}, ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" [[package]] name = "google-api-core" -version = "2.17.1" +version = "2.19.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = 
"google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, - {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, + {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, + {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ + {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] @@ -608,15 +771,33 @@ grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +[[package]] +name = "google-api-python-client" +version = "2.142.0" 
+description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_api_python_client-2.142.0-py2.py3-none-any.whl", hash = "sha256:266799082bb8301f423ec204dffbffb470b502abbf29efd1f83e644d36eb5a8f"}, + {file = "google_api_python_client-2.142.0.tar.gz", hash = "sha256:a1101ac9e24356557ca22f07ff48b7f61fa5d4b4e7feeef3bda16e5dcb86350e"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0" +google-auth-httplib2 = ">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.dev0" +uritemplate = ">=3.0.1,<5" + [[package]] name = "google-auth" -version = "2.28.2" +version = "2.34.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.28.2.tar.gz", hash = "sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30"}, - {file = "google_auth-2.28.2-py2.py3-none-any.whl", hash = "sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38"}, + {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, + {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, ] [package.dependencies] @@ -626,24 +807,40 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +files = [ + {file = 
"google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + [[package]] name = "google-generativeai" -version = "0.4.0" +version = "0.7.2" description = "Google Generative AI High level API client library and tools." optional = false python-versions = ">=3.9" files = [ - {file = "google_generativeai-0.4.0-py3-none-any.whl", hash = "sha256:cf53a51f7c22f0193685e39708e015119b2500626bb2e74ad8c2bf8d8568ef1e"}, + {file = "google_generativeai-0.7.2-py3-none-any.whl", hash = "sha256:3117d1ebc92ee77710d4bc25ab4763492fddce9b6332eb25d124cf5d8b78b339"}, ] [package.dependencies] -google-ai-generativelanguage = "0.4.0" +google-ai-generativelanguage = "0.6.6" google-api-core = "*" +google-api-python-client = "*" google-auth = ">=2.15.0" protobuf = "*" pydantic = "*" @@ -655,101 +852,93 @@ dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "py [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.2" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || 
>4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.62.1" +version = "1.65.5" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, - {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, - {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, - {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, - {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, - {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, - {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, - {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, - {file = "grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, - {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, - {file = 
"grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, - {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, - {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, - {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, - {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, - {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, - {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, - {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, - {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, - {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, - {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, - {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, - {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, - {file = 
"grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, - {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, - {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, - {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, - {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, - {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, - {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, - {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, - {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, - {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, - {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, - {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, - {file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, - {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = 
"sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, - {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, - {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, - {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, - {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, - {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, - {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, - {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, - {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, - {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, - {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, - {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, - {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, - {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, - {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, - {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, - {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, - {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, + {file = "grpcio-1.65.5-cp310-cp310-linux_armv7l.whl", hash = "sha256:b67d450f1e008fedcd81e097a3a400a711d8be1a8b20f852a7b8a73fead50fe3"}, + {file = "grpcio-1.65.5-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a70a20eed87bba647a38bedd93b3ce7db64b3f0e8e0952315237f7f5ca97b02d"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f79c87c114bf37adf408026b9e2e333fe9ff31dfc9648f6f80776c513145c813"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17f9fa2d947dbfaca01b3ab2c62eefa8240131fdc67b924eb42ce6032e3e5c1"}, + {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d60e18ff7c34fe3f6db3d35ad5c6dc99f5b43ff3982cb26fad4174462d10b1"}, + {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe6505376f5b00bb008e4e1418152e3ad3d954b629da286c7913ff3cfc0ff740"}, + {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:33158e56c6378063923c417e9fbdb28660b6e0e2835af42e67f5a7793f587af7"}, + {file = "grpcio-1.65.5-cp310-cp310-win32.whl", hash = "sha256:1cbc208edb9acf1cc339396a1a36b83796939be52f34e591c90292045b579fbf"}, + {file = "grpcio-1.65.5-cp310-cp310-win_amd64.whl", hash = "sha256:bc74f3f745c37e2c5685c9d2a2d5a94de00f286963f5213f763ae137bf4f2358"}, + {file = "grpcio-1.65.5-cp311-cp311-linux_armv7l.whl", hash = 
"sha256:3207ae60d07e5282c134b6e02f9271a2cb523c6d7a346c6315211fe2bf8d61ed"}, + {file = "grpcio-1.65.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a2f80510f99f82d4eb825849c486df703f50652cea21c189eacc2b84f2bde764"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a80e9a5e3f93c54f5eb82a3825ea1fc4965b2fa0026db2abfecb139a5c4ecdf1"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2944390a496567de9e70418f3742b477d85d8ca065afa90432edc91b4bb8ad"}, + {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3655139d7be213c32c79ef6fb2367cae28e56ef68e39b1961c43214b457f257"}, + {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05f02d68fc720e085f061b704ee653b181e6d5abfe315daef085719728d3d1fd"}, + {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1c4caafe71aef4dabf53274bbf4affd6df651e9f80beedd6b8e08ff438ed3260"}, + {file = "grpcio-1.65.5-cp311-cp311-win32.whl", hash = "sha256:84c901cdec16a092099f251ef3360d15e29ef59772150fa261d94573612539b5"}, + {file = "grpcio-1.65.5-cp311-cp311-win_amd64.whl", hash = "sha256:11f8b16121768c1cb99d7dcb84e01510e60e6a206bf9123e134118802486f035"}, + {file = "grpcio-1.65.5-cp312-cp312-linux_armv7l.whl", hash = "sha256:ee6ed64a27588a2c94e8fa84fe8f3b5c89427d4d69c37690903d428ec61ca7e4"}, + {file = "grpcio-1.65.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:76991b7a6fb98630a3328839755181ce7c1aa2b1842aa085fd4198f0e5198960"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:89c00a18801b1ed9cc441e29b521c354725d4af38c127981f2c950c796a09b6e"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:078038e150a897e5e402ed3d57f1d31ebf604cbed80f595bd281b5da40762a92"}, + {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c97962720489ef31b5ad8a916e22bc31bba3664e063fb9f6702dce056d4aa61b"}, + {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b8270b15b99781461b244f5c81d5c2bc9696ab9189fb5ff86c841417fb3b39fe"}, + {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e5c4c15ac3fe1eb68e46bc51e66ad29be887479f231f8237cf8416058bf0cc1"}, + {file = "grpcio-1.65.5-cp312-cp312-win32.whl", hash = "sha256:f5b5970341359341d0e4c789da7568264b2a89cd976c05ea476036852b5950cd"}, + {file = "grpcio-1.65.5-cp312-cp312-win_amd64.whl", hash = "sha256:238a625f391a1b9f5f069bdc5930f4fd71b74426bea52196fc7b83f51fa97d34"}, + {file = "grpcio-1.65.5-cp38-cp38-linux_armv7l.whl", hash = "sha256:6c4e62bcf297a1568f627f39576dbfc27f1e5338a691c6dd5dd6b3979da51d1c"}, + {file = "grpcio-1.65.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d7df567b67d16d4177835a68d3f767bbcbad04da9dfb52cbd19171f430c898bd"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b7ca419f1462390851eec395b2089aad1e49546b52d4e2c972ceb76da69b10f8"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa36dd8496d3af0d40165252a669fa4f6fd2db4b4026b9a9411cbf060b9d6a15"}, + {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a101696f9ece90a0829988ff72f1b1ea2358f3df035bdf6d675dd8b60c2c0894"}, + {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2a6d8169812932feac514b420daffae8ab8e36f90f3122b94ae767e633296b17"}, + {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:47d0aaaab82823f0aa6adea5184350b46e2252e13a42a942db84da5b733f2e05"}, + {file = "grpcio-1.65.5-cp38-cp38-win32.whl", hash = "sha256:85ae8f8517d5bcc21fb07dbf791e94ed84cc28f84c903cdc2bd7eaeb437c8f45"}, + {file = "grpcio-1.65.5-cp38-cp38-win_amd64.whl", hash = "sha256:770bd4bd721961f6dd8049bc27338564ba8739913f77c0f381a9815e465ff965"}, + {file = 
"grpcio-1.65.5-cp39-cp39-linux_armv7l.whl", hash = "sha256:ab5ec837d8cee8dbce9ef6386125f119b231e4333cc6b6d57b6c5c7c82a72331"}, + {file = "grpcio-1.65.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cabd706183ee08d8026a015af5819a0b3a8959bdc9d1f6fdacd1810f09200f2a"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ec71fc5b39821ad7d80db7473c8f8c2910f3382f0ddadfbcfc2c6c437107eb67"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a9e35bcb045e39d7cac30464c285389b9a816ac2067e4884ad2c02e709ef8e"}, + {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d750e9330eb14236ca11b78d0c494eed13d6a95eb55472298f0e547c165ee324"}, + {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b91ce647b6307f25650872454a4d02a2801f26a475f90d0b91ed8110baae589"}, + {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8da58ff80bc4556cf29bc03f5fff1f03b8387d6aaa7b852af9eb65b2cf833be4"}, + {file = "grpcio-1.65.5-cp39-cp39-win32.whl", hash = "sha256:7a412959aa5f08c5ac04aa7b7c3c041f5e4298cadd4fcc2acff195b56d185ebc"}, + {file = "grpcio-1.65.5-cp39-cp39-win_amd64.whl", hash = "sha256:55714ea852396ec9568f45f487639945ab674de83c12bea19d5ddbc3ae41ada3"}, + {file = "grpcio-1.65.5.tar.gz", hash = "sha256:ec6f219fb5d677a522b0deaf43cea6697b16f338cb68d009e30930c4aa0d2209"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.62.1)"] +protobuf = ["grpcio-tools (>=1.65.5)"] [[package]] name = "grpcio-status" -version = "1.62.1" +version = "1.62.3" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.62.1.tar.gz", hash = "sha256:3431c8abbab0054912c41df5c72f03ddf3b7a67be8a287bb3c18a3456f96ff77"}, - {file = "grpcio_status-1.62.1-py3-none-any.whl", hash = "sha256:af0c3ab85da31669f21749e8d53d669c061ebc6ce5637be49a46edcb7aa8ab17"}, + {file = "grpcio-status-1.62.3.tar.gz", hash = 
"sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"}, + {file = "grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.1" +grpcio = ">=1.62.3" protobuf = ">=4.21.6" [[package]] @@ -765,13 +954,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.4" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -782,7 +971,21 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httplib2" +version = "0.22.0" +description = "A comprehensive HTTP client library." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] + +[package.dependencies] +pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} [[package]] name = "httpx" @@ -808,6 +1011,54 @@ cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +[[package]] +name = "huggingface-hub" +version = "0.24.6" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.24.6-py3-none-any.whl", hash = "sha256:a990f3232aa985fe749bc9474060cbad75e8b2f115f6665a9fda5b9c97818970"}, + {file = "huggingface_hub-0.24.6.tar.gz", hash = "sha256:cc2579e761d070713eaa9c323e3debe39d5b464ae3a7261c39a9195b27bb8000"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", 
"pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "minijinja (>=1.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors[torch]", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "identify" +version = "2.6.0" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.7" @@ -821,22 +1072,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.2" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, - {file = 
"importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -866,15 +1117,85 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jiter" +version = "0.5.0" +description = "Fast iterable JSON parser." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, + {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, + {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, + {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, + {file = "jiter-0.5.0-cp311-none-win32.whl", hash = "sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, + {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338"}, + {file = "jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4"}, + {file = "jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f04bc2fc50dc77be9d10f73fcc4e39346402ffe21726ff41028f36e179b587e6"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f433a4169ad22fcb550b11179bb2b4fd405de9b982601914ef448390b2954f3"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ad4a6398c85d3a20067e6c69890ca01f68659da94d74c800298581724e426c7e"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6baa88334e7af3f4d7a5c66c3a63808e5efbc3698a1c57626541ddd22f8e4fbf"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ece0a115c05efca597c6d938f88c9357c843f8c245dbbb53361a1c01afd7148"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:335942557162ad372cc367ffaf93217117401bf930483b4b3ebdb1223dbddfa7"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649b0ee97a6e6da174bffcb3c8c051a5935d7d4f2f52ea1583b5b3e7822fbf14"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4be354c5de82157886ca7f5925dbda369b77344b4b4adf2723079715f823989"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5206144578831a6de278a38896864ded4ed96af66e1e63ec5dd7f4a1fce38a3a"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8120c60f8121ac3d6f072b97ef0e71770cc72b3c23084c72c4189428b1b1d3b6"}, + {file = "jiter-0.5.0-cp38-none-win32.whl", hash = "sha256:6f1223f88b6d76b519cb033a4d3687ca157c272ec5d6015c322fc5b3074d8a5e"}, + {file = "jiter-0.5.0-cp38-none-win_amd64.whl", hash = "sha256:c59614b225d9f434ea8fc0d0bec51ef5fa8c83679afedc0433905994fb36d631"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, + {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, + {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, + {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, +] + [[package]] name = "markdown" -version = "3.5.2" +version = "3.7" description = "Python implementation of John Gruber's Markdown." 
optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, - {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.dependencies] @@ -966,44 +1287,61 @@ files = [ [[package]] name = "mkdocs" -version = "1.5.3" +version = "1.6.0" description = "Project documentation with Markdown." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, - {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, + {file = "mkdocs-1.6.0-py3-none-any.whl", hash = "sha256:1eb5cb7676b7d89323e62b56235010216319217d4af5ddc543a91beb8d125ea7"}, + {file = "mkdocs-1.6.0.tar.gz", hash = "sha256:a73f735824ef83a4f3bcb7a231dcab23f5a838f88b7efc54a0eef5fbdbc3c512"}, ] [package.dependencies] click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" -importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" -markdown = ">=3.2.1" +markdown = ">=3.3.6" markupsafe = ">=2.0.1" mergedeep = ">=1.3.4" +mkdocs-get-deps = ">=0.2.0" packaging = ">=20.5" pathspec = ">=0.11.1" -platformdirs = ">=2.2.0" pyyaml = ">=5.1" pyyaml-env-tag = ">=0.1" watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", 
"importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, + {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +mergedeep = ">=1.3.4" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.13" +version = "9.5.32" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.13-py3-none-any.whl", hash = "sha256:5cbe17fee4e3b4980c8420a04cc762d8dc052ef1e10532abd4fce88e5ea9ce6a"}, - {file = "mkdocs_material-9.5.13.tar.gz", hash = "sha256:d8e4caae576312a88fd2609b81cf43d233cdbe36860d67a68702b018b425bd87"}, + {file = "mkdocs_material-9.5.32-py3-none-any.whl", hash = "sha256:f3704f46b63d31b3cd35c0055a72280bed825786eccaf19c655b44e0cd2c6b3f"}, + {file = "mkdocs_material-9.5.32.tar.gz", hash = "sha256:38ed66e6d6768dde4edde022554553e48b2db0d26d1320b19e2e2b9da0be1120"}, ] 
[package.dependencies] @@ -1011,7 +1349,7 @@ babel = ">=2.10,<3.0" colorama = ">=0.4,<1.0" jinja2 = ">=3.0,<4.0" markdown = ">=3.2,<4.0" -mkdocs = ">=1.5.3,<1.6.0" +mkdocs = ">=1.6,<2.0" mkdocs-material-extensions = ">=1.3,<2.0" paginate = ">=0.5,<1.0" pygments = ">=2.16,<3.0" @@ -1134,38 +1472,108 @@ files = [ {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] +[[package]] +name = "mypy" +version = "1.11.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = 
"mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = 
"mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + [[package]] name = "openai" -version = "1.13.3" +version = "1.42.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.13.3-py3-none-any.whl", hash = "sha256:5769b62abd02f350a8dd1a3a242d8972c947860654466171d60fb0972ae0a41c"}, - {file = "openai-1.13.3.tar.gz", hash = "sha256:ff6c6b3bc7327e715e4b3592a923a5a1c7519ff5dd764a83d69f633d49e77a7b"}, + {file = "openai-1.42.0-py3-none-any.whl", hash = "sha256:dc91e0307033a4f94931e5d03cc3b29b9717014ad5e73f9f2051b6cb5eda4d80"}, + {file = "openai-1.42.0.tar.gz", hash = "sha256:c9d31853b4e0bc2dc8bd08003b462a006035655a701471695d0bfdc08529cde3"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tqdm = ">4" -typing-extensions = ">=4.7,<5" +typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = 
"packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -1191,181 +1599,306 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = 
"pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pre-commit" +version = "3.8.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "proto-plus" -version = "1.23.0" +version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, ] [package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] +testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.3" +version = "4.25.4" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = 
"sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, + {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, + {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, + {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, + {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, + {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, + {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, + {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, + {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, + {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, + {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, ] [[package]] name = "pyasn1" -version = 
"0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pydantic" -version = "1.10.14" -description = "Data validation and settings management using python type hints" +version = "2.8.2" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = 
"sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = 
"pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = 
"pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, 
+ {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = 
"sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-extra-types" +version = "2.9.0" +description = "Extra Pydantic types." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_extra_types-2.9.0-py3-none-any.whl", hash = "sha256:f0bb975508572ba7bf3390b7337807588463b7248587e69f43b1ad7c797530d0"}, + {file = "pydantic_extra_types-2.9.0.tar.gz", hash = "sha256:e061c01636188743bb69f368dcd391f327b8cfbfede2fe1cbb1211b06601ba3b"}, +] + +[package.dependencies] +pydantic = ">=2.5.2" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2)", "python-ulid (>=1,<3)", "pytz (>=2024.1)", "semver (>=3.0.2)", "tzdata (>=2024.1)"] +pendulum = ["pendulum (>=3.0.0,<4.0.0)"] +phonenumbers = ["phonenumbers (>=8,<9)"] +pycountry = ["pycountry (>=23)"] +python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<3)"] +semver = ["semver (>=3.0.2)"] [[package]] name = "pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" -version = "10.7.1" +version = "10.9" description = "Extension pack for Python Markdown." 
optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.7.1-py3-none-any.whl", hash = "sha256:f5cc7000d7ff0d1ce9395d216017fa4df3dde800afb1fb72d1c7d3fd35e710f4"}, - {file = "pymdown_extensions-10.7.1.tar.gz", hash = "sha256:c70e146bdd83c744ffc766b4671999796aba18842b268510a329f7f64700d584"}, + {file = "pymdown_extensions-10.9-py3-none-any.whl", hash = "sha256:d323f7e90d83c86113ee78f3fe62fc9dee5f56b54d912660703ea1816fed5626"}, + {file = "pymdown_extensions-10.9.tar.gz", hash = "sha256:6ff740bcd99ec4172a938970d42b96128bdc9d4b9bcad72494f29921dc69b753"}, ] [package.dependencies] -markdown = ">=3.5" +markdown = ">=3.6" pyyaml = "*" [package.extras] extra = ["pygments (>=2.12)"] +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pytest" version = "7.4.4" @@ -1390,13 +1923,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.23.5.post1" +version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.5.post1.tar.gz", hash = "sha256:b9a8806bea78c21276bc34321bbf234ba1b2ea5b30d9f0ce0f2dea45e4685813"}, - {file = "pytest_asyncio-0.23.5.post1-py3-none-any.whl", hash = "sha256:30f54d27774e79ac409778889880242b0403d09cabd65b727ce90fe92dd5d80e"}, + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = 
"pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, ] [package.dependencies] @@ -1408,13 +1941,13 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "4.1.0" +version = "5.0.0" description = "Pytest plugin for measuring coverage." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] @@ -1422,7 +1955,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-randomly" @@ -1460,18 +1993,18 @@ dev = ["black", "flake8", "pre-commit"] [[package]] name = "pytest-xdist" -version = "3.5.0" +version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, - {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", 
hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, ] [package.dependencies] -execnet = ">=1.1" -pytest = ">=6.2.0" +execnet = ">=2.1" +pytest = ">=7.0.0" [package.extras] psutil = ["psutil (>=3.0)"] @@ -1494,62 +2027,64 @@ six = ">=1.5" [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = 
"PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", 
hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = 
"sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -1568,115 +2103,101 @@ pyyaml = "*" [[package]] name = "regex" -version = "2023.12.25" +version = "2024.7.24" description = "Alternative regular expression module, to replace re." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, - {file = 
"regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, - {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, - {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, - {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, - {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, - {file = 
"regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, - {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, - {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = 
"sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, - {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, - {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, - {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, - {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, - {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, - {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, - {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, - {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, + {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = 
"sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, + {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, + 
{file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, + {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, + {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, + {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, + {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, + {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, + {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, + {file = 
"regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, + {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, + {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash 
= "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, + {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, ] [[package]] name = "requests" -version = "2.32.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.0-py3-none-any.whl", hash = "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5"}, - {file = "requests-2.32.0.tar.gz", hash = "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1705,28 +2226,29 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.3.2" +version = "0.6.1" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77f2612752e25f730da7421ca5e3147b213dca4f9a0f7e0b534e9562c5441f01"}, - {file = "ruff-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9966b964b2dd1107797be9ca7195002b874424d1d5472097701ae8f43eadef5d"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b83d17ff166aa0659d1e1deaf9f2f14cbe387293a906de09bc4860717eb2e2da"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb875c6cc87b3703aeda85f01c9aebdce3d217aeaca3c2e52e38077383f7268a"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be75e468a6a86426430373d81c041b7605137a28f7014a72d2fc749e47f572aa"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:967978ac2d4506255e2f52afe70dda023fc602b283e97685c8447d036863a302"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1231eacd4510f73222940727ac927bc5d07667a86b0cbe822024dd00343e77e9"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6d613b19e9a8021be2ee1d0e27710208d1603b56f47203d0abbde906929a9b"}, - {file = "ruff-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8439338a6303585d27b66b4626cbde89bb3e50fa3cae86ce52c1db7449330a7"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:de8b480d8379620cbb5ea466a9e53bb467d2fb07c7eca54a4aa8576483c35d36"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b74c3de9103bd35df2bb05d8b2899bf2dbe4efda6474ea9681280648ec4d237d"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f380be9fc15a99765c9cf316b40b9da1f6ad2ab9639e551703e581a5e6da6745"}, - {file = "ruff-0.3.2-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:0ac06a3759c3ab9ef86bbeca665d31ad3aa9a4b1c17684aadb7e61c10baa0df4"}, - {file = "ruff-0.3.2-py3-none-win32.whl", hash = "sha256:9bd640a8f7dd07a0b6901fcebccedadeb1a705a50350fb86b4003b805c81385a"}, - {file = "ruff-0.3.2-py3-none-win_amd64.whl", hash = "sha256:0c1bdd9920cab5707c26c8b3bf33a064a4ca7842d91a99ec0634fec68f9f4037"}, - {file = "ruff-0.3.2-py3-none-win_arm64.whl", hash = "sha256:5f65103b1d76e0d600cabd577b04179ff592064eaa451a70a81085930e907d0b"}, - {file = "ruff-0.3.2.tar.gz", hash = "sha256:fa78ec9418eb1ca3db392811df3376b46471ae93792a81af2d1cbb0e5dcb5142"}, + {file = "ruff-0.6.1-py3-none-linux_armv6l.whl", hash = "sha256:b4bb7de6a24169dc023f992718a9417380301b0c2da0fe85919f47264fb8add9"}, + {file = "ruff-0.6.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:45efaae53b360c81043e311cdec8a7696420b3d3e8935202c2846e7a97d4edae"}, + {file = "ruff-0.6.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bc60c7d71b732c8fa73cf995efc0c836a2fd8b9810e115be8babb24ae87e0850"}, + {file = "ruff-0.6.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c7477c3b9da822e2db0b4e0b59e61b8a23e87886e727b327e7dcaf06213c5cf"}, + {file = "ruff-0.6.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3a0af7ab3f86e3dc9f157a928e08e26c4b40707d0612b01cd577cc84b8905cc9"}, + {file = "ruff-0.6.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:392688dbb50fecf1bf7126731c90c11a9df1c3a4cdc3f481b53e851da5634fa5"}, + {file = "ruff-0.6.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5278d3e095ccc8c30430bcc9bc550f778790acc211865520f3041910a28d0024"}, + {file = "ruff-0.6.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe6d5f65d6f276ee7a0fc50a0cecaccb362d30ef98a110f99cac1c7872df2f18"}, + {file = "ruff-0.6.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2e0dd11e2ae553ee5c92a81731d88a9883af8db7408db47fc81887c1f8b672e"}, + {file = 
"ruff-0.6.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d812615525a34ecfc07fd93f906ef5b93656be01dfae9a819e31caa6cfe758a1"}, + {file = "ruff-0.6.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:faaa4060f4064c3b7aaaa27328080c932fa142786f8142aff095b42b6a2eb631"}, + {file = "ruff-0.6.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99d7ae0df47c62729d58765c593ea54c2546d5de213f2af2a19442d50a10cec9"}, + {file = "ruff-0.6.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9eb18dfd7b613eec000e3738b3f0e4398bf0153cb80bfa3e351b3c1c2f6d7b15"}, + {file = "ruff-0.6.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c62bc04c6723a81e25e71715aa59489f15034d69bf641df88cb38bdc32fd1dbb"}, + {file = "ruff-0.6.1-py3-none-win32.whl", hash = "sha256:9fb4c4e8b83f19c9477a8745e56d2eeef07a7ff50b68a6998f7d9e2e3887bdc4"}, + {file = "ruff-0.6.1-py3-none-win_amd64.whl", hash = "sha256:c2ebfc8f51ef4aca05dad4552bbcf6fe8d1f75b2f6af546cc47cc1c1ca916b5b"}, + {file = "ruff-0.6.1-py3-none-win_arm64.whl", hash = "sha256:3bc81074971b0ffad1bd0c52284b22411f02a11a012082a76ac6da153536e014"}, + {file = "ruff-0.6.1.tar.gz", hash = "sha256:af3ffd8c6563acb8848d33cd19a69b9bfe943667f0419ca083f8ebe4224a3436"}, ] [[package]] @@ -1764,17 +2286,18 @@ files = [ [[package]] name = "tenacity" -version = "8.2.3" +version = "8.5.0" description = "Retry code until it succeeds" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, - {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, + {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, + {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, ] [package.extras] -doc = ["reno", "sphinx", 
"tornado (>=4.5)"] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "termcolor" @@ -1836,16 +2359,122 @@ requests = ">=2.26.0" blobfile = ["blobfile (>=2)"] [[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" +name = "tokenizers" +version = "0.20.0" +description = "" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.7" files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, + {file = "tokenizers-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6cff5c5e37c41bc5faa519d6f3df0679e4b37da54ea1f42121719c5e2b4905c0"}, + {file = "tokenizers-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:62a56bf75c27443432456f4ca5ca055befa95e25be8a28141cc495cac8ae4d6d"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc7de6a63f09c4a86909c2597b995aa66e19df852a23aea894929c74369929"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:053c37ecee482cc958fdee53af3c6534286a86f5d35aac476f7c246830e53ae5"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d7074aaabc151a6363fa03db5493fc95b423b2a1874456783989e96d541c7b6"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a11435780f2acd89e8fefe5e81cecf01776f6edb9b3ac95bcb76baee76b30b90"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a81cd2712973b007d84268d45fc3f6f90a79c31dfe7f1925e6732f8d2959987"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d7dfd796ab9d909f76fb93080e1c7c8309f196ecb316eb130718cd5e34231c69"}, + {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8029ad2aa8cb00605c9374566034c1cc1b15130713e0eb5afcef6cface8255c9"}, + {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca4d54260ebe97d59dfa9a30baa20d0c4dd9137d99a8801700055c561145c24e"}, + {file = "tokenizers-0.20.0-cp310-none-win32.whl", hash = "sha256:95ee16b57cec11b86a7940174ec5197d506439b0f415ab3859f254b1dffe9df0"}, + {file = "tokenizers-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:0a61a11e93eeadbf02aea082ffc75241c4198e0608bbbac4f65a9026851dcf37"}, + {file = "tokenizers-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6636b798b3c4d6c9b1af1a918bd07c867808e5a21c64324e95318a237e6366c3"}, + {file = "tokenizers-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ec603e42eaf499ffd58b9258162add948717cf21372458132f14e13a6bc7172"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce124264903a8ea6f8f48e1cc7669e5ef638c18bd4ab0a88769d5f92debdf7f"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07bbeba0231cf8de07aa6b9e33e9779ff103d47042eeeb859a8c432e3292fb98"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06c0ca8397b35d38b83a44a9c6929790c1692957d88541df061cb34d82ebbf08"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca6557ac3b83d912dfbb1f70ab56bd4b0594043916688e906ede09f42e192401"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a5ad94c9e80ac6098328bee2e3264dbced4c6faa34429994d473f795ec58ef4"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b5c7f906ee6bec30a9dc20268a8b80f3b9584de1c9f051671cb057dc6ce28f6"}, + {file = 
"tokenizers-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:31e087e9ee1b8f075b002bfee257e858dc695f955b43903e1bb4aa9f170e37fe"}, + {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c3124fb6f3346cb3d8d775375d3b429bf4dcfc24f739822702009d20a4297990"}, + {file = "tokenizers-0.20.0-cp311-none-win32.whl", hash = "sha256:a4bb8b40ba9eefa621fdcabf04a74aa6038ae3be0c614c6458bd91a4697a452f"}, + {file = "tokenizers-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:2b709d371f1fe60a28ef0c5c67815952d455ca7f34dbe7197eaaed3cc54b658e"}, + {file = "tokenizers-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:15c81a17d0d66f4987c6ca16f4bea7ec253b8c7ed1bb00fdc5d038b1bb56e714"}, + {file = "tokenizers-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a531cdf1fb6dc41c984c785a3b299cb0586de0b35683842a3afbb1e5207f910"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06caabeb4587f8404e0cd9d40f458e9cba3e815c8155a38e579a74ff3e2a4301"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8768f964f23f5b9f50546c0369c75ab3262de926983888bbe8b98be05392a79c"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:626403860152c816f97b649fd279bd622c3d417678c93b4b1a8909b6380b69a8"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c1b88fa9e5ff062326f4bf82681da5a96fca7104d921a6bd7b1e6fcf224af26"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7e559436a07dc547f22ce1101f26d8b2fad387e28ec8e7e1e3b11695d681d8"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48afb75e50449848964e4a67b0da01261dd3aa8df8daecf10db8fd7f5b076eb"}, + {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:baf5d0e1ff44710a95eefc196dd87666ffc609fd447c5e5b68272a7c3d342a1d"}, + {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e5e56df0e8ed23ba60ae3848c3f069a0710c4b197218fe4f89e27eba38510768"}, + {file = "tokenizers-0.20.0-cp312-none-win32.whl", hash = "sha256:ec53e5ecc142a82432f9c6c677dbbe5a2bfee92b8abf409a9ecb0d425ee0ce75"}, + {file = "tokenizers-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:f18661ece72e39c0dfaa174d6223248a15b457dbd4b0fc07809b8e6d3ca1a234"}, + {file = "tokenizers-0.20.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:f7065b1084d8d1a03dc89d9aad69bcbc8415d4bc123c367063eb32958cd85054"}, + {file = "tokenizers-0.20.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e5d4069e4714e3f7ba0a4d3d44f9d84a432cd4e4aa85c3d7dd1f51440f12e4a1"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799b808529e54b7e1a36350bda2aeb470e8390e484d3e98c10395cee61d4e3c6"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f9baa027cc8a281ad5f7725a93c204d7a46986f88edbe8ef7357f40a23fb9c7"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:010ec7f3f7a96adc4c2a34a3ada41fa14b4b936b5628b4ff7b33791258646c6b"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98d88f06155335b14fd78e32ee28ca5b2eb30fced4614e06eb14ae5f7fba24ed"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e13eb000ef540c2280758d1b9cfa5fe424b0424ae4458f440e6340a4f18b2638"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab3cf066ff426f7e6d70435dc28a9ff01b2747be83810e397cba106f39430b0"}, + {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:39fa3761b30a89368f322e5daf4130dce8495b79ad831f370449cdacfb0c0d37"}, + {file = 
"tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c8da0fba4d179ddf2607821575998df3c294aa59aa8df5a6646dc64bc7352bce"}, + {file = "tokenizers-0.20.0-cp37-none-win32.whl", hash = "sha256:fada996d6da8cf213f6e3c91c12297ad4f6cdf7a85c2fadcd05ec32fa6846fcd"}, + {file = "tokenizers-0.20.0-cp37-none-win_amd64.whl", hash = "sha256:7d29aad702279e0760c265fcae832e89349078e3418dd329732d4503259fd6bd"}, + {file = "tokenizers-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:099c68207f3ef0227ecb6f80ab98ea74de559f7b124adc7b17778af0250ee90a"}, + {file = "tokenizers-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:68012d8a8cddb2eab3880870d7e2086cb359c7f7a2b03f5795044f5abff4e850"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9253bdd209c6aee168deca7d0e780581bf303e0058f268f9bb06859379de19b6"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f868600ddbcb0545905ed075eb7218a0756bf6c09dae7528ea2f8436ebd2c93"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9643d9c8c5f99b6aba43fd10034f77cc6c22c31f496d2f0ee183047d948fa0"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c375c6a889aeab44734028bc65cc070acf93ccb0f9368be42b67a98e1063d3f6"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e359f852328e254f070bbd09a19a568421d23388f04aad9f2fb7da7704c7228d"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d98b01a309d4387f3b1c1dd68a8b8136af50376cf146c1b7e8d8ead217a5be4b"}, + {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:459f7537119554c2899067dec1ac74a00d02beef6558f4ee2e99513bf6d568af"}, + {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:392b87ec89452628c045c9f2a88bc2a827f4c79e7d84bc3b72752b74c2581f70"}, + {file = "tokenizers-0.20.0-cp38-none-win32.whl", hash = "sha256:55a393f893d2ed4dd95a1553c2e42d4d4086878266f437b03590d3f81984c4fe"}, + {file = "tokenizers-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:30ffe33c5c2f2aab8e9a3340d0110dd9f7ace7eec7362e20a697802306bd8068"}, + {file = "tokenizers-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aa2d4a6fed2a7e3f860c7fc9d48764bb30f2649d83915d66150d6340e06742b8"}, + {file = "tokenizers-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5ef0f814084a897e9071fc4a868595f018c5c92889197bdc4bf19018769b148"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1e1b791e8c3bf4c4f265f180dadaff1c957bf27129e16fdd5e5d43c2d3762c"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b69e55e481459c07885263743a0d3c18d52db19bae8226a19bcca4aaa213fff"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806b4d82e27a2512bc23057b2986bc8b85824914286975b84d8105ff40d03d9"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9859e9ef13adf5a473ccab39d31bff9c550606ae3c784bf772b40f615742a24f"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef703efedf4c20488a8eb17637b55973745b27997ff87bad88ed499b397d1144"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eec0061bab94b1841ab87d10831fdf1b48ebaed60e6d66d66dbe1d873f92bf5"}, + {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:980f3d0d7e73f845b69087f29a63c11c7eb924c4ad6b358da60f3db4cf24bdb4"}, + {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c157550a2f3851b29d7fdc9dc059fcf81ff0c0fc49a1e5173a89d533ed043fa"}, + {file = 
"tokenizers-0.20.0-cp39-none-win32.whl", hash = "sha256:8a3d2f4d08608ec4f9895ec25b4b36a97f05812543190a5f2c3cd19e8f041e5a"}, + {file = "tokenizers-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:d90188d12afd0c75e537f9a1d92f9c7375650188ee4f48fdc76f9e38afbd2251"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d68e15f1815357b059ec266062340c343ea7f98f7f330602df81ffa3474b6122"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:23f9ecec637b9bc80da5f703808d29ed5329e56b5aa8d791d1088014f48afadc"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f830b318ee599e3d0665b3e325f85bc75ee2d2ca6285f52e439dc22b64691580"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3dc750def789cb1de1b5a37657919545e1d9ffa667658b3fa9cb7862407a1b8"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e26e6c755ae884c2ea6135cd215bdd0fccafe4ee62405014b8c3cd19954e3ab9"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a1158c7174f427182e08baa2a8ded2940f2b4a3e94969a85cc9cfd16004cbcea"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6324826287a3fc198898d3dcf758fe4a8479e42d6039f4c59e2cedd3cf92f64e"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d8653149405bb0c16feaf9cfee327fdb6aaef9dc2998349fec686f35e81c4e2"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a2dc1e402a155e97309287ca085c80eb1b7fab8ae91527d3b729181639fa51"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bef67b20aa6e5f7868c42c7c5eae4d24f856274a464ae62e47a0f2cccec3da"}, + {file = 
"tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da06e397182ff53789c506c7833220c192952c57e1581a53f503d8d953e2d67e"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:302f7e11a14814028b7fc88c45a41f1bbe9b5b35fd76d6869558d1d1809baa43"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:055ec46e807b875589dfbe3d9259f9a6ee43394fb553b03b3d1e9541662dbf25"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e3144b8acebfa6ae062e8f45f7ed52e4b50fb6c62f93afc8871b525ab9fdcab3"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b52aa3fd14b2a07588c00a19f66511cff5cca8f7266ca3edcdd17f3512ad159f"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b8cf52779ffc5d4d63a0170fbeb512372bad0dd014ce92bbb9149756c831124"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:983a45dd11a876124378dae71d6d9761822199b68a4c73f32873d8cdaf326a5b"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6b819c9a19831ebec581e71a7686a54ab45d90faf3842269a10c11d746de0c"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e738cfd80795fcafcef89c5731c84b05638a4ab3f412f97d5ed7765466576eb1"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8842c7be2fadb9c9edcee233b1b7fe7ade406c99b0973f07439985c1c1d0683"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e47a82355511c373a4a430c4909dc1e518e00031207b1fec536c49127388886b"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9afbf359004551179a5db19424180c81276682773cff2c5d002f6eaaffe17230"}, + {file = 
"tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07eaa8799a92e6af6f472c21a75bf71575de2af3c0284120b7a09297c0de2f3"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0994b2e5fc53a301071806bc4303e4bc3bdc3f490e92a21338146a36746b0872"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6466e0355b603d10e3cc3d282d350b646341b601e50969464a54939f9848d0"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1e86594c2a433cb1ea09cfbe596454448c566e57ee8905bd557e489d93e89986"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3e14cdef1efa96ecead6ea64a891828432c3ebba128bdc0596e3059fea104ef3"}, + {file = "tokenizers-0.20.0.tar.gz", hash = "sha256:39d7acc43f564c274085cafcd1dae9d36f332456de1a31970296a6b8da4eac8d"}, ] +[package.dependencies] +huggingface-hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] + [[package]] name = "tomli" version = "2.0.1" @@ -1859,13 +2488,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.3" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.3-py3-none-any.whl", hash = "sha256:4f41d54107ff9a223dca80b53efe4fb654c67efaba7f47bada3ee9d50e05bd53"}, - {file = "tqdm-4.66.3.tar.gz", hash = "sha256:23097a41eba115ba99ecae40d06444c15d1c0c698d527a01c6c8bd1c5d0647e5"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -1879,13 +2508,24 @@ telegram = 
["requests"] [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.6" +files = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, ] [[package]] @@ -1905,42 +2545,68 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier 
(>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "watchdog" -version = "4.0.0" +version = "4.0.2" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, 
- {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = 
"sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + 
{file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, ] [package.extras] @@ -2051,20 +2717,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.17.0" +version = "3.20.0" description = "Backport of 
pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9,<4.0.0" -content-hash = "f0429b190d9cbe98665fe89bad2b6577361c1631f14a997b78c99403aa9dc9ef" +content-hash = "3d9eba786af2c4a10972d187d04aa1bdc1fd2f616e9d03880a223c8ca8c74a9e" diff --git a/pyproject.toml b/pyproject.toml index 50ab05d0..6fda974e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,37 +1,28 @@ -[build-system] -requires = ["poetry-core", "setuptools"] -build-backend = "poetry.core.masonry.api" - [tool.poetry] name = "readmeai" -version = "0.5.079" -description = "๐Ÿ‘พ Automated README file generator, powered by large language 
model APIs." +version = "0.5.080" +description = "README file generator, powered by large language model APIs 👾" authors = ["Eli "] license = "MIT" readme = "README.md" homepage = "https://github.com/eli64s/readme-ai" documentation = "https://eli64s.github.io/readme-ai" keywords = [ - "python", - "markdown", - "readme", - "readme-md", - "devtools", + "ai", + "ai-documentation", + "badge-generator", "developer-tools", + "devtools", "documentation", "documentation-generator", - "large-language-models", - "generative-ai", + "markdown", + "markdown-generator", + "python", + "readme", "readme-generator", + "readme-md", "readme-md-generator", - "readme-badges", - "shieldsio-badges", - "ollama", - "openai", - "chatgpt", - "gpt-4", - "genai", - "ai", + "readme-template", ] include = ["readmeai", "readmeai.*"] @@ -40,18 +31,21 @@ readmeai = "readmeai.cli.main:main" [tool.poetry.dependencies] python = "^3.9,<4.0.0" -aiohttp = "^3.9.3" +aiohttp = "^3.9.5" click = "^8.1.7" gitpython = "^3.1.31" -google-generativeai = "^0.4.0" -openai = "*" -pydantic = ">=1.10.9,<2.0.0" +google-generativeai = "^0.7.1" +openai = "^1.16.2" +pydantic = "^2.8.0" +pydantic-extra-types = "^2.9.0" pyyaml = "^6.0.1" +requests = "^2.32.3" tenacity = "^8.2.2" tiktoken = "^0.4.0" -toml = { version = "*", python = "<3.11" } [tool.poetry.group.dev.dependencies] +mypy = "*" +pre-commit = "*" ruff = "*" [tool.poetry.group.test.dependencies] @@ -66,70 +60,53 @@ pytest-xdist = "*" mkdocs = "*" mkdocs-material = "*" -[tool.ruff] -exclude = [ - ".ipynb_checkpoints", - ".mypy_cache", - ".nox", - ".pytest_cache", - ".ruff_cache", - ".vscode", -] -line-length = 79 +[tool.poetry.group.anthropic] +optional = true -[tool.ruff.format] -quote-style = "double" -docstring-code-format = true -docstring-code-line-length = 20 +[tool.poetry.group.anthropic.dependencies] +anthropic = "*" -[tool.ruff.lint] -extend-select = ["E501"] -select = [ - # pycodestyle - "E", - # Pyflakes - "F", - # pyupgrade - "UP", - # flake8-bugbear 
- "B", - # flake8-simplify - "SIM", - # isort - "I", +[tool.pytest.ini_options] +addopts = [ + "-vvv", + "-rfEsxX", + "--durations=10", + "--asyncio-mode=auto", + "--numprocesses=auto", + "--cov=readmeai", + "--cov-branch", + "--cov-report=xml", + "--cov-report=term-missing", ] -ignore = [ - # Line too long - "E501", +env = [ + "OLLAMA_HOST=127.0.0.1", ] +pythonpath = ["readmeai"] +testpaths = ["tests"] -[tool.coverage.run] -source = ["readmeai", "tests"] +[tool.coverage.report] +exclude_also = [ + "if __name__ == '__main__':", + "pragma: no cover", + "raise NotImplementedError", +] +fail_under = 80 +precision = 2 +show_missing = true +skip_covered = true +skip_empty = true +sort = "cover" omit = [ "tests/*", "*__init__.py", "*/__init__.py", "noxfile.py", - "readmeai/templates/*.py", - "readmeai/ui/*.py", - "readmeai/parsers/cicd/*.py", - "readmeai/parsers/infrastructure/*.py", - "readmeai/parsers/orchestration/*.py", - "readmeai/parsers/configuration/ansible.py", - "readmeai/parsers/configuration/apache.py", - "readmeai/parsers/configuration/nginx.py", - "readmeai/parsers/configuration/properties.py", - "readmeai/parsers/package/compose.py", - "readmeai/parsers/package/gem.py", - "readmeai/parsers/package/pip.py", - "readmeai/parsers/package/nuget.py", - "readmeai/parsers/package/yarn.py", + "readmeai/models/gemini.py", "readmeai/utils/file_cleaner.py", + "readmeai/vcs/metadata.py", + "readmeai/vcs/providers.py", ] -[tool.coverage.report] -fail_under = 80 -show_missing = true - -[tool.coverage.xml] -output = "coverage.xml" +[build-system] +requires = ["poetry-core", "setuptools"] +build-backend = "poetry.core.masonry.api" diff --git a/readmeai/_agent.py b/readmeai/__main__.py similarity index 50% rename from readmeai/_agent.py rename to readmeai/__main__.py index a91e0fed..e024bf29 100644 --- a/readmeai/_agent.py +++ b/readmeai/__main__.py @@ -1,16 +1,16 @@ #!/usr/bin/env python3 """ -Runs the README.md file generation process. 
+Main module for the README file generator agent. """ __package__ = "readmeai" import asyncio +import contextlib import tempfile import traceback -from pathlib import Path -from typing import Optional +from collections.abc import Generator from readmeai._exceptions import ReadmeGeneratorError from readmeai.cli.options import ImageOptions, ModelOptions @@ -20,38 +20,54 @@ from readmeai.core.utils import get_environment from readmeai.generators.builder import MarkdownBuilder from readmeai.models.dalle import DalleHandler -from readmeai.models.factory import ModelFactory -from readmeai.services.git import clone_repository +from readmeai.models.factory import ModelRegistry from readmeai.utils.file_handler import FileHandler +from readmeai.vcs.ingestor import retrieve_repository _logger = Logger(__name__) +@contextlib.contextmanager +def error_handler() -> Generator: + """ + Exception handler for the README generation process. + """ + try: + yield + except Exception as exc: + _logger.error(f"Error in README generation process: {exc}") + raise ReadmeGeneratorError(exc, traceback.format_exc()) from exc + + def readme_agent( - alignment: Optional[str], - api: Optional[str], - badge_color: Optional[str], - badge_style: Optional[str], - base_url: Optional[str], - context_window: Optional[int], - emojis: Optional[bool], - image: Optional[str], - # language: Optional[str], - model: Optional[str], - output_file: Optional[str], - rate_limit: Optional[int], repository: str, - temperature: Optional[float], - # template: Optional[str], - tree_depth: Optional[int], - top_p: Optional[float], + align: str, + api: str, + badge_color: str, + badge_style: str, + base_url: str, + context_window: int, + emojis: bool, + header_style: str, + image: str, + model: str, + output: str, + rate_limit: int, + temperature: float, + toc_style: str, + top_p: float, + tree_depth: int, ) -> None: - """Configures and runs the README file generator agent.""" - try: - conf = ConfigLoader() + """ + 
Configures and runs the README file generator agent. + """ + with error_handler(): api, model = get_environment(api, model) + + conf = ConfigLoader() + conf.config.api.rate_limit = rate_limit - conf.config.llm = conf.config.llm.copy( + conf.config.llm = conf.config.llm.model_copy( update={ "api": api, "base_url": base_url, @@ -59,40 +75,41 @@ def readme_agent( "model": model, "temperature": temperature, "top_p": top_p, - } + }, ) - conf.config.md = conf.config.md.copy( + conf.config.md = conf.config.md.model_copy( update={ - "alignment": alignment, + "align": align, "badge_color": badge_color, "badge_style": badge_style, "emojis": emojis, + "header_style": header_style, "image": image, + "toc_style": toc_style, "tree_depth": tree_depth, - } + }, ) conf.config.git = GitSettings(repository=repository) - _logger.info(f"Repository validated: {conf.config.git}") - _logger.info(f"LLM API settings: {conf.config.llm}") - asyncio.run(readme_generator(conf, output_file)) - except Exception as exc: - raise ReadmeGeneratorError(exc, traceback.format_exc()) from exc + _logger.info(f"Repository settings updated: {conf.config.git}") + _logger.info(f"LLM API settings updated: {conf.config.llm}") + + asyncio.run(readme_generator(conf, output)) -async def readme_generator(conf: ConfigLoader, output_file: Path) -> None: - """Orchestrates the README.md file generation process.""" - with tempfile.TemporaryDirectory() as temp_dir: - await clone_repository(conf.config.git.repository, temp_dir) - ( - dependencies, - raw_files, - ) = preprocessor(conf, temp_dir) +async def readme_generator(conf: ConfigLoader, output_file: str) -> None: + """ + Processes the repository and generates the README file. 
+ """ + with tempfile.TemporaryDirectory() as tmp_dir: + await retrieve_repository(conf.config.git.repository, tmp_dir) + deps, raw_files = preprocessor(conf, tmp_dir) + _logger.info(f"Total files analyzed: {len(raw_files)}") - _logger.info(f"Dependencies found: {dependencies}") + _logger.info(f"Dependencies extracted: {deps}") - async with ModelFactory.model_handler(conf).use_api() as llm: - responses = await llm.batch_request(dependencies, raw_files) + async with ModelRegistry.get_backend(conf).use_api() as llm: + responses = await llm.batch_request(deps, raw_files) ( summaries, features, @@ -107,20 +124,16 @@ async def readme_generator(conf: ConfigLoader, output_file: Path) -> None: conf.config.md.image == ImageOptions.LLM.value and conf.config.llm.api != ModelOptions.OFFLINE.value ): - conf.config.md.width = "60%" - dalle = DalleHandler(conf) - image_url = dalle.run() - conf.config.md.image = dalle.download(image_url) - elif ( - conf.config.md.image == ImageOptions.LLM.value - and conf.config.llm.api == ModelOptions.OFFLINE.value - ): - conf.config.md.image = ImageOptions.BLUE.value + with DalleHandler(conf).use_api() as dalle: + image_url = dalle._make_request() + conf.config.md.image = dalle.download(image_url) + if conf.config.md.image == ImageOptions.LLM.value: + conf.config.md.image = ImageOptions.BLUE.value + + readme_md = MarkdownBuilder(conf, deps, summaries, tmp_dir).build() - readme_md = MarkdownBuilder( - conf, dependencies, summaries, temp_dir - ).build() FileHandler().write(output_file, readme_md) - _logger.info("README generation process completed successfully!") + _logger.info(f"README.md file saved to: {output_file}") + _logger.info("README generation process completed successfully!") _logger.info("Share it @ github.com/eli64s/readme-ai/discussions") diff --git a/readmeai/_exceptions.py b/readmeai/_exceptions.py index 4a513fd0..1392ba82 100644 --- a/readmeai/_exceptions.py +++ b/readmeai/_exceptions.py @@ -1,41 +1,46 @@ """ -Custom exceptions 
for the readme-ai package. +Custom exceptions classes for the readme-ai package. """ from __future__ import annotations class ReadmeAIError(Exception): - """Base class for exceptions in this module.""" + """ + Base class for exceptions in this module. + """ ... -class CLIError(ReadmeAIError): - """Exceptions related to the CLI.""" +class ReadmeGeneratorError(Exception): + """ + Raised when an error occurs during README generation. + """ + + def __init__(self, exc, traceback): + self.exc = exc + self.traceback = traceback + super().__init__(f"README generation error occurred: {exc}") - def __init__(self, message, *args): - super().__init__(f"Invalid option provided to CLI: {message}", *args) +# ----------------- CLI ---------------------------------- -class GitCloneError(ReadmeAIError): - """Could not clone repository.""" - def __init__(self, repository: str, *args): - self.repository = repository - super().__init__(f"Failed to clone repository: {repository}", *args) +class CLIError(ReadmeAIError): + """Exceptions related to the CLI.""" + def __init__(self, message, *args): + super().__init__(f"Invalid option provided to CLI: {message}", *args) -class GitValidationError(ReadmeAIError): - """Could not validate repository.""" - def __init__(self, repository: str, *args): - self.repository = repository - super().__init__(f"Failed to validate repository: {repository}", *args) +# ----------------- File System ---------------------------------- class FileSystemError(ReadmeAIError): - """Exceptions related to file system operations.""" + """ + Exceptions related to file system operations. + """ def __init__(self, message, path, *args): self.file_path = path @@ -43,27 +48,28 @@ def __init__(self, message, path, *args): class FileReadError(FileSystemError): - """Could not read file.""" + """ + Raised when a file cannot be read. + """ ... class FileWriteError(FileSystemError): - """Could not write file.""" + """ + Raised when a file cannot be written to. + """ ... 
-class ReadmeGeneratorError(ReadmeAIError): - """Exceptions related to readme generation.""" - - def __init__(self, traceback, *args): - self.traceback = traceback - super().__init__(f"Error generating readme: {traceback}", *args) +# ----------------- LLM API ---------------------------------- class UnsupportedServiceError(ReadmeAIError): - """Exceptions related to the LLMHandler class.""" + """ + Raised when an unsupported LLM service is provided. + """ def __init__(self, message, *args): super().__init__(message, *args) diff --git a/readmeai/cli/main.py b/readmeai/cli/main.py index 5bf98826..b54a76af 100644 --- a/readmeai/cli/main.py +++ b/readmeai/cli/main.py @@ -2,68 +2,66 @@ CLI entrypoint for the readme-ai package. """ -from typing import Optional - import click -from readmeai._agent import readme_agent +from readmeai.__main__ import readme_agent from readmeai.cli import options @click.command() -@options.alignment +@options.align @options.api @options.badge_color @options.badge_style @options.base_url @options.context_window @options.emojis +@options.header_style @options.image -@options.language @options.model @options.output @options.rate_limit @options.repository @options.temperature -@options.tree_depth -@options.template +@options.toc_style @options.top_p +@options.tree_depth def main( - alignment: Optional[str], - api: Optional[str], - badge_color: Optional[str], - badge_style: Optional[str], - base_url: Optional[str], - context_window: Optional[int], - emojis: Optional[bool], - image: Optional[str], - language: Optional[str], - model: Optional[str], - output: Optional[str], - rate_limit: Optional[int], + align: str, + api: str, + badge_color: str, + badge_style: str, + base_url: str, + context_window: int, + emojis: bool, + header_style: str, + image: str, + model: str, + output: str, + rate_limit: int, repository: str, - temperature: Optional[float], - template: Optional[str], - top_p: Optional[float], - tree_depth: Optional[int], + temperature: 
float, + toc_style: str, + top_p: float, + tree_depth: int, ) -> None: """Entry point for the readme-ai CLI application.""" readme_agent( - alignment=alignment, + align=align, api=api, badge_color=badge_color, badge_style=badge_style, base_url=base_url, context_window=context_window, emojis=emojis, + header_style=header_style, image=image, - # language=language, model=model, - output_file=output, + output=output, rate_limit=rate_limit, repository=repository, temperature=temperature, - # template=template, + toc_style=toc_style, top_p=top_p, tree_depth=tree_depth, ) diff --git a/readmeai/cli/options.py b/readmeai/cli/options.py index 7115cefb..97879c7f 100644 --- a/readmeai/cli/options.py +++ b/readmeai/cli/options.py @@ -4,62 +4,19 @@ from __future__ import annotations -from enum import Enum -from typing import Optional - import click - -class BadgeOptions(str, Enum): - """ - Enum for CLI options for README file badge icons. - """ - - DEFAULT = "default" - FLAT = "flat" - FLAT_SQUARE = "flat-square" - FOR_THE_BADGE = "for-the-badge" - PLASTIC = "plastic" - SKILLS = "skills" - SKILLS_LIGHT = "skills-light" - SOCIAL = "social" - - -class ImageOptions(str, Enum): - """ - Enum for CLI options for README file header images. 
- """ - - # Custom image options - CUSTOM = "custom" - LLM = "llm" - - # Default image options - BLACK = "https://img.icons8.com/external-tal-revivo-regular-tal-revivo/96/external-readme-is-a-easy-to-build-a-developer-hub-that-adapts-to-the-user-logo-regular-tal-revivo.png" - BLUE = "https://raw.githubusercontent.com/PKief/vscode-material-icon-theme/ec559a9f6bfd399b82bb44393651661b08aaf7ba/icons/folder-markdown-open.svg" - CLOUD = "https://cdn-icons-png.flaticon.com/512/6295/6295417.png" - GRADIENT = "https://img.icons8.com/?size=512&id=55494&format=png" - GREY = "https://img.icons8.com/external-tal-revivo-filled-tal-revivo/96/external-markdown-a-lightweight-markup-language-with-plain-text-formatting-syntax-logo-filled-tal-revivo.png" - PURPLE = "https://img.icons8.com/external-tal-revivo-duo-tal-revivo/100/external-markdown-a-lightweight-markup-language-with-plain-text-formatting-syntax-logo-duo-tal-revivo.png" - - -class ModelOptions(str, Enum): - """ - Enum for CLI options for the LLM API key. 
- """ - - OFFLINE = "OFFLINE" - OLLAMA = "OLLAMA" - OPENAI = "OPENAI" - GEMINI = "GEMINI" +from readmeai.config.settings import BadgeOptions, ImageOptions, ModelOptions def prompt_for_image( - context: Optional[click.Context], - parameter: Optional[click.Parameter], - value: Optional[str], + context: click.Context | None = None, + parameter: click.Parameter | None = None, + value: str | None = None, ) -> str: """Prompt the user for a custom image URL.""" + if value is None: + return ImageOptions.BLUE.value if value == ImageOptions.CUSTOM.name: return click.prompt("Provide an image file path or URL") elif value == ImageOptions.LLM.name: @@ -70,43 +27,48 @@ def prompt_for_image( raise click.BadParameter(f"Invalid image provided: {value}") -alignment = click.option( +align = click.option( "-a", - "--alignment", - type=click.Choice(["center", "left"], case_sensitive=False), + "--align", + type=click.Choice(["center", "left", "right"], case_sensitive=False), default="center", - help="Alignment for the README.md file header sections.", + help="align for the README.md file header sections.", ) api = click.option( "--api", type=click.Choice( - [opt.value for opt in ModelOptions], case_sensitive=False + [opt.value for opt in ModelOptions], + case_sensitive=False, ), default=None, - help="""LLM service to use for generating the README.md file. The following services are currently supported:\n - - OPENAI # OpenAI - gpt-3.5-turbo \n - - OLLAMA # Ollama - llama2 \n - - GEMINI # Google Gemini API - gemini-pro \n - - OFFLINE # Offline mode - no LLM service used \n + help="""LLM API service to use for README.md file generation. Current support for:\n + - ANTHROPIC # Anthropic: claude-3-5-sonnet \n + - GEMINI # Google Gemini: gemini-pro \n + - OFFLINE # Offline Mode: run without a LLM API \n + - OLLAMA # Ollama: llama3, mistral, etc. 
\n + - OPENAI # OpenAI: gpt-3.5-turbo \n """, ) badge_color = click.option( + "-bc", "--badge-color", type=str, default="0080ff", - help="Custom color for the badge icon. Provide a valid color name or hex code.", + help="Change color of shields.io badges. Provide color name or hex code.", ) badge_style = click.option( + "-bs", "--badge-style", type=click.Choice( - [opt.value for opt in BadgeOptions], case_sensitive=False + [opt.value for opt in BadgeOptions], + case_sensitive=False, ), default=BadgeOptions.DEFAULT.value, help="""\ - Badge icon style types to select from when generating README.md badges. The following options are currently available:\n + Style for shields.io badges. The following options are currently supported:\n - default \n - flat \n - flat-square \n @@ -122,10 +84,11 @@ def prompt_for_image( "--base-url", type=str, default="https://api.openai.com/v1/chat/completions", - help="Base URL for the LLM API service used to generate text for the README.md file.", + help="Base URL for the LLM API service.", ) context_window = click.option( + "-cw", "--context-window", default=3999, type=int, @@ -137,26 +100,42 @@ def prompt_for_image( "--emojis", is_flag=True, default=False, - help="This option adds emojis to the README.md file's header sections. For example, the default header for the 'Overview' section generates the markdown code as '## Overview'. Adding the --emojis flag generates the markdown code as '## ๐Ÿ“ Overview'.", + help="Adds an emoji prefix to each header of the README.md file. 
For example, the header ## 'Overview' would change to '## ๐Ÿ“ Overview'.", +) + +header_style = click.option( + "-hs", + "--header-style", + type=click.Choice(["classic", "compact", "modern"], case_sensitive=False), + default="classic", + help="Header template styles.", +) + +toc_style = click.option( + "-ts", + "--toc-style", + type=click.Choice( + ["bullet", "fold", "links", "number"], + case_sensitive=False, + ), + default="bullet", + help="Table of Contents template styles.", ) image = click.option( "-i", "--image", type=click.Choice( - [opt.name for opt in ImageOptions], case_sensitive=False + [opt.name for opt in ImageOptions], + case_sensitive=False, ), default=ImageOptions.BLUE.name, callback=prompt_for_image, show_choices=True, help="""\ Project logo image displayed in the README file header. The following options are currently supported:\n - - Custom image options:\n - - CUSTOM (use a custom image file path or URL) \n - - LLM (use LLM multi-modal capabilities to generate an image) \n - - Default image options:\n + - CUSTOM (provide local image path or URL) \n + - LLM (generate project logo using LLM API) \n - BLACK \n - BLUE \n - CLOUD \n @@ -166,28 +145,22 @@ def prompt_for_image( """, ) -language = click.option( - "-l", - "--language", - default="en", - help="Language to use for generating the README.md file. Default is English (en).", -) - model = click.option( "-m", "--model", default="gpt-3.5-turbo", - help="Large language model (LLM) API used to generate text for the README.md file. Default model uses OpenAI's gpt-3.5-turbo.", + help="Large language model (LLM) API backend to power the README.md file generation.", ) output = click.option( "-o", "--output", default="readme-ai.md", - help="Output file name for your README file. 
Default name is 'readme-ai.md'.", + help="Output file name for your README file.", ) rate_limit = click.option( + "-rl", "--rate-limit", default=10, type=click.IntRange(1, 25, clamp=True), @@ -198,33 +171,27 @@ def prompt_for_image( "-r", "--repository", required=True, - help="Provide a remote repository URL (GitHub, GitLab, BitBucket), or a local directory path to your project.", + help="Provide a remote repository URL (GitHub, GitLab, BitBucket), or local path to your project.", ) temperature = click.option( "-t", "--temperature", default=0.9, - type=click.FloatRange(0.0, 2.0, clamp=True), - help="Setting the model's temperature to a higher value will yield more creative content generated, while a lower value will generate more predictable content.", -) - -template = click.option( - "--template", - type=str, - help="README template file to use for generating the README.md file.", + type=click.FloatRange(min_open=0.0, max=2.0, clamp=True), + help="Increasing temperature yields more randomness in text generation.", ) top_p = click.option( "--top-p", default=0.9, type=click.FloatRange(0.0, 1.0, clamp=True), - help="Top-p sampling probability for the model's generation. This value can be set between 0.0 and 1.0.", + help="Top-p sampling probability for the model's generation.", ) tree_depth = click.option( "--tree-depth", default=2, type=int, - help="Maximum depth of the directory tree thats included in the README.md file.", + help="Maximum depth of the directory tree generated for the README file.", ) diff --git a/readmeai/config/settings.py b/readmeai/config/settings.py index 29a6aa22..c7a0dd6d 100644 --- a/readmeai/config/settings.py +++ b/readmeai/config/settings.py @@ -1,117 +1,280 @@ """ -Data models and configuration settings for the readme-ai package. +Pydantic models and settings for the readme-ai package. 
""" from __future__ import annotations +from enum import Enum from functools import cached_property from pathlib import Path -from typing import Optional, Union +from typing import Literal + +from pydantic import ( + AnyHttpUrl, + BaseModel, + ConfigDict, + Field, + FilePath, + PositiveFloat, + PositiveInt, + field_validator, + model_validator, +) +from pydantic_extra_types.color import Color -from pydantic import BaseModel, Field, HttpUrl, validator - -from readmeai.config.validators import GitValidator from readmeai.core.logger import Logger from readmeai.utils.file_handler import FileHandler from readmeai.utils.file_resources import get_resource_path +from readmeai.vcs.errors import GitValidationError +from readmeai.vcs.url_builder import GitURL, parse_git_url + +_logger = Logger(__name__) + + +class BadgeOptions(str, Enum): + DEFAULT = "default" + FLAT = "flat" + FLAT_SQUARE = "flat-square" + FOR_THE_BADGE = "for-the-badge" + PLASTIC = "plastic" + SKILLS = "skills" + SKILLS_LIGHT = "skills-light" + SOCIAL = "social" + + +class ImageOptions(str, Enum): + CUSTOM = "CUSTOM" + LLM = "LLM" + BLACK = "https://img.icons8.com/external-tal-revivo-regular-tal-revivo/96/external-readme-is-a-easy-to-build-a-developer-hub-that-adapts-to-the-user-logo-regular-tal-revivo.png" + BLUE = "https://raw.githubusercontent.com/PKief/vscode-material-icon-theme/ec559a9f6bfd399b82bb44393651661b08aaf7ba/icons/folder-markdown-open.svg" + CLOUD = "https://cdn-icons-png.flaticon.com/512/6295/6295417.png" + GRADIENT = "https://img.icons8.com/?size=512&id=55494&format=png" + GREY = "https://img.icons8.com/external-tal-revivo-filled-tal-revivo/96/external-markdown-a-lightweight-markup-language-with-plain-text-formatting-syntax-logo-filled-tal-revivo.png" + PURPLE = "https://img.icons8.com/external-tal-revivo-duo-tal-revivo/100/external-markdown-a-lightweight-markup-language-with-plain-text-formatting-syntax-logo-duo-tal-revivo.png" + + +class ModelOptions(str, Enum): + # ANTHROPIC = 
"ANTHROPIC" + GEMINI = "GEMINI" + OFFLINE = "OFFLINE" + OLLAMA = "OLLAMA" + OPENAI = "OPENAI" class APISettings(BaseModel): """ - Universal LLM API settings. + LLM API settings and parameters. """ - content: Optional[str] - rate_limit: Optional[int] + rate_limit: PositiveInt = Field(gt=0, le=25, description="API rate limit.") + system_message: str = Field(description="LLM system prompt content field.") class FileSettings(BaseModel): - """File paths used by the readme-ai CLI tool.""" - - blacklist: str - commands: str - languages: str - markdown: str - parsers: str - prompts: str - shields_icons: str - skill_icons: str + """ + File path resources for the readme-ai package. + """ + + commands: str = Field(description="'Quickstart' setup commands.") + ignore_list: str = Field(description="List of files to ignore.") + languages: str = Field(description="Extension to language mappings.") + markdown: str = Field(description="Markdown code template blocks.") + parsers: str = Field(description="Common dependency file names.") + prompts: str = Field(description="LLM API prompt templates.") + shieldsio_icons: str = Field(description="Shields.io svg icon badges.") + skill_icons: str = Field(description="Skill icon badges.") class GitSettings(BaseModel): - """User repository settings, sanitized and validated by Pydantic.""" + """ + User repository settings for a remote or local codebase. + """ - repository: Union[str, Path] = Field( - ..., description="URL or directory path to the repository." + repository: Path | str + git_url: GitURL | None = None + full_name: str | None = None + host_domain: str | None = None + host: str | None = None + name: str | None = None + + model_config = ConfigDict(extra="forbid") + + @field_validator("repository") + def validate_repository(cls, value: Path | str) -> Path | str: + """ + Validates the repository path or Git URL. 
+ """ + if isinstance(value, Path) or ( + isinstance(value, str) + and Path(value).is_dir() + and Path(value).exists() + ): + return value + try: + return str(GitURL.create(value).url) + except ValueError as exc: + raise GitValidationError( + f"Invalid Git repository URL or path: {value}", + ) from exc + + @field_validator("git_url") + def set_git_url(cls, value: GitURL | None, values) -> GitURL | None: + """ + Set Git URL from the repository path if not provided. + """ + if value is None and "repository" in values: + return GitURL.create(str(values["repository"])) + return value + + @model_validator(mode="after") + def set_git_attributes(self) -> GitSettings: + """ + Parse and set Git repository attributes. + """ + if self.git_url: + self.host_domain = self.git_url.host_domain + self.host = self.git_url.host.name if self.git_url.host else None + self.name = self.git_url.name + self.full_name = self.git_url.full_name + else: + self.host_domain, self.host, self.name, self.full_name = ( + parse_git_url(str(self.repository)) + ) + return self + + def get_file_url(self, file_path: str) -> str: + """ + Generates a URL for a file in the repository. + """ + if self.git_url: + return self.git_url.get_file_url(file_path) + elif isinstance(self.repository, Path) or ( + isinstance(self.repository, str) and Path(self.repository).is_dir() + ): + return str(Path(self.repository) / file_path) + else: + raise GitValidationError("Failed to generate git file URL") + + +class MarkdownSettings(BaseModel): + """ + Markdown code template blocks for building the README.md file. + """ + + align: Literal["left", "center", "right"] = Field( + default="center", + description="align for markdown content.", ) - full_name: Optional[str] = Field( - None, description="The full name of the repository." 
+ badge_color: Color = Field( + default_factory=lambda: Color("blue"), + description="Badge color (https://www.w3.org/TR/SVG11/types.html#ColorKeywords)", ) - host_domain: Optional[str] = Field( - None, description="Domain of the repository host." + badge_style: BadgeOptions = Field( + default=BadgeOptions.FLAT, + description="Badge icon style type.", ) - host: Optional[str] = Field( - None, description="The repository host i.e. 'github'." + badge_icons: str + contribute: str + emojis: bool = Field( + default=False, + description="Enable emoji prefixes for headers.", ) - name: Optional[str] = Field( - None, description="Project name i.e. 'readme-ai'." + features: str = Field( + default="❯ INSERT-PROJECT-FEATURES", + description="Project feature content.", ) - - _validate_repository = validator("repository", pre=True, always=True)( - GitValidator.validate_repository + header_style: str = Field( + default="classic", + description="Header style for the README file.", ) - _validate_full_name = validator("full_name", pre=True, always=True)( - GitValidator.validate_full_name + image: AnyHttpUrl | FilePath | str = Field( + default=ImageOptions.BLUE, + description="Image URL or path for the project logo.", ) - _set_host_domain = validator("host_domain", pre=True, always=True)( - GitValidator.set_host_domain + image_width: str = Field( + default="20%", + description="Project logo width", ) - _set_host = validator("host", pre=True, always=True)(GitValidator.set_host) - _set_name = validator("name", pre=True, always=True)(GitValidator.set_name) - - -class MarkdownSettings(BaseModel): - """Markdown template blocks for the README.md file.""" - - alignment: str - badge_color: str - badge_style: str - badge_icons: str - contribute: str - emojis: bool - features: str - header: str - image: str modules: str modules_widget: str - overview: str - placeholder: str + overview: str = Field( + default="❯ INSERT-PROJECT-OVERVIEW", + description="Project overview content.", + ) + 
placeholder: str = Field( + default="❯ REPLACE-ME", + description="Placeholder image for missing content.", + ) quickstart: str - shields_icons: str + requirements: str = Field( + default="", + description="Project system prerequisites.", + ) + shieldsio_icons: str skill_icons: str - slogan: str - tables: str - toc: str + slogan: str = Field( + default="❯ INSERT-PROJECT-SLOGAN", + description="Project tagline or slogan.", + ) + tables: str = Field(default="", description="Markdown table options.") + toc_style: str = Field( + default="bullet", + description="Table of contents content.", + ) tree: str - tree_depth: int - width: str + tree_depth: PositiveInt = Field( + default=2, + ge=1, + le=5, + description="Depth of directory tree.", + ) + + @field_validator("badge_color") + def set_color(cls, value: str) -> str: + """ + Validates badge color value and returns the hex code. + """ + try: + return Color(value).as_hex().strip("#") + except ValueError as exc: + _logger.error(f"Invalid color value '{value}': {exc}") + return cls.model_fields["badge_color"].default + + @model_validator(mode="after") + def set_width(self) -> MarkdownSettings: + """ + Validates and sets the width for the project logo image. + """ + if str(self.image).lower() == ImageOptions.LLM.name.lower(): + self.image_width = "60%" + return self class ModelSettings(BaseModel): - """LLM API settings used for generating text for the README.md file.""" + """ + LLM API model settings and parameters. 
+ """ - api: Optional[str] - base_url: Optional[HttpUrl] - context_window: Optional[int] - encoder: Optional[str] - model: Optional[str] - temperature: Optional[float] - tokens: Optional[int] - top_p: Optional[float] + api: str | None = Field( + default=ModelOptions.OFFLINE, + description="API key for the LLM model.", + ) + base_url: str + context_window: PositiveInt + encoder: str + host_name: AnyHttpUrl + localhost: AnyHttpUrl + model: str + path: str + temperature: float + tokens: PositiveInt + top_p: PositiveFloat class Settings(BaseModel): - """Nested data model to store all configuration settings.""" + """ + Pydantic settings model for the readme-ai package. + """ api: APISettings files: FileSettings @@ -119,62 +282,55 @@ class Settings(BaseModel): llm: ModelSettings md: MarkdownSettings - class Config: - """Pydantic configuration settings.""" - - validate_assignment = True + model_config = ConfigDict( + validate_assignment=True, + ) class ConfigLoader: - """Loads the configuration settings for the CLI.""" + """ + Loads the configuration settings for the readme-ai package. 
+ """ def __init__( self, - config_file: Union[str, Path] = "config.toml", - sub_module: str = "settings", + config_file: str = "config.toml", + submodule: str = "settings", ) -> None: """Initialize ConfigLoader with the base configuration file.""" - self._logger = Logger(__name__) self.file_handler = FileHandler() self.config_file = config_file - self.sub_module = sub_module - self.config = self._base_config + self.submodule = submodule + self.config = self._load_config self.load_settings() @cached_property - def _base_config(self) -> Settings: + def _load_config(self) -> Settings: """Loads the base configuration file.""" file_path = get_resource_path( - file_path=self.config_file, sub_module=self.sub_module + file_path=self.config_file, + submodule=self.submodule, ) - config_dict = self.file_handler.read(file_path) - return Settings.parse_obj(config_dict) + config_dict = self.file_handler.read(str(file_path)) + return Settings.model_validate(config_dict) def load_settings(self) -> dict[str, dict]: - """Loads all configuration settings. - - - Loads the base configuration file from `settings/config.toml`. - - Loads any additional configuration files specified in the base settings - under the `files` key. - - Returns: - A dictionary containing all loaded configuration settings, where - the keys are the section names from `Settings` and the values - are their respective data dictionaries. """ - settings = self._base_config.dict() + Loads all configuration settings. + 1. Loads the base configuration file from 'settings/config.toml'. + 2. Loads all additional TOML files defined in 'FileSettings.' 
+ """ + settings = self._load_config.model_dump() - for key, file_name in settings["files"].items(): - if not file_name.endswith(".toml"): + for key, file_path in settings["files"].items(): + if not file_path.endswith(".toml"): continue - file_path = get_resource_path( - file_path=file_name, - ) - data_dict = self.file_handler.read(file_path) - settings[key] = data_dict - setattr(self, key, data_dict) - self._logger.info( - f"Loaded configuration file: {self.sub_module}/{file_name}" - ) + + file_path = get_resource_path(file_path=file_path) + data_config = self.file_handler.read(file_path) + settings[key] = data_config + setattr(self, key, data_config) + + _logger.info(f"Config loaded: {self.submodule}/{file_path}") return settings diff --git a/readmeai/config/settings/commands.toml b/readmeai/config/settings/commands.toml index facafed1..4b81a74f 100644 --- a/readmeai/config/settings/commands.toml +++ b/readmeai/config/settings/commands.toml @@ -1,7 +1,7 @@ # Programming language install, run, and test commands.-vs-binding = { [quickstart_guide] -default = ["> INSERT-INSTALL-COMMANDS", "> INSERT-RUN-COMMANDS", "> INSERT-TEST-COMMANDS"] +default = ["โฏ INSERT-INSTALL-COMMANDS", "โฏ INSERT-RUN-COMMANDS", "โฏ INSERT-TEST-COMMANDS"] C = ["gcc -o myapp main.c", "./myapp", "/* No common unit test framework in C */"] CPP = ["g++ -o myapp main.cpp", "./myapp", "googletest"] CSharp = ["dotnet build", "dotnet run", "dotnet test"] diff --git a/readmeai/config/settings/config.toml b/readmeai/config/settings/config.toml index c3511b96..67dbe54a 100644 --- a/readmeai/config/settings/config.toml +++ b/readmeai/config/settings/config.toml @@ -1,17 +1,17 @@ # Default API Settings [api] -content = "You're a brilliant Tech Lead and Software Engineer with a passion for open-source projects." rate_limit = 10 +system_message = "You're a brilliant Tech Lead and Software Engineer with a passion for open-source projects." 
# File Resources [files] -blacklist = "blacklist.toml" +ignore_list = "ignore_list.toml" commands = "commands.toml" languages = "languages.toml" markdown = "markdown.toml" parsers = "parsers.toml" prompts = "prompts.toml" -shields_icons = "icons.json" +shieldsio_icons = "shieldsio_icons.json" skill_icons = "skill_icons.json" # Git Repository Settings @@ -21,91 +21,29 @@ repository = "https://github.com/eli64s/readme-ai" # Language Model API Settings [llm] api = "openai" -base_url = "https://api.openai.com/v1/chat/completions" -context_window = 4000 +base_url = "https://api.openai.com/v1/engines/" +context_window = 3999 encoder = "cl100k_base" +host_name = "https://api.openai.com/" +localhost = "http://localhost:11434/" model = "gpt-3.5-turbo" -temperature = 0.9 -tokens = 650 +path = "v1/chat/completions" +temperature = 0.0 +tokens = 699 top_p = 0.9 # Markdown Template Settings [md] -# Markdown Default Settings -alignment = "center" -emojis = false -image = "" -placeholder = "โ–บ INSERT-TEXT-HERE" -slogan = "" -tables = "" -width = "100" - -# Header Template -header = """\ -

    - project-logo -

    -

    -

    {repo_name}

    -

    -

    - {slogan} -

    -

    \n\t{shields_icons}

    -

    \n\t{badge_icons}

    -""" - # Badges -badge_color = "0080ff" -badge_style = "flat" -badge_icons = """\tDeveloped with the software and tools below.\n

    \n

    \n\t{badge_icons}""" -shields_icons = """ +badge_icons = """\tBuilt with the tools and technologies:\n

    \n

    \n\t{badge_icons}""" +shieldsio_icons = """ license \tlast-commit \trepo-top-language \trepo-language-count """ skill_icons = """\n\t\t\n\t""" - -# Table of Contents -quick_links = """ -## ๐Ÿ”— Quick Links - -> - [๐Ÿ“ Overview](#-overview) -> - [๐Ÿงฉ Features](#-features) -> - [๐Ÿ—‚๏ธ Repository Structure](#๏ธ-repository-structure) -> - [๐Ÿ“ฆ Modules](#-modules) -> - [๐Ÿš€ Getting Started](#-getting-started) -> - [โš™๏ธ Installation](#๏ธ-installation) -> - [๐Ÿค– Usage](#-usage) -> - [๐Ÿงช Tests](#-tests) -> - [๐Ÿ›  Project Roadmap](#-project-roadmap) -> - [๐Ÿค Contributing](#-contributing) -> - [๐ŸŽ— License](#-license) -> - [๐Ÿ”— Acknowledgments](#-acknowledgments) - ---- -""" - -toc = """
    -

    - Table of Contents
    - -- [๐Ÿ“ Overview](#-overview) -- [๐Ÿงฉ Features](#-features) -- [๐Ÿ—‚๏ธ Repository Structure](#๏ธ-repository-structure) -- [๐Ÿ“ฆ Modules](#-modules) -- [๐Ÿš€ Getting Started](#-getting-started) - - [โš™๏ธ Installation](#๏ธ-installation) - - [๐Ÿค– Usage](#-usage) - - [๐Ÿงช Tests](#-tests) -- [๐Ÿ›  Project Roadmap](#-project-roadmap) -- [๐Ÿค Contributing](#-contributing) -- [๐ŸŽ— License](#-license) -- [๐Ÿ”— Acknowledgments](#-acknowledgments) -
    -
    -""" +readmeai_badge = """readme-ai""" # Overview Template overview = """ @@ -118,17 +56,16 @@ overview = """ # Features Template features = """ -## ๐Ÿงฉ Features +## ๐Ÿ‘พ Features {0} --- """ -# Directory Structure Tree -tree_depth = 3 +# Directory Structure tree = """ -## ๐Ÿ—‚๏ธ Repository Structure +## ๐Ÿ“‚ Repository Structure ```sh {0} @@ -140,7 +77,7 @@ tree = """ # Codebase Summaries modules_widget = """
    {0}\n\n{1}\n\n
    \n""" modules = """ -## ๐Ÿ“ฆ Modules +## ๐Ÿงฉ Modules """ # Quickstart Template @@ -149,56 +86,55 @@ quickstart = """ ## ๐Ÿš€ Getting Started -**System Requirements:** +### ๐Ÿ”– Prerequisites -* {system_requirements} +{prerequisites} -### โš™๏ธ Installation +### ๐Ÿ“ฆ Installation -

    From source

    +Build the project from source: -> 1. Clone the {repo_name} repository: -> -> ```console -> $ git clone {repo_url} -> ``` -> -> 2. Change to the project directory: -> ```console -> $ cd {repo_name} -> ``` -> -> 3. Install the dependencies: -> ```console -> $ {install_command} -> ``` +1. Clone the {repo_name} repository: +```sh +โฏ git clone {repo_url} +``` + +2. Navigate to the project directory: +```sh +โฏ cd {repo_name} +``` + +3. Install the required dependencies: +```sh +โฏ {install_command} +``` ### ๐Ÿค– Usage -

    From source

    +To run the project, execute the following command: -> Run {repo_name} using the command below: -> ```console -> $ {run_command} -> ``` +```sh +โฏ {run_command} +``` ### ๐Ÿงช Tests -> Run the test suite using the command below: -> ```console -> $ {test_command} -> ``` +Execute the test suite using the following command: + +```sh +โฏ {test_command} +``` --- """ -# Project Roadmap Contributing, License, and Acknowledgments Sections +# Roadmap Contributing, License, and Acknowledgments contribute = """ -## ๐Ÿ›  Project Roadmap +## ๐Ÿ“Œ Project Roadmap -- [X] `โ–บ INSERT-TASK-1` -- [ ] `โ–บ INSERT-TASK-2` -- [ ] `โ–บ ...` +- [X] **`Task 1`**: Implement feature one. +- [ ] **`Task 2`**: Implement feature two. +- [ ] **`Task 3`**: Implement feature three. --- @@ -238,7 +174,7 @@ Contributions are welcome! Here are several ways you can contribute:
    Contributor Graph
    -

    +

    @@ -253,12 +189,10 @@ This project is protected under the [SELECT-A-LICENSE](https://choosealicense.co --- -## ๐Ÿ”— Acknowledgments +## ๐Ÿ™Œ Acknowledgments - List any resources, contributors, inspiration, etc. here. -[**Return**](#-overview) - --- """ @@ -269,7 +203,7 @@ contact = """

    -
    +
    eli64s GitHub space Ultralytics LinkedIn @@ -280,11 +214,45 @@ contact = """ """ contributor_graph = """ -

    +

    """ -custom_badge = """project-name""" +cli_docs = """ +#### Command-Line Interface + +The project supports the following command-line interface options: + +```sh +โฏ {cli_command} +``` + +--- +""" + +api_docs = """ +#### API Documentation + +The project API documentation is available at: [API Documentation]({api_url}). + +--- +""" + +test_framework = """ +### ๐Ÿงช Testing + +This project uses **`{test_framework}`** for testing. + +- **`{test_framework}`** + +Execute the test suite using the following command: + +```sh +โฏ {test_command} +``` + +--- +""" diff --git a/readmeai/config/settings/blacklist.toml b/readmeai/config/settings/ignore_list.toml similarity index 99% rename from readmeai/config/settings/blacklist.toml rename to readmeai/config/settings/ignore_list.toml index e08aeb79..dc91b4db 100644 --- a/readmeai/config/settings/blacklist.toml +++ b/readmeai/config/settings/ignore_list.toml @@ -1,6 +1,6 @@ # Directories, file extensions, and file names to be excluded from preprocessing. -[blacklist] +[ignore_list] directories = [ ".DS_Store", ".dvc", diff --git a/readmeai/config/settings/markdown.toml b/readmeai/config/settings/markdown.toml index bf221c2b..c8a02720 100644 --- a/readmeai/config/settings/markdown.toml +++ b/readmeai/config/settings/markdown.toml @@ -1,34 +1,24 @@ # Markdown template code blocks to construct a README.md file. [md] -# Markdown Default Settings -alignment = "center" -emojis = false -image = "" -placeholder = "โ–บ INSERT-TEXT-HERE" -slogan = "" -tables = "" - # Header Template header = """\ -

    - project-logo +

    + project-logo

    -

    -

    {repo_name}

    +

    +

    {repo_name}

    -

    +

    {slogan}

    -

    \n\t{shields_icons}

    -

    \n\t{badge_icons}

    +

    \n\t{shieldsio_icons}

    +

    \n\t{badge_icons}

    """ # Badges -badge_color = "0080ff" -badge_style = "flat" -badge_icons = """\tDeveloped with the software and tools below.\n

    \n

    \n\t{badge_icons}""" -shields_icons = """ +badge_icons = """\tDeveloped with the software and tools below.\n

    \n

    \n\t{badge_icons}""" +shieldsio_icons = """ license \tlast-commit \trepo-top-language @@ -36,46 +26,6 @@ shields_icons = """ """ skill_icons = """\n\t\t\n\t""" -# Table of Contents -quick_links = """ -## ๐Ÿ”— Quick Links - -> - [๐Ÿ“ Overview](#-overview) -> - [๐Ÿงฉ Features](#-features) -> - [๐Ÿ—‚๏ธ Repository Structure](#-repository-structure) -> - [๐Ÿ“ฆ Modules](#-modules) -> - [๐Ÿš€ Getting Started](#-getting-started) -> - [โš™๏ธ Installation](#๏ธ-installation) -> - [๐Ÿค– Usage](#-usage) -> - [๐Ÿงช Tests](#-tests) -> - [๐Ÿ›  Project Roadmap](#-project-roadmap) -> - [๐Ÿค Contributing](#-contributing) -> - [๐Ÿ“„ License](#-license) -> - [๐Ÿ‘ Acknowledgments](#-acknowledgments) - ---- -""" -toc = """\ - -

    - Table of Contents - -- [๐Ÿ“ Overview](#-overview) -- [๐Ÿงฉ Features](#-features) -- [๐Ÿ—‚๏ธ Repository Structure](#-repository-structure) -- [๐Ÿ“ฆ Modules](#-modules) -- [๐Ÿš€ Getting Started](#-getting-started) - - [โš™๏ธ Installation](#๏ธ-installation) - - [๐Ÿค– Usage](#-usage) - - [๐Ÿงช Tests](#-tests) -- [๐Ÿ›  Project Roadmap](#-project-roadmap) -- [๐Ÿค Contributing](#-contributing) -- [๐Ÿ“„ License](#-license) -- [๐Ÿ‘ Acknowledgments](#-acknowledgments) -
    -
    -""" - # Overview Template overview = """ ## ๐Ÿ“ Overview @@ -87,7 +37,7 @@ overview = """ # Features Template features = """ -## ๐Ÿงฉ Features +## ๐Ÿ‘พ Features {0} @@ -97,7 +47,7 @@ features = """ # Directory Structure Tree tree_depth = 3 tree = """ -## ๐Ÿ—‚๏ธ Repository Structure +## ๐Ÿ“‚ Repository Structure ```sh {0} @@ -109,7 +59,7 @@ tree = """ # Codebase Summaries modules_widget = """
    {0}\n\n{1}\n\n
    \n""" modules = """ -## ๐Ÿ“ฆ Modules +## ๐Ÿงฉ Modules """ # Quickstart Template @@ -118,52 +68,63 @@ quickstart = """ ## ๐Ÿš€ Getting Started -**System Requirements:** +### ๐Ÿ”– Prerequisites -* {system_requirements} +{requirements} -### โš™๏ธ Installation +### ๐Ÿ“ฆ Installation

    From source

    -> 1. Clone the {repo_name} repository: -> -> ```console -> $ git clone {repo_url} -> ``` -> -> 2. Change to the project directory: -> ```console -> $ cd {repo_name} -> ``` -> -> 3. Install the dependencies: -> ```console -> $ {install_command} -> ``` +1. Clone the {repo_name} repository: +```sh +โฏ git clone {repo_url} +``` + +2. Change to the project directory: +```sh +โฏ cd {repo_name} +``` + +3. Install the dependencies: +```sh +โฏ {install_command} +``` ### ๐Ÿค– Usage

    From source

    -> Run {repo_name} using the command below: -> ```console -> $ {run_command} -> ``` +Run {repo_name} using the command below: + +```sh +โฏ {run_command} +``` ### ๐Ÿงช Tests -> Run the test suite using the command below: -> ```console -> $ {test_command} -> ``` +Run the test suite using the command below: + +```sh +โฏ {test_command} +``` --- """ +usage = """\ +#### Using `{tool}` + +> [![{tool}]({tool_badge})]({tool_url}) +> +> ```console +> $ {run_command} +> ```\n +""" + # Project Roadmap Contributing, License, and Acknowledgments Sections contribute = """ -## ๐Ÿ›  Project Roadmap +## ๐Ÿ“Œ Project Roadmap - [X] `โ–บ INSERT-TASK-1` - [ ] `โ–บ INSERT-TASK-2` @@ -207,7 +168,7 @@ Contributions are welcome! Here are several ways you can contribute:
    Contributor Graph
    -

    +

    @@ -216,13 +177,13 @@ Contributions are welcome! Here are several ways you can contribute: --- -## ๐Ÿ“„ License +## ๐ŸŽ— License This project is protected under the [SELECT-A-LICENSE](https://choosealicense.com/licenses) License. For more details, refer to the [LICENSE](https://choosealicense.com/licenses/) file. --- -## ๐Ÿ‘ Acknowledgments +## ๐Ÿ™Œ Acknowledgments - List any resources, contributors, inspiration, etc. here. @@ -238,7 +199,7 @@ contact = """

    -
    +
    eli64s GitHub space Ultralytics LinkedIn @@ -249,7 +210,7 @@ contact = """ """ contributor_graph = """ -

    +

    @@ -257,3 +218,7 @@ contributor_graph = """ """ custom_badge = """project-name""" + +slogan = "" + +tables = "" diff --git a/readmeai/config/settings/prompts.toml b/readmeai/config/settings/prompts.toml index 29c62f84..fb01c4f5 100644 --- a/readmeai/config/settings/prompts.toml +++ b/readmeai/config/settings/prompts.toml @@ -38,18 +38,29 @@ each response with a verb or a noun to make the summary more engaging and impact - Do not include quotes, code snippets, or bullet points in your response. \ - Your response should be a maximum of 50 words. """ +logo = """ +Create a **simple, cute mascot icon** in a **minimalist style** specifically designed as a standalone iPhone app icon. The icon should have a **singular main centerpiece** that defines the entire shape of the icon, without any external borders or background. -logo = """Design a square app logo for the software project - "{project_name}". \ -The logo should feature the project name in bold and easily readable letters. \ -The color scheme should be engaging and suitable for a technology-focused app, \ -with a white background behind the stylized app logo square with rounded corners. \ -While designing the logo, please reference the following codebase details: \n --------------------------------------------------------------------------------- -Repository Details: -Project Name: {project_name} -Project Overview: {project_overview} -Project Catch Phrase/Slogan: {project_slogan} --------------------------------------------------------------------------------- +**Characteristics:** +1. Use **thick, rounded outlines** to define the mascot's shape. +2. Employ **basic geometric shapes** for facial features (e.g., dots for eyes, simple shapes for nose/mouth). +3. Ensure a **chunky, squat body proportion** for a cute appearance. +4. Incorporate **distinctive features** of the mascot animal/character/hero in a **simplified form**. +5. Maintain a **friendly, approachable expression**. +6. 
Fill the **square app-icon canvas** with the mascot, centered and occupying most of the space. +7. The mascot's shape itself should form the icon boundaryโ€”**no additional borders or background elements**. +8. The design should be **visually complete and balanced** without relying on a background. + +**Mascot Concept:** +- Design a [insert specific animal or character relevant to your project, e.g., 'playful robot', 'curious owl', 'friendly octopus']. +- The aesthetic should be **clean, adorable, and instantly recognizable** even at small sizes. + +**Project Context:** +- **Project Name:** {project_name} +- **Project Overview:** {project_overview} +- **Project Slogan:** {project_slogan} + +The mascot should embody the spirit of your project, capturing its essence and purpose. The final design must function as a standalone icon, with the mascot's shape defining the boundaries and serving as the singular main centerpiece. """ overview = """Analyze the codebase, {0}, and provide a robust, yet succinct overview of the software \ @@ -76,24 +87,3 @@ Other Requirements: \n - Your response should be a maximum of 8 words. - The slogan should be concise and memorable. """ - -mermaid = """Create a visual representation of the software project '{0}' using a flowchart diagram. \ -The diagram should clearly illustrate the main components and the flow of data or control between them, \ -representing the codebase's architecture and execution path. The output should be formatted as a Markdown code block with Mermaid syntax. 
\ -Below is a template for the Mermaid flowchart that you can customize based on the project's specifics: \n --------------------------------------------------------------------------------- - -```mermaid -flowchart LR - -A[Hard] -->|Text| B(Round) -B --> C{Decision} -C -->|One| D[Result 1] -C -->|Two| E[Result 2] -``` - --------------------------------------------------------------------------------- -While generating the diagram, please reference the following codebase details: -File Summaries: {1} --------------------------------------------------------------------------------- -""" diff --git a/readmeai/config/settings/quickstart.toml b/readmeai/config/settings/quickstart.toml new file mode 100644 index 00000000..08be6edd --- /dev/null +++ b/readmeai/config/settings/quickstart.toml @@ -0,0 +1,487 @@ +[default] +tool = "Default" +install = "echo 'No specific installation instructions available'" +run = "echo 'No specific run instructions available'" +test = "echo 'No specific test instructions available'" +shield = "https://img.shields.io/badge/Tool-Generic-lightgrey?style={badge_style}" +website = "https://example.com" + +[bash] +tool = "Bash" +install = "./scripts/install.sh" +run = "./scripts/run.sh" +test = "./scripts/test.sh" +shield = "https://img.shields.io/badge/Shell_Script-121011.svg?style={badge_style}&logo=gnu-bash&logoColor=white" +website = "https://www.gnu.org/software/bash/" + +[dockerfile] +tool = "Docker" +install = "docker build -t {project_name} ." 
+run = "docker run -it {project_name}" +test = "docker exec -it {project_name} pytest" +shield = "https://img.shields.io/badge/Docker-2CA5E0.svg?style={badge_style}&logo=docker&logoColor=white" +website = "https://www.docker.com/" + +[docker_compose_yml] +tool = "Docker Compose" +install = "docker-compose build" +run = "docker-compose up" +test = "docker-compose run --rm {service} pytest" +shield = "https://img.shields.io/badge/Docker_Compose-2CA5E0.svg?style={badge_style}&logo=docker&logoColor=white" +website = "https://docs.docker.com/compose/" + +[docker_compose_yaml] +tool = "Docker Compose" +install = "docker-compose build" +run = "docker-compose up" +test = "docker-compose run --rm {service} pytest" +shield = "https://img.shields.io/badge/Docker_Compose-2CA5E0.svg?style={badge_style}&logo=docker&logoColor=white" +website = "https://docs.docker.com/compose/" + +[makefile] +tool = "Make" +install = "make install" +run = "make run" +test = "make test" +shield = "https://img.shields.io/badge/GNU_Make-A8B9CC.svg?style={badge_style}&logo=gnu-make&logoColor=white" +website = "https://www.gnu.org/software/make/" + +[poetry_lock] +tool = "Poetry" +install = "poetry install" +run = "poetry run python {entrypoint}" +test = "poetry run pytest" +shield = "https://img.shields.io/badge/Poetry-3B5526.svg?style={badge_style}&logo=poetry&logoColor=white" +website = "https://python-poetry.org/" + +[pipfile_lock] +tool = "Pipenv" +install = "pipenv install" +run = "pipenv shell && pipenv run python {entrypoint}" +test = "pipenv shell && pipenv run pytest" +shield = "https://img.shields.io/badge/Pipenv-3775A9.svg?style={badge_style}&logo=pypi&logoColor=white" +website = "https://pipenv.pypa.io/" + +[requirements_txt] +tool = "Pip" +install = "pip install -r requirements.txt" +run = "python {entrypoint}" +test = "pytest" +shield = "https://img.shields.io/badge/pip-3775A9.svg?style={badge_style}&logo=pypi&logoColor=white" +website = "https://pip.pypa.io/" + +[environment_yaml] 
+tool = "Conda" +install = "conda env create -f environment.yaml" +run = "conda activate && python {entrypoint}" +test = "conda activate && pytest" +shield = "https://img.shields.io/badge/conda-342B029.svg?style={badge_style}&logo=anaconda&logoColor=white" +website = "https://docs.conda.io/" + +[environment_yml] +tool = "Conda" +install = "conda env create -f environment.yaml" +run = "conda activate && python {entrypoint}" +test = "conda activate && pytest" +shield = "https://img.shields.io/badge/conda-342B029.svg?style={badge_style}&logo=anaconda&logoColor=white" +website = "https://docs.conda.io/" + +[pyproject_toml_flit] +tool = "Flit" +install = "flit install" +run = "python -m {entrypoint}" +test = "python -m pytest" +shield = "https://img.shields.io/badge/Flit-3B5526.svg?style={badge_style}&logo=flit&logoColor=white" +website = "https://flit.readthedocs.io/" + +[pyproject_toml_pdm] +tool = "PDM" +install = "pdm install" +run = "pdm run python {entrypoint}" +test = "pdm run pytest" +shield = "https://img.shields.io/badge/PDM-AC75D7.svg?style={badge_style}&logo=PDM&logoColor=white" +website = "https://pdm.fming.dev/" + +[package_json] +tool = "Npm" +install = "npm install" +run = "npm start" +test = "npm test" +shield = "https://img.shields.io/badge/npm-CB3837.svg?style={badge_style}&logo=npm&logoColor=white" +website = "https://www.npmjs.com/" + +[package-lock_json] +tool = "npm" +install = "npm ci" +run = "npm start" +test = "npm test" +shield = "https://img.shields.io/badge/npm-CB3837.svg?style={badge_style}&logo=npm&logoColor=white" +website = "https://www.npmjs.com/" + +[yarn_lock] +tool = "Yarn" +install = "yarn install" +run = "yarn start" +test = "yarn test" +shield = "https://img.shields.io/badge/yarn-2C8EBB.svg?style={badge_style}&logo=yarn&logoColor=white" +website = "https://yarnpkg.com/" + +[gemfile_lock] +tool = "Bundler" +install = "bundle install" +run = "bundle exec ruby {entrypoint}" +test = "bundle exec rspec" +shield = 
"https://img.shields.io/badge/Ruby-CC342D.svg?style={badge_style}&logo=ruby&logoColor=white" +website = "https://bundler.io/" + +[pom_xml] +tool = "Maven" +install = "mvn clean install" +run = "mvn exec:java -Dexec.mainClass=\"{entrypoint}\"" +test = "mvn test" +shield = "https://img.shields.io/badge/Apache_Maven-C71A36.svg?style={badge_style}&logo=apache-maven&logoColor=white" +website = "https://maven.apache.org/" + +[maven_pom] +tool = "Maven" +install = "mvn install" +run = "mvn spring-boot:run" # If it's a Spring Boot app, adjust accordingly +test = "mvn test" +shield = "https://img.shields.io/badge/Maven-DC5226.svg?style={badge_style}&logo=apache-maven&logoColor=white" +website = "https://maven.apache.org/" + +[gradle_build] +tool = "Gradle" +install = "./gradlew install" +run = "./gradlew run" +test = "./gradlew test" +shield = "https://img.shields.io/badge/Gradle-02303A.svg?style={badge_style}&logo=gradle&logoColor=white" +website = "https://gradle.org/" + +[cargo_toml] +tool = "Cargo" +install = "cargo build" +run = "cargo run" +test = "cargo test" +shield = "https://img.shields.io/badge/Rust-000000.svg?style={badge_style}&logo=rust&logoColor=white" +website = "https://www.rust-lang.org/" + +[cargo_lock] +tool = "Cargo" +install = "cargo build" +run = "cargo run" +test = "cargo test" +shield = "https://img.shields.io/badge/Cargo-000000.svg?style={badge_style}&logo=rust&logoColor=white" +website = "https://doc.rust-lang.org/cargo/" + +[rust-toolchain_toml] +tool = "Rust" +install = "rustup toolchain install" +run = "cargo run" +test = "cargo test" +shield = "https://img.shields.io/badge/Rust-000000.svg?style={badge_style}&logo=rust&logoColor=white" +website = "https://www.rust-lang.org/" + +[go_mod] +tool = "Go Modules" +install = "go get ./..." +run = "go run {entrypoint}" +test = "go test ./..." 
+shield = "https://img.shields.io/badge/Go-00ADD8.svg?style={badge_style}&logo=go&logoColor=white" +website = "https://golang.org/" + +[go_sum] +tool = "Go Modules" +install = "go get ./..." +run = "go run {entrypoint}" +test = "go test ./..." +shield = "https://img.shields.io/badge/Go-00ADD8.svg?style={badge_style}&logo=go&logoColor=white" +website = "https://golang.org/" + +[gopkg_lock] +tool = "Go Modules" +install = "go get ./..." +run = "go run {entrypoint}" +test = "go test ./..." +shield = "https://img.shields.io/badge/Go-00ADD8.svg?style={badge_style}&logo=go&logoColor=white" +website = "https://golang.org/" + +[tsconfig_json] +tool = "TypeScript" +install = "npm install" +run = "ts-node {entrypoint}" +test = "npm test" +shield = "https://img.shields.io/badge/TypeScript-007ACC.svg?style={badge_style}&logo=typescript&logoColor=white" +website = "https://www.typescriptlang.org/" + +[deno_json] +tool = "Deno" +install = "deno cache {entrypoint}" +run = "deno run {entrypoint}" +test = "deno test" +shield = "https://img.shields.io/badge/Deno-000000.svg?style={badge_style}&logo=deno&logoColor=white" +website = "https://deno.land/" + +[streamlit] +tool = "Streamlit" +install = "pip install -r requirements.txt" +run = "streamlit run {entrypoint}" +test = "pytest" +shield = "https://img.shields.io/badge/Streamlit-FF4B4B.svg?style={badge_style}&logo=streamlit&logoColor=white" +website = "https://streamlit.io/" + +[sbt_build] +tool = "sbt" +install = "sbt compile" +run = "sbt run" +test = "sbt test" +shield = "https://img.shields.io/badge/sbt-DC322F.svg?style={badge_style}&logo=scala&logoColor=white" +website = "https://www.scala-sbt.org/" + +[build_sbt] +tool = "sbt" +install = "sbt compile" +run = "sbt run" +test = "sbt test" +shield = "https://img.shields.io/badge/sbt-DC322F.svg?style={badge_style}&logo=scala&logoColor=white" +website = "https://www.scala-sbt.org/" + +[mix_exs] +tool = "Mix" +install = "mix deps.get" +run = "mix run" +test = "mix test" +shield = 
"https://img.shields.io/badge/Elixir-4B275F.svg?style={badge_style}&logo=elixir&logoColor=white" +website = "https://elixir-lang.org/" + +[mix_lock] +tool = "Mix" +install = "mix deps.get" +run = "mix run" +test = "mix test" +shield = "https://img.shields.io/badge/Mix-6C1F87.svg?style={badge_style}&logo=elixir&logoColor=white" +website = "https://elixir-lang.org/getting-started/mix-otp/introduction-to-mix.html" + +[rebar_config] +tool = "Rebar3" +install = "rebar3 compile" +run = "rebar3 shell" +test = "rebar3 eunit" +shield = "https://img.shields.io/badge/Erlang-A90533.svg?style={badge_style}&logo=erlang&logoColor=white" +website = "https://www.rebar3.org/" + +[stack_yaml] +tool = "Stack" +install = "stack build" +run = "stack exec" +test = "stack test" +shield = "https://img.shields.io/badge/Haskell-5D4F85.svg?style={badge_style}&logo=haskell&logoColor=white" +website = "https://docs.haskellstack.org/" + +[gatsby-config_js] +tool = "Gatsby" +install = "npm install" +run = "gatsby develop" +test = "npm test" +shield = "https://img.shields.io/badge/Gatsby-663399.svg?style={badge_style}&logo=gatsby&logoColor=white" +website = "https://www.gatsbyjs.com/" + +[next_config_js] +tool = "Next.js" +install = "npm install" +run = "npm run dev" +test = "npm test" +shield = "https://img.shields.io/badge/Next.js-000000.svg?style={badge_style}&logo=next.js&logoColor=white" +website = "https://nextjs.org/" + +[nuxt_config_js] +tool = "Nuxt.js" +install = "npm install" +run = "npm run dev" +test = "npm test" +shield = "https://img.shields.io/badge/Nuxt.js-00C58E.svg?style={badge_style}&logo=nuxt.js&logoColor=white" +website = "https://nuxtjs.org/" + +[flake_nix] +tool = "Nix" +install = "nix-shell --run 'pip install -r requirements.txt'" +run = "nix-shell --run 'python {entrypoint}'" +test = "nix-shell --run 'pytest'" +shield = "https://img.shields.io/badge/Nix-5277C3.svg?style={badge_style}&logo=nixos&logoColor=white" +website = "https://nixos.org/" + +[composer_json] +tool = 
"Composer" +install = "composer install" +run = "php {entrypoint}" +test = "composer test" +shield = "https://img.shields.io/badge/Composer-885630.svg?style={badge_style}&logo=composer&logoColor=white" +website = "https://getcomposer.org/" + +[composer_lock] +tool = "Composer" +install = "composer install" +run = "php {entrypoint}" +test = "composer test" +shield = "https://img.shields.io/badge/Composer-885630.svg?style={badge_style}&logo=composer&logoColor=white" +website = "https://getcomposer.org/" + +[rubocop_yml] +tool = "RuboCop" +install = "gem install rubocop" +run = "rubocop" +test = "rubocop" +shield = "https://img.shields.io/badge/RuboCop-000000.svg?style={badge_style}&logo=rubygems&logoColor=white" +website = "https://rubocop.org/" + +[eslintrc_js] +tool = "ESLint" +install = "npm install eslint" +run = "eslint ." +test = "eslint ." +shield = "https://img.shields.io/badge/ESLint-4B32C3.svg?style={badge_style}&logo=eslint&logoColor=white" +website = "https://eslint.org/" + +[prettierrc_js] +tool = "Prettier" +install = "npm install prettier" +run = "prettier --write ." +test = "prettier --check ." 
+shield = "https://img.shields.io/badge/Prettier-F7B93E.svg?style={badge_style}&logo=prettier&logoColor=white" +website = "https://prettier.io/" + +[angular_json] +tool = "Angular" +install = "npm install" +run = "ng serve" +test = "ng test" +shield = "https://img.shields.io/badge/Angular-0F0F11.svg?style={badge_style}&logo=Angular&logoColor=white" +website = "https://angular.io/" + +[vue_config_js] +tool = "Vue.js" +install = "npm install" +run = "npm run serve" +test = "npm test" +shield = "https://img.shields.io/badge/Vue.js-4FC08D.svg?style={badge_style}&logo=vue.js&logoColor=white" +website = "https://vuejs.org/" + +[svelte_config_js] +tool = "Svelte" +install = "npm install" +run = "npm run dev" +test = "npm test" +shield = "https://img.shields.io/badge/Svelte-FF3E00.svg?style={badge_style}&logo=Svelte&logoColor=white" +website = "https://svelte.dev/" + +[pubspec_yaml] +tool = "Flutter" +install = "flutter pub get" +run = "flutter run" +test = "flutter test" +shield = "https://img.shields.io/badge/Flutter-02569B.svg?style={badge_style}&logo=flutter&logoColor=white" +website = "https://flutter.dev/" + +[project_clj] +tool = "Leiningen" +install = "lein deps" +run = "lein run" +test = "lein test" +shield = "https://img.shields.io/badge/Leiningen-2E2E2E.svg?style={badge_style}&logo=clojure&logoColor=white" +website = "https://leiningen.org/" + +[cake_build] +tool = "Cake" +install = "dotnet cake" +run = "dotnet cake" +test = "dotnet cake --target=Test" +shield = "https://img.shields.io/badge/Cake-A41D1D.svg?style={badge_style}&logo=cakebuild&logoColor=white" +website = "https://cakebuild.net/" + +[project_json] +tool = ".NET CLI" +install = "dotnet restore" +run = "dotnet run --project {entrypoint}" +test = "dotnet test" +shield = "https://img.shields.io/badge/.NET-512BD4.svg?style={badge_style}&logo=dotnet&logoColor=white" +website = "https://dotnet.microsoft.com/" + +[swift_package] +tool = "Swift Package Manager" +install = "swift build" +run = "swift run" 
+test = "swift test" +shield = "https://img.shields.io/badge/Swift-F05138.svg?style={badge_style}&logo=Swift&logoColor=white" +website = "https://swift.org/package-manager/" + +[xcodebuild] +tool = "Xcode" +install = "xcodebuild" +run = "xcodebuild -scheme {scheme} build" +test = "xcodebuild test" +shield = "https://img.shields.io/badge/Xcode-147EFB.svg?style={badge_style}&logo=xcode&logoColor=white" +website = "https://developer.apple.com/xcode/" + +[build_xml] +tool = "Ant" +install = "ant compile" +run = "ant run" +test = "ant test" +shield = "https://img.shields.io/badge/Ant-A81C7D.svg?style={badge_style}&logo=apache-ant&logoColor=white" +website = "https://ant.apache.org/" + +[junit_config] +tool = "JUnit" +install = "javac *.java" +run = "java {entrypoint}" +test = "java org.junit.runner.JUnitCore {entrypoint}" +shield = "https://img.shields.io/badge/JUnit-25A162.svg?style={badge_style}&logo=junit5&logoColor=white" +website = "https://junit.org/junit5/" + +[gradle_kts] +tool = "Gradle" +install = "./gradlew build" +run = "./gradlew run" +test = "./gradlew test" +shield = "https://img.shields.io/badge/Gradle-02303A.svg?style={badge_style}&logo=gradle&logoColor=white" +website = "https://gradle.org/" + +[gradle_properties] +tool = "Gradle" +install = "./gradlew build" +run = "./gradlew run" +test = "./gradlew test" +shield = "https://img.shields.io/badge/Gradle-02303A.svg?style={badge_style}&logo=gradle&logoColor=white" +website = "https://gradle.org/" + +[gradle_kotlin_dsl] +tool = "Gradle" +install = "./gradlew build" +run = "./gradlew run" +test = "./gradlew test" +shield = "https://img.shields.io/badge/Gradle-02303A.svg?style={badge_style}&logo=gradle&logoColor=white" +website = "https://gradle.org/" + +[conanfile_txt] +tool = "Conan" +install = "conan install ." +run = "conan build ." +test = "conan test ." 
+shield = "https://img.shields.io/badge/Conan-3F9FFF.svg?style={badge_style}&logo=conan&logoColor=white" +website = "https://conan.io/" + +[bazel] +tool = "Bazel" +install = "bazel build //:dependencies" +run = "bazel run //:app" +test = "bazel test //..." +shield = "https://img.shields.io/badge/Bazel-66B831.svg?style={badge_style}&logo=bazel&logoColor=white" +website = "https://bazel.build/" + +[meson_build] +tool = "Meson" +install = "meson setup builddir" +run = "meson compile -C builddir" +test = "meson test -C builddir" +shield = "https://img.shields.io/badge/Meson-2E2E2E.svg?style={badge_style}&logo=meson&logoColor=white" +website = "https://mesonbuild.com/" diff --git a/readmeai/config/validators.py b/readmeai/config/validators.py deleted file mode 100644 index b149b105..00000000 --- a/readmeai/config/validators.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -Pydantic validators for the CLI inputs and package settings. -""" - -import re -from pathlib import Path -from typing import Optional, Union -from urllib.parse import urlparse, urlsplit - -from readmeai._exceptions import GitValidationError -from readmeai.core.logger import Logger -from readmeai.services.git import GitHost - -_logger = Logger(__name__) - - -class GitValidator: - """Validator class for GitSettings.""" - - @classmethod - def validate_repository(cls, value: Union[str, Path]) -> Union[str, Path]: - """Validate the repository URL or path.""" - value_str = str(value) - if ( - any(service.value in value_str for service in GitHost) is False - and not Path(value_str).is_dir() - ): - raise GitValidationError(value) - - if isinstance(value, str): - path = Path(value) - if path.is_dir(): - return value - try: - parsed_url = urlparse(value) - if parsed_url.scheme in ["http", "https"] and any( - service in parsed_url.netloc for service in GitHost - ): - return value - - except Exception as exc: - _logger.error(f"Error validating repository: {exc}") - raise GitValidationError(value) from exc - - elif 
isinstance(value, Path) and value.is_dir(): - return value - - raise GitValidationError(value) - - @classmethod - def validate_full_name(cls, value: Optional[str], values: dict) -> str: - """Validator for getting the full name of the repository.""" - url_or_path = values.get("repository") - - path = ( - url_or_path if isinstance(url_or_path, Path) else Path(url_or_path) - ) - if path.exists(): - return str(path.name) - - patterns = { - GitHost.GITHUB: r"https?://github.com/([^/]+)/([^/]+)", - GitHost.GITLAB: r"https?://gitlab.com/([^/]+)/([^/]+)", - GitHost.BITBUCKET: r"https?://bitbucket.org/([^/]+)/([^/]+)", - } - - for _, pattern in patterns.items(): - match = re.match(pattern, url_or_path) - if match: - user_name, repo_name = match.groups() - return f"{user_name}/{repo_name}" - - raise GitValidationError(url_or_path) - - @classmethod - def set_host_domain(cls, value: Optional[str], values: dict) -> str: - """Sets the Git service host from the repository provided.""" - repo = values.get("repository") - if isinstance(repo, Path) or ( - isinstance(repo, str) and Path(repo).is_dir() - ): - return GitHost.LOCAL - - parsed_url = urlparse(str(repo)) - for service in GitHost: - if service in parsed_url.netloc: - return service - - return GitHost.LOCAL - - @classmethod - def set_host(cls, value: Optional[str], values: dict) -> str: - """Set the host based on the repository URL.""" - repo = values.get("repository") - if isinstance(repo, Path) or ( - isinstance(repo, str) and Path(repo).is_dir() - ): - return GitHost.LOCAL.value.lower() - parsed_url = urlsplit(repo) - return parsed_url.netloc.split(".")[0] - - @classmethod - def set_name(cls, value: Optional[str], values: dict) -> str: - """Sets the repository name from the repository provided.""" - repo = values.get("repository") - if isinstance(repo, Path): - return repo.name - elif isinstance(repo, str): - parsed_url = urlsplit(repo) - name = parsed_url.path.split("/")[-1] - return name.removesuffix(".git") - return 
"n/a" diff --git a/readmeai/core/logger.py b/readmeai/core/logger.py index a3375177..3d60f8c3 100644 --- a/readmeai/core/logger.py +++ b/readmeai/core/logger.py @@ -4,7 +4,7 @@ import logging import sys -from typing import Dict +import typing LOG_LEVEL_EMOJIS = { "DEBUG": "โš™๏ธŽ", @@ -17,7 +17,6 @@ LOG_LEVEL_COLORS = { "DEBUG": "\033[34m", "INFO": "\033[35m", - # "INFO": "\033[32m, "WARNING": "\033[33m", "ERROR": "\033[31m", "CRITICAL": "\033[31m\033[1m", @@ -28,7 +27,9 @@ class CustomFormatter(logging.Formatter): - """Custom log formatter with color and emoji support.""" + """ + Custom logging formatter with color and emoji support. + """ def format(self, record) -> str: """Format the log record.""" @@ -39,9 +40,11 @@ def format(self, record) -> str: class Logger: - """Custom logger implementation.""" + """ + Custom logger class for the readme-ai package. + """ - _instances: Dict[str, "Logger"] = {} + _instances: typing.ClassVar[dict[str, "Logger"]] = {} def __new__(cls, name, level="DEBUG"): """Creates a new logger instance.""" diff --git a/readmeai/core/models.py b/readmeai/core/models.py index 62f11690..5710eade 100644 --- a/readmeai/core/models.py +++ b/readmeai/core/models.py @@ -4,13 +4,13 @@ import asyncio from abc import ABC, abstractmethod +from collections.abc import AsyncGenerator, Generator from contextlib import asynccontextmanager -from typing import Any, Dict, Generator, List, Tuple, Union +from typing import Any import aiohttp -from readmeai.cli.options import ModelOptions as llms -from readmeai.config.settings import ConfigLoader +from readmeai.config.settings import ConfigLoader, ModelOptions from readmeai.core.logger import Logger from readmeai.models.prompts import ( get_prompt_context, @@ -21,44 +21,54 @@ class BaseModelHandler(ABC): - """Abstract base class for Large Language Model (LLM) API handlers.""" + """ + Abstract base class for Large Language Model (LLM) API handlers. 
+ """ def __init__(self, config_loader: ConfigLoader) -> None: - """Initializes the LLM handler with configuration and logging.""" + """ + Initializes the LLM handler with configuration and logging. + """ self._logger = Logger(__name__) self._session: aiohttp.ClientSession = None self.config = config_loader.config self.prompts = config_loader.prompts - self.sys_content = self.config.api.content + self.system_message = self.config.api.system_message self.rate_limit = self.config.api.rate_limit self.rate_limit_semaphore = asyncio.Semaphore(self.rate_limit) @abstractmethod - async def _model_settings(self): - """Initializes the LLM settings for the specific API implementation.""" + async def _model_settings(self) -> None: + """ + Initializes the LLM settings for the specific API implementation. + """ ... @abstractmethod - async def _build_payload(self, prompt: str, tokens: int) -> Dict[str, Any]: - """Builds the payload for the POST request to the LLM API.""" + async def _build_payload(self, prompt: str, tokens: int) -> dict[str, Any]: + """ + Builds the payload for the POST request to the LLM API. + """ ... @abstractmethod async def _make_request( self, - index: str, - prompt: str, - tokens: int, - raw_files: List[Tuple[str, str]] = None, - ) -> Tuple[str, str]: - """Handles LLM API response and returns the generated text.""" + index: str | None, + prompt: str | None, + tokens: int | None, + raw_files: list[tuple[str, str]] | None, + ) -> list[tuple[str, str]]: + """ + Handles LLM API response and returns the generated text. + """ ... 
@asynccontextmanager - async def use_api(self) -> Generator: + async def use_api(self) -> AsyncGenerator[Any, None]: """Async context manager for managing the lifecycle of the HTTP client.""" async with aiohttp.ClientSession( - timeout=aiohttp.ClientTimeout(total=None) + timeout=aiohttp.ClientTimeout(total=None), ) as session: self._session = session try: @@ -70,63 +80,52 @@ async def close(self) -> None: """Closes the HTTP client session.""" if self._session: await self._session.close() - self._session = None + self._logger.debug("HTTP client closed.") else: self._logger.debug("HTTP client already closed.") async def batch_request( self, - dependencies: List[str], - raw_files: List[Tuple[str, str]], - ) -> List[str]: - """Generates a batch of prompts and processes the responses. - - Parameters - ---------- - dependencies - List of dependencies for the project. - raw_files - List of tuples containing all file paths and file contents. - - Returns - ------- - List of generated text responses from the LLM API. - """ - if self.config.llm.api == llms.OFFLINE.name: + dependencies: list[str], + raw_files: list[tuple[str, str]], + ) -> list[tuple[str, str]]: + """ + Generates a batch of prompts and processes the responses. 
+ """ + if self.config.llm.api == ModelOptions.OFFLINE.name: return await self._make_request( - index=None, prompt=None, tokens=None, raw_files=raw_files + index=None, + prompt=None, + tokens=None, + raw_files=raw_files, ) else: raw_files = [ file for file in raw_files if not file[0].endswith(".lock") ] summaries_prompts = await set_summary_context( - self.config, dependencies, raw_files + self.config, + dependencies, + raw_files, ) summaries_responses = await self._batch_prompts(summaries_prompts) additional_prompts = await set_additional_contexts( - self.config, dependencies, summaries_responses + self.config, + dependencies, + summaries_responses, ) additional_responses = await self._batch_prompts(additional_prompts) return summaries_responses + additional_responses async def _batch_prompts( - self, prompts: List[Union[str, Tuple[str, str]]], batch_size: int = 10 - ) -> List[str]: - """Processes a batch of prompts and returns the generated text. - - Parameters - ---------- - prompts - List of prompts to be processed. - batch_size, optional - Max number of prompts to process in a single batch. Defaults to 10. - - Returns - ------- - List of generated text responses from the LLM API. + self, + prompts: list[str | tuple[str, str]], + batch_size: int = 10, + ) -> list[tuple[str, str]]: + """ + Processes a batch of prompts and returns the generated text. """ responses = [] @@ -140,75 +139,64 @@ async def _batch_prompts( return responses def _generate_batches( - self, items: List[Any], batch_size: int - ) -> Generator[List[Any], None, None]: - """Generates batches of items to be processed. - - Parameters - ---------- - items - List of items to be processed. - batch_size - Maximum number of items to process in a single batch. - - Returns - ------- - List of batches of items to be processed. - - Yields - ------ - List of items to be processed. 
+ self, + items: list[Any], + batch_size: int, + ) -> Generator[list[Any], None, None]: + """ + Generates batches of items to be processed. """ for i in range(0, len(items), batch_size): yield items[i : i + batch_size] - async def _process_batch(self, prompt: Dict[str, Any]) -> str: - """Processes a single prompt and returns the generated text. - - Parameters - ---------- - prompt - Prompt to be processed by the LLM API. - - Returns - ------- - Generated text response from the LLM API. + async def _process_batch(self, prompt: dict[str, Any]) -> Any: + """ + Processes a single prompt and returns the generated text. """ if prompt["type"] == "file_summary": return await self._make_request_code_summary(prompt["context"]) else: formatted_prompt = get_prompt_context( - self.prompts, prompt["type"], prompt["context"] + self.prompts, + prompt["type"], + prompt["context"], + ) + tokens = update_max_tokens( + self.config.llm.tokens, + formatted_prompt, ) - tokens = update_max_tokens(self.tokens, formatted_prompt) _, summary = await self._make_request( - prompt["type"], formatted_prompt, tokens + prompt["type"], + formatted_prompt, + tokens, + None, ) return summary async def _make_request_code_summary( - self, file_context: List[Tuple[str, str]] - ) -> List[Tuple[str, str]]: - """Generates code summaries for each file in the project. - - Parameters - ---------- - file_context - List of tuples containing all file paths and file contents. - - Returns - ------- - List of generated code summaries for each file in the project. + self, + file_context: list[tuple[str, str]], + ) -> list[tuple[str, str]]: + """ + Generates code summaries for each file in the project. 
""" summary_text = [] for file_path, file_content in file_context["file_summary"]: prompt = self.prompts["prompts"]["file_summary"].format( - self.config.md.tree, file_path, file_content + self.config.md.tree, + file_path, + file_content, + ) + tokens = update_max_tokens( + self.config.llm.tokens, + prompt, ) - tokens = update_max_tokens(self.tokens, prompt) _, summary_or_error = await self._make_request( - file_path, prompt, tokens + file_path, + prompt, + tokens, + None, ) summary_text.append((file_path, summary_or_error)) diff --git a/readmeai/core/parsers.py b/readmeai/core/parsers.py index 74f48ba0..3e891b6f 100644 --- a/readmeai/core/parsers.py +++ b/readmeai/core/parsers.py @@ -1,28 +1,30 @@ -"""Abstract base class for dependency file parsers.""" +""" +Abstract base class for dependency file parsers. +""" from abc import ABC, abstractmethod -from typing import List from readmeai.core.logger import Logger class BaseFileParser(ABC): - """Abstract base class for dependency file parsers.""" + """ + Abstract base class for dependency file parsers. + """ def __init__(self) -> None: - """Initializes the handler with given configuration.""" self._logger = Logger(__name__) @abstractmethod - def parse(self, content: str) -> List[str]: - """Parses content of dependency file and returns list of dependencies.""" + def parse(self, content: str) -> list[str]: + """Parses content of file and return list of dependencies.""" ... 
def log_error(self, message: str): """Logs error message when parsing fails.""" self._logger.error(f"Error parsing dependency file {message}") - def handle_parsing_error(self, error: Exception) -> List[str]: + def handle_parsing_error(self, error: Exception) -> list[str]: """Standardized error handling for parsing exceptions.""" self.log_error(str(error)) return [] diff --git a/readmeai/core/preprocess.py b/readmeai/core/preprocess.py index f4738694..fe38a923 100644 --- a/readmeai/core/preprocess.py +++ b/readmeai/core/preprocess.py @@ -2,9 +2,9 @@ Pre-processes the input repository files and extracts metadata. """ +from collections.abc import Generator from dataclasses import dataclass, field from pathlib import Path -from typing import Generator, List, Tuple from readmeai.config.settings import ConfigLoader from readmeai.core.logger import Logger @@ -16,14 +16,16 @@ @dataclass class FileContext: - """Data class to store file contents and metadata.""" + """ + Data class to store file contents and metadata. 
+ """ file_path: Path file_name: str file_ext: str content: str language: str = field(init=False) - dependencies: List[str] = field(default_factory=list) + dependencies: list[str] = field(default_factory=list) def __post_init__(self): """Initializes the FileContext class.""" @@ -40,13 +42,14 @@ def __init__(self, config_loader: ConfigLoader): """Initializes the RepositoryProcessor class.""" self._logger = Logger(__name__) self.config_loader = config_loader - self.blacklist = config_loader.blacklist.get("blacklist") + self.ignore_list = config_loader.ignore_list.get("ignore_list") self.commands = config_loader.commands self.languages = config_loader.languages.get("language_names") self.parser_files = config_loader.parsers.get("parsers") def create_file_data( - self, file_info: Tuple[str, Path, str] + self, + file_info: tuple[str, Path, str], ) -> FileContext: """Creates a FileContext instance from the file information.""" file_name, file_path, content = file_info @@ -57,29 +60,23 @@ def create_file_data( file_ext="", ) - def extract_dependencies(self, file_data: FileContext) -> List[str]: + def extract_dependencies(self, file_data: FileContext) -> list[str]: """Extracts the dependency file contents using the factory pattern.""" parsers = parser_handler() - if file_data.file_name not in parsers: - return [] - parser = parsers.get(file_data.file_name) - dependencies = parser.parse(content=file_data.content) - - self._logger.info( - f"Dependency file found: {file_data.file_name}:\n{dependencies}" - ) - - return dependencies + if file_data.file_name not in parsers or parser is None: + return [] + return parser.parse(content=file_data.content) - def generate_contents(self, repo_path: str) -> List[FileContext]: + def generate_contents(self, repo_path: Path | str) -> list[FileContext]: """Generates a List of Dict of file information.""" if isinstance(repo_path, str): repo_path = Path(repo_path) return [file_data for file_data in self.generate_file_info(repo_path)] def 
generate_file_info( - self, repo_path: Path + self, + repo_path: Path, ) -> Generator[FileContext, None, None]: """ Generates FileContext instances for each file in the repository. @@ -91,12 +88,11 @@ def generate_file_info( if file_data: yield file_data - def get_dependencies(self, contents: List[FileContext]) -> List[str]: + def get_dependencies(self, contents: list[FileContext]) -> list[str]: """Returns a list of dependencies.""" try: - dependency_dict = {} dependencies = set() - parser_files = self.config_loader.parsers.get("parsers") + parser_files = self.parser_files for file_data in contents: dependencies.update(file_data.dependencies) @@ -105,48 +101,46 @@ def get_dependencies(self, contents: List[FileContext]) -> List[str]: if file_data.file_name in parser_files["parsers"]: dependencies.add(file_data.file_name) - dependency_dict[ - file_data.file_name - ] = file_data.dependencies - if _github_actions_path in str(file_data.file_path): dependencies.add("github actions") - return list(dependencies), dependency_dict + return list(dependencies) except Exception as exc: self._logger.error(f"Error getting dependencies: {exc}") - return [], {} + return [] def _filter_file(self, file_path: Path) -> bool: """ Determines if a file should be ignored based on configurations. 
""" - blacklist = self.config_loader.blacklist["blacklist"] is_file_ignored = any( [ - file_path.name in blacklist["files"], - file_path.suffix.lstrip(".") in blacklist["extensions"], + file_path.name in self.ignore_list["files"], + file_path.suffix.lstrip(".") in self.ignore_list["extensions"], any( - dir in file_path.parts for dir in blacklist["directories"] + dir in file_path.parts + for dir in self.ignore_list["directories"] ), - ] + ], ) if is_file_ignored and str(file_path.name) in self.parser_files: return False - return not file_path.is_file() or is_file_ignored def _language_mapper( - self, contents: List[FileContext] - ) -> List[FileContext]: + self, + contents: list[FileContext], + ) -> list[FileContext]: """Maps file extensions to their programming languages.""" for content in contents: content.language = self.languages.get(content.file_ext, "").lower() return contents def _process_file_path( - self, file_path: Path, repo_path: Path + self, + file_path: Path, + repo_path: Path, ) -> FileContext: """ Processes an individual file path and returns FileContext. 
@@ -159,11 +153,14 @@ def _process_file_path( file_ext="", content="", ) - try: if file_path.is_dir(): - return - + return FileContext( + file_path=relative_path, + file_name=file_path.name, + file_ext="", + content="", + ) with file_path.open(encoding="utf-8") as file: content = file.read() @@ -174,35 +171,31 @@ def _process_file_path( content=content, ) file_data.dependencies = self.extract_dependencies(file_data) - try: - file_data.language = self.languages.get( - file_data.file_ext, self.languages.get("default") - ).lower() - except Exception: - file_data.language = None - return file_data except (OSError, UnicodeDecodeError) as exc: self._logger.warning(f"Error reading file {file_path}: {exc}") + return FileContext( + file_path=relative_path, + file_name=file_path.name, + file_ext="", + content="", + ) -def preprocessor( - conf: ConfigLoader, temp_dir: str -) -> Tuple[List[FileContext], List[str], List[Tuple[str, str]], str]: +def preprocessor(conf: ConfigLoader, tmp_dir: str) -> tuple: """Processes the repository files and returns the context.""" repo_processor = RepositoryProcessor(conf) - repo_context = repo_processor.generate_contents(temp_dir) + repo_context = repo_processor.generate_contents(tmp_dir) repo_context = repo_processor._language_mapper(repo_context) - dependencies, dependency_dict = repo_processor.get_dependencies( - repo_context - ) + deps = repo_processor.get_dependencies(repo_context) raw_files = [ (str(context.file_path), context.content) for context in repo_context ] conf.config.md.tree = MarkdownBuilder( - conf, dependencies, raw_files, temp_dir + conf, + deps, + raw_files, + tmp_dir, ).md_tree - _logger = Logger(__name__) - _logger.info(f"Dependencies: {dependency_dict}") - return dependencies, raw_files + return deps, raw_files diff --git a/readmeai/core/utils.py b/readmeai/core/utils.py index b0d5f6b3..15a8a248 100644 --- a/readmeai/core/utils.py +++ b/readmeai/core/utils.py @@ -5,7 +5,7 @@ import os from enum import Enum -from 
readmeai.cli.options import ModelOptions as llms +from readmeai.config.settings import ModelOptions as llms from readmeai.core.logger import Logger _logger = Logger(__name__) @@ -16,29 +16,34 @@ class SecretKey(str, Enum): Enum class to store the environment variable keys for the LLM API services. """ + # ANTHROPIC_API_KEY = "ANTHROPIC_API_KEY" + GOOGLE_API_KEY = "GOOGLE_API_KEY" OLLAMA_HOST = "OLLAMA_HOST" OPENAI_API_KEY = "OPENAI_API_KEY" - GOOGLE_API_KEY = "GOOGLE_API_KEY" def _set_offline(message: str) -> tuple: """Set the LLM service to offline mode.""" - _logger.warning(f"{message}\n\t\t\t\tGenerating README without LLM API\n") + _logger.warning( + f"{message}\n\t\n... readme-ai engine switched to offline mode ...\n", + ) return llms.OFFLINE.name, llms.OFFLINE.name def get_environment(llm_api: str = "", llm_model: str = "") -> tuple: """Set LLM environment variables based on the specified LLM service.""" default_models = { - llms.OPENAI.name: "gpt-3.5-turbo", + # llms.ANTHROPIC.name: "claude-3-5-sonnet", + llms.GEMINI.name: "gemini-1.5-flash", llms.OLLAMA.name: "mistral", - llms.GEMINI.name: "gemini-pro", + llms.OPENAI.name: "gpt-3.5-turbo", } env_keys = { - llms.OPENAI.name: SecretKey.OPENAI_API_KEY.value, - llms.OLLAMA.name: SecretKey.OLLAMA_HOST.value, + # llms.ANTHROPIC.name: SecretKey.ANTHROPIC_API_KEY.value, llms.GEMINI.name: SecretKey.GOOGLE_API_KEY.value, + llms.OLLAMA.name: SecretKey.OLLAMA_HOST.value, + llms.OPENAI.name: SecretKey.OPENAI_API_KEY.value, } if llm_api and llm_api not in env_keys: @@ -46,7 +51,7 @@ def get_environment(llm_api: str = "", llm_model: str = "") -> tuple: return _set_offline("\n\n\t\t\t\tOffline mode enabled by user") _logger.warning("Invalid LLM service provided to CLI.") return _set_offline( - "\n\n\t\t...No LLM API settings found in environment..." 
+ "\n\n\t...No LLM API settings found in environment...", ) # If OPENAI_API_KEY does not exist in env when --api OPENAI is set @@ -55,7 +60,7 @@ def get_environment(llm_api: str = "", llm_model: str = "") -> tuple: and SecretKey.OPENAI_API_KEY.value not in os.environ ): return _set_offline( - "OPENAI_API_KEY not found in environment. Switching to offline mode." + "OPENAI_API_KEY not found in environment. Switching to offline mode.", ) # If GOOGLE_API_KEY does not exist in env when --api gemini is set @@ -64,7 +69,7 @@ def get_environment(llm_api: str = "", llm_model: str = "") -> tuple: and SecretKey.GOOGLE_API_KEY.value not in os.environ ): return _set_offline( - "GOOGLE_API_KEY not found in environment. Switching to offline mode." + "GOOGLE_API_KEY not found in environment. Switching to offline mode.", ) # If no specific API is provided or the provided API is valid @@ -78,5 +83,5 @@ def get_environment(llm_api: str = "", llm_model: str = "") -> tuple: if llm_api == llms.OFFLINE.name: return _set_offline("Offline mode enabled by user via CLI.") return _set_offline( - "\n\n\t\t...No LLM API settings found in environment..." + "\n\n... No LLM API settings found in environment ...", ) diff --git a/readmeai/generators/badges.py b/readmeai/generators/badges.py index 3d454c38..ae6f16b4 100644 --- a/readmeai/generators/badges.py +++ b/readmeai/generators/badges.py @@ -2,16 +2,13 @@ Methods to generate and format svg badges for the README file. 
""" -from typing import Tuple - -from readmeai.cli.options import BadgeOptions -from readmeai.config.settings import ConfigLoader -from readmeai.services.git import GitHost +from readmeai.config.settings import BadgeOptions, Settings from readmeai.utils.file_handler import FileHandler from readmeai.utils.file_resources import get_resource_path +from readmeai.vcs.providers import GitHost _package = "readmeai.generators" -_submodule = "assets" +_submodule = "svg" def _format_badges(badges: list[str]) -> str: @@ -28,20 +25,22 @@ def _format_badges(badges: list[str]) -> str: [ f'{badge.split(' for badge in badges[i : i + badges_per_line] - ] + ], ) lines.append( - f"{line}\n\t
    " if i + badges_per_line < total else f"{line}\n" + f"{line}\n\t
    " if i + badges_per_line < total else f"{line}\n", ) return "\n\t".join(lines) def build_default_badges( - config: ConfigLoader, full_name: str, host: str + config: Settings, + full_name: str, + host: str, ) -> str: """Build metadata badges using shields.io.""" - return config.md.shields_icons.format( + return config.md.shieldsio_icons.format( host=host, full_name=full_name, badge_color=config.md.badge_color, @@ -50,7 +49,9 @@ def build_default_badges( def build_project_badges( - dependencies: list[str], icons: dict[str, str], style: str + dependencies: list[str], + icons: dict[str, str], + style: str, ) -> str: """Build HTML badges for project dependencies.""" badges = [ @@ -65,14 +66,17 @@ def build_project_badges( return _format_badges(badges) -def shields_icons( - conf: ConfigLoader, dependencies: list, full_name: str, git_host: str -) -> Tuple[str, str]: +def shieldsio_icons( + conf: Settings, + dependencies: list, + full_name: str, + git_host: str, +) -> tuple[str, str]: """ Generates badges for the README using shields.io icons. 
""" icons_path = get_resource_path( - conf.files.shields_icons, + conf.files.shieldsio_icons, _package, _submodule, ) @@ -81,31 +85,34 @@ def shields_icons( default_icons = build_default_badges(conf, full_name, git_host) project_badges = build_project_badges( - dependencies, icons_dict, conf.md.badge_style + dependencies, + icons_dict, + conf.md.badge_style, ) project_badges = conf.md.badge_icons.format( - alignment=conf.md.alignment, badge_icons=project_badges + align=conf.md.align, + badge_icons=project_badges, ) if ( conf.md.badge_style == BadgeOptions.DEFAULT.value - and git_host != GitHost.LOCAL + and git_host != GitHost.LOCAL.name ): return ( default_icons, "\n", ) - if git_host == GitHost.LOCAL: + if git_host == GitHost.LOCAL.name: return ( - "\n", + "", project_badges, ) return default_icons, project_badges -def skill_icons(conf: ConfigLoader, dependencies: list) -> str: +def skill_icons(conf: Settings, dependencies: list) -> str: """ Generates badges for the README using skill icons, from the repository - https://github.com/tandpfun/skill-icons. 
@@ -113,21 +120,23 @@ def skill_icons(conf: ConfigLoader, dependencies: list) -> str: dependencies.extend(["md"]) icons_path = get_resource_path( - conf.files.skill_icons, _package, _submodule + conf.files.skill_icons, + _package, + _submodule, ) icons_dict = FileHandler().read(icons_path) - skill_icons = [ + icons = [ icon for icon in icons_dict["icons"]["names"] if icon in dependencies ] - skill_icons = ",".join(skill_icons) - skill_icons = icons_dict["url"]["base_url"] + skill_icons + formatted_icons = icons_dict["url"]["base_url"] + ",".join(icons) if conf.md.badge_style == "skills-light": - skill_icons = f"{skill_icons}&theme=light" + formatted_icons = f"{formatted_icons}&theme=light" - conf.md.skill_icons = conf.md.skill_icons.format(skill_icons) + conf.md.skill_icons = conf.md.skill_icons.format(formatted_icons) return conf.md.badge_icons.format( - alignment=conf.md.alignment, badge_icons=conf.md.skill_icons + align=conf.md.align, + badge_icons=conf.md.skill_icons, ) diff --git a/readmeai/generators/builder.py b/readmeai/generators/builder.py index 40826c09..7cf4a50f 100644 --- a/readmeai/generators/builder.py +++ b/readmeai/generators/builder.py @@ -1,15 +1,17 @@ -"""Builds each section of the README Markdown file.""" +""" +Builds each section of the README Markdown file. 
+""" __package__ = "readmeai" from pathlib import Path -from typing import List -from readmeai.cli.options import BadgeOptions -from readmeai.config.settings import ConfigLoader +from readmeai.config.settings import BadgeOptions, ConfigLoader from readmeai.generators import badges, tables, tree, utils from readmeai.generators.quickstart import get_setup_data -from readmeai.services.git import GitHost +from readmeai.templates.header import HeaderTemplate +from readmeai.templates.toc import ToCTemplate +from readmeai.vcs.providers import GitHost class MarkdownBuilder: @@ -18,11 +20,10 @@ class MarkdownBuilder: def __init__( self, config_loader: ConfigLoader, - dependencies: List[str], - summaries: tuple, + dependencies: list[str], + summaries: list[tuple[str, str]], temp_dir: str, ): - """Initializes the MarkdownBuilder class.""" self.deps = dependencies self.summaries = summaries self.temp_dir = Path(temp_dir) @@ -35,29 +36,67 @@ def __init__( if self.git.host_domain != GitHost.LOCAL.name.lower() else f"../{self.git.name}" ) + self.header_template = HeaderTemplate(self.md.header_style) + self.toc_template = ToCTemplate(self.md.toc_style) @property def md_header(self) -> str: - """Generates the README header section.""" + """ + Generates the README header section. 
+ """ if BadgeOptions.SKILLS.value not in self.md.badge_style: - md_shields, md_badges = badges.shields_icons( - self.config, self.deps, self.git.full_name, self.git.host + md_shields, md_badges = badges.shieldsio_icons( + self.config, + self.deps, + str(self.git.full_name), + str(self.git.host), ) else: md_shields = ( - "" + "" ) md_badges = badges.skill_icons(self.config, self.deps) - return self.md.header.format( - alignment=self.md.alignment, - image=self.md.image, - width=self.md.width, - repo_name=self.git.name.upper(), - slogan=self.md.slogan, - shields_icons=md_shields, - badge_icons=md_badges, - ) + header_data = { + "align": self.md.align, + "image": self.md.image, + "image_width": self.md.image_width, + "repo_name": self.git.name.upper() + if self.git.name + else self.md.placeholder, + "slogan": self.md.slogan, + "shields_icons": md_shields, + "badge_icons": md_badges, + } + return self.header_template.render(header_data) + + @property + def md_toc(self) -> str: + """ + Generates the README Table of Contents section. 
+ """ + toc_data = { + "sections": [ + {"title": "๐Ÿ“ Overview"}, + {"title": "๐Ÿ‘พ Features"}, + {"title": "๐Ÿ“‚ Repository Structure"}, + {"title": "๐Ÿงฉ Modules"}, + { + "title": "๐Ÿš€ Getting Started", + "subsections": [ + {"title": "๐Ÿ”– Prerequisites"}, + {"title": "๐Ÿ“ฆ Installation"}, + {"title": "๐Ÿค– Usage"}, + {"title": "๐Ÿงช Tests"}, + ], + }, + {"title": "๐Ÿ“Œ Project Roadmap"}, + {"title": "๐Ÿค Contributing"}, + {"title": "๐ŸŽ— License"}, + {"title": "๐Ÿ™Œ Acknowledgments"}, + ], + } + return self.toc_template.render(toc_data) @property def md_summaries(self) -> str: @@ -92,10 +131,10 @@ def md_quickstart(self) -> str: return self.md.quickstart.format( repo_name=self.git.name, repo_url=self.repo_url, + prerequisites=setup_data.prerequisites, install_command=setup_data.install_command, run_command=setup_data.run_command, test_command=setup_data.test_command, - system_requirements=setup_data.prerequisites, ) @property @@ -110,10 +149,12 @@ def md_contributing(self) -> str: ) def build(self) -> str: - """Builds the README Markdown file.""" + """ + Builds each section of the README.md file. + """ md_contents = [ self.md_header, - self.md.toc.format(repo_name=self.git.name), + self.md_toc, self.md.overview, self.md.features, self.md_tree, diff --git a/readmeai/generators/quickstart.py b/readmeai/generators/quickstart.py index 5f383b1f..abee0ed2 100644 --- a/readmeai/generators/quickstart.py +++ b/readmeai/generators/quickstart.py @@ -1,9 +1,10 @@ -"""Dynamically creates the 'Quickstart' section of the README file.""" +""" +Dynamically generate 'Quickstart' guides for the README file. 
+""" import traceback from dataclasses import dataclass from pathlib import Path -from typing import Dict, List from readmeai.config.settings import ConfigLoader from readmeai.core.logger import Logger @@ -13,60 +14,65 @@ @dataclass class QuickStart: - """Information about using, running, and testing a repository.""" + """ + Information about using, running, and testing a repository. + """ install_command: str run_command: str test_command: str prerequisites: str - language_counts: Dict[str, int] - language_key: str - language_name: str = None + language_counts: dict[str, int] + language_key: str | None + language_name: str | None = None def count_languages( - summaries: List[str], config_loader: ConfigLoader -) -> Dict[str, int]: + summaries: tuple, + config_loader: ConfigLoader, +) -> dict[str, int]: """ Counts the occurrences of each language in the summaries. """ parser_files = config_loader.parsers.get("parsers") - language_counts = {} + language_counts: dict[str, int] = {} for file_path, _ in summaries: - language = Path(file_path).suffix[1:] + language = Path(file_path).suffix[1:] or None if str(file_path) in [ dependency_file for dependency_file in parser_files ]: continue - if language and language not in config_loader.blacklist: + if ( + language + and language.strip() + and language not in config_loader.ignore_list + ): language_counts[language] = language_counts.get(language, 0) + 1 return language_counts -def get_top_language(language_counts: Dict[str, int]) -> str: +def get_top_language(language_counts: dict[str, int]) -> str | None: """ Determines the top language. 
""" if not language_counts: return None - - return max(sorted(language_counts), key=language_counts.get) + else: + return max(sorted(language_counts), key=language_counts.get) def get_top_language_setup( - language_counts: Dict[str, int], config_loader: ConfigLoader + language_counts: dict, + config_loader: ConfigLoader, ) -> QuickStart: """ Determines the top language and retrieves its setup commands. """ - if not language_counts: - return None - languages = config_loader.languages.get("language_names") commands = config_loader.commands.get("quickstart_guide") @@ -77,15 +83,16 @@ def get_top_language_setup( return QuickStart( *quickstart_commands, - prerequisites, - language_counts, - language_key, - language_name, + prerequisites=prerequisites, + language_counts=language_counts, + language_key=language_key, + language_name=language_name, ) def get_setup_data( - config_loader: ConfigLoader, summaries: List[str] + config_loader: ConfigLoader, + summaries: tuple, ) -> QuickStart: """ Generates the 'Quick Start' section of the README file. diff --git a/readmeai/generators/assets/icons.json b/readmeai/generators/svg/shieldsio_icons.json similarity index 100% rename from readmeai/generators/assets/icons.json rename to readmeai/generators/svg/shieldsio_icons.json diff --git a/readmeai/generators/assets/skill_icons.json b/readmeai/generators/svg/skill_icons.json similarity index 100% rename from readmeai/generators/assets/skill_icons.json rename to readmeai/generators/svg/skill_icons.json diff --git a/readmeai/generators/tables.py b/readmeai/generators/tables.py index ddba82d7..138b1bda 100644 --- a/readmeai/generators/tables.py +++ b/readmeai/generators/tables.py @@ -1,37 +1,101 @@ -"""Creates markdown tables to store LLM text responses in the README file.""" +""" +Creates Markdown tables to store LLM text responses in the README file. 
+""" from pathlib import Path -from typing import List, Tuple -from readmeai.services.git import fetch_git_file_url +from readmeai.core.logger import Logger +from readmeai.vcs.url_builder import GitURL + +_logger = Logger(__name__) def construct_markdown_table( - data: List[Tuple[str, str]], repo_url: str, full_name: str + data: list[tuple[str, str]], + repo_path: str | Path, + full_name: str, + max_rows: int = 100, ) -> str: - """Builds a Markdown table from the provided data.""" + """ + Builds a Markdown table to store LLM text responses in README file. + """ + assert isinstance(data, list), "Data must be a list" + assert all( + isinstance(item, tuple) and len(item) == 2 for item in data + ), "Each data item must be a tuple of (str, str)" + assert isinstance( + repo_path, + str | Path, + ), "repo_path must be a string or Path" + assert isinstance(full_name, str), "full_name must be a string" + assert ( + isinstance(max_rows, int) and max_rows > 0 + ), "max_rows must be a positive integer" + + if not data: + _logger.warning("Empty data provided for Markdown table") + return "" + + is_local_repo = Path(repo_path).exists() + + if not is_local_repo: + try: + git_url = GitURL.create(str(repo_path)) + except ValueError: + _logger.error(f"Invalid Git repository URL: {repo_path}") + is_local_repo = True # Fallback to treating it as a local path + headers = ["File", "Summary"] table_rows = [headers, ["---", "---"]] - for module, summary in data: - file_name = str(Path(module).name) - if "invalid" in full_name.lower(): - return file_name - host_url = fetch_git_file_url(module, full_name, repo_url) - md_format_host_url = f"[{file_name}]({host_url})" - table_rows.append([md_format_host_url, summary]) + for module, summary in data[:max_rows]: + file_name = Path(module).name + if is_local_repo: + file_path = Path(repo_path) / module + md_format_file_url = f"[{file_name}]({file_path})" + else: + try: + file_url = git_url.get_file_url(module) + md_format_file_url = 
f"[{file_name}]({file_url})" + except ValueError as e: + _logger.error(f"Error generating file URL for {module}: {e}") + md_format_file_url = file_name + + table_rows.append([md_format_file_url, summary]) + + if len(data) > max_rows: + _logger.warning( + f"Table truncated. Showing {max_rows} out of {len(data)} rows.", + ) + table_rows.append(["...", "..."]) + + return _format_as_markdown_table(table_rows) + + +def _format_as_markdown_table(rows: list[list[str]]) -> str: + """ + Formats the given rows as a Markdown table. + """ + assert len(rows) >= 3, "Table must have at least headers and separator" + assert all( + len(row) == len(rows[0]) for row in rows + ), "All rows must have the same number of columns" - return format_as_markdown_table(table_rows) + return "\n".join(f"| {' | '.join(row)} |" for row in rows) def extract_folder_name(module: str) -> str: - """Extracts the folder name from a module path.""" + """ + Extracts the folder name from a module path. + """ path_parts = Path(module).parts return ".".join(path_parts[:-1]) if len(path_parts) > 1 else "." -def format_as_markdown_table(rows: List[List[str]]) -> str: - """Formats rows of data as a Markdown table.""" +def format_as_markdown_table(rows: list[list[str]]) -> str: + """ + Formats rows of data as a Markdown table. 
+ """ max_column_widths = [ max(len(str(row[col])) for row in rows) for col in range(len(rows[0])) ] @@ -40,7 +104,7 @@ def format_as_markdown_table(rows: List[List[str]]) -> str: "| " + " | ".join( str(item).ljust(width) - for item, width in zip(row, max_column_widths) + for item, width in zip(row, max_column_widths, strict=False) ) + " |" for row in rows @@ -50,9 +114,12 @@ def format_as_markdown_table(rows: List[List[str]]) -> str: def format_code_summaries( - placeholder: str, code_summaries: Tuple[str, str] -) -> List[Tuple[str, str]]: - """Converts the given code summaries into a formatted list.""" + placeholder: str, + code_summaries: list[tuple[str, str]], +) -> list[tuple[str, str]]: + """ + Converts the given code summaries into a formatted list. + """ formatted_summaries = [] for summary in code_summaries: @@ -68,17 +135,21 @@ def format_code_summaries( def generate_markdown_tables( table_widget: str, - summaries: List[Tuple[str, str]], + summaries: list[tuple[str, str]], project_name: str, repository_url: str, ) -> str: - """Produces Markdown tables for each project sub-directory.""" + """ + Produces Markdown tables for each project sub-directory. + """ summaries_by_folder = group_summaries_by_folder(summaries) markdown_tables = [] for folder, entries in summaries_by_folder.items(): table_in_markdown = construct_markdown_table( - entries, repository_url, project_name + entries, + repository_url, + project_name, ) table_wrapper = table_widget.format(folder, table_in_markdown) markdown_tables.append(table_wrapper) @@ -86,15 +157,19 @@ def generate_markdown_tables( return "\n".join(markdown_tables) -def group_summaries_by_folder(summaries: List[Tuple[str, str]]) -> dict: - """Groups code summaries by their sub-directory.""" - folder_map = {} +def group_summaries_by_folder(summaries: list[tuple[str, str]]) -> dict: + """ + Groups code summaries by their sub-directory. 
+ """ + folder_map: dict[str, list[tuple[str, str]]] = {} for module, summary in summaries: folder_name = extract_folder_name(module) folder_map.setdefault(folder_name, []).append((module, summary)) return folder_map -def is_valid_tuple_summary(summary: Tuple[str, str]) -> bool: - """Checks if a summary is a valid tuple format.""" +def is_valid_tuple_summary(summary: tuple[str, str]) -> bool: + """ + Checks if a summary is a valid tuple format. + """ return isinstance(summary, tuple) and len(summary) == 2 diff --git a/readmeai/generators/tree.py b/readmeai/generators/tree.py index 9f25f2bf..568991b8 100644 --- a/readmeai/generators/tree.py +++ b/readmeai/generators/tree.py @@ -1,13 +1,21 @@ -"""Generates a directory tree structure for a code repository.""" +""" +Generates a directory tree structure for a code repository. +""" from pathlib import Path class TreeGenerator: - """Generates a directory tree structure for a code repository.""" + """ + Generates a directory tree structure for a code repository. + """ def __init__( - self, repo_name: str, root_dir: Path, repo_url: Path, max_depth: int + self, + repo_name: str, + root_dir: Path, + repo_url: Path, + max_depth: int, ): self.repo_name = repo_name self.root_dir = root_dir @@ -21,7 +29,9 @@ def _build_tree( is_last: bool = True, depth: int = 0, ) -> str: - """Generates a tree structure for a given directory.""" + """ + Generates a tree structure for a given directory. + """ if depth > self.max_depth: return "" @@ -42,7 +52,10 @@ def _build_tree( for index, child in enumerate(children): child_prefix = prefix + (" " if is_last else "โ”‚ ") child_tree = self._build_tree( - child, child_prefix, index == len(children) - 1, depth + 1 + child, + child_prefix, + index == len(children) - 1, + depth + 1, ) if child_tree: @@ -51,9 +64,12 @@ def _build_tree( return "\n".join(parts) def tree(self) -> str: - """Generates and formats a tree structure.""" + """ + Generates and formats a tree structure. 
+ """ md_tree = self._build_tree(self.root_dir) formatted_md_tree = md_tree.replace( - self.root_dir.name, f"{self.repo_name}/" + self.root_dir.name, + f"{self.repo_name}/", ) return formatted_md_tree diff --git a/readmeai/generators/utils.py b/readmeai/generators/utils.py index 205f64df..a6c56812 100644 --- a/readmeai/generators/utils.py +++ b/readmeai/generators/utils.py @@ -1,7 +1,8 @@ -"""Utilities to remove default emojis from markdown content.""" +""" +Utilities to remove default emojis from markdown content. +""" import re -from typing import List EMOJI_PATTERN = re.compile( pattern="[" @@ -21,8 +22,10 @@ ) -def remove_emojis(md_content: List[str]) -> List[str]: - """Removes emojis from the content list.""" +def remove_emojis(md_content: list[str]) -> list[str]: + """ + Removes emojis from the content list. + """ modified_content = [] for section in md_content: @@ -44,7 +47,8 @@ def split_markdown_headings(markdown_text: str) -> dict: Splits a markdown document by level 2 headings into separate sections. """ sections = re.split(r"(?m)^## ", markdown_text) - split_sections = {} + + split_sections: dict[str, str] = {} for section in sections: if section.strip(): diff --git a/readmeai/models/dalle.py b/readmeai/models/dalle.py index e4ab67ea..16d8a475 100644 --- a/readmeai/models/dalle.py +++ b/readmeai/models/dalle.py @@ -1,8 +1,10 @@ """ -Multi-modal model for generating images using OpenAI's DALL-E model. +Handler for generating images using OpenAI's DALL-E model. """ import os +from collections.abc import Generator +from contextlib import contextmanager from openai import Client, OpenAIError from requests import get @@ -13,22 +15,33 @@ class DalleHandler: - """Generates and downloads images using OpenAI's DALL-E model.""" + """ + Generates and downloads images using OpenAI's DALL-E model. 
+ """ - def __init__(self, config: ConfigLoader) -> None: + def __init__(self, conf: ConfigLoader) -> None: """Initialize the ImageGenerator class.""" - self.client = Client(api_key=os.getenv("OPENAI_API_KEY")) - self.conf = config + self.conf = conf + self.filename = f"{conf.config.git.name}.png" self._logger = Logger(__name__) self._model_settings() def _model_settings(self) -> None: """Initializes the DALL-E settings.""" + self.client = Client(api_key=os.getenv("OPENAI_API_KEY")) self.model = "dall-e-3" self.size = "1792x1024" self.quality = "standard" self.n = 1 + @contextmanager + def use_api(self) -> Generator: + """Yields the DALL-E handler.""" + try: + yield self + finally: + self._logger.debug(f"Closed {self.model.upper()} API session.") + def _build_payload(self) -> str: """Formats the prompt string using configuration data.""" return { @@ -43,7 +56,7 @@ def _build_payload(self) -> str: "n": self.n, } - def run(self) -> str: + def _make_request(self) -> str: """Generates an image and returns its URL.""" try: payload = self._build_payload() @@ -52,31 +65,31 @@ def run(self) -> str: return response.data[0].url else: self._logger.error( - f"Failed to generate {self.model.upper()} image: {response}" + f"Failed to generate {self.model.upper()} image: {response}", ) return ImageOptions.BLUE.value except (Exception, OpenAIError) as exc: self._logger.error( - f"{self.model.upper()} image generation error: {exc}" + f"{self.model.upper()} image generation error: {exc}", ) return ImageOptions.BLUE.value def download(self, image_url) -> str: """Downloads an image from the given URL.""" - filename = f"{self.conf.config.git.name}.png" try: response = get(image_url) - if response.status_code == 200: - with open(filename, "wb") as f: + status_code = response.status_code + + if status_code == 200: + with open(self.filename, "wb") as f: f.write(response.content) - return filename + self._logger.info(f"Image downloaded at: {image_url}") + return self.filename else: - 
self._logger.error( - f"Failed to download image: {response.status_code}" - ) - return ImageOptions.BLUE.value + self._logger.error(f"Failed to download image: {status_code}") except Exception as exc: self._logger.error(f"Failed to download image: {exc}") - return ImageOptions.BLUE.value + + return ImageOptions.BLUE.value diff --git a/readmeai/models/factory.py b/readmeai/models/factory.py index 889d8425..f94c9e53 100644 --- a/readmeai/models/factory.py +++ b/readmeai/models/factory.py @@ -1,32 +1,38 @@ """ -Model factory that returns the appropriate LLM handler based on CLI input. +Factory class that selects appropriate LLM API service based on CLI input. """ +from typing import ClassVar + from readmeai._exceptions import UnsupportedServiceError -from readmeai.cli.options import ModelOptions as llms -from readmeai.config.settings import ConfigLoader +from readmeai.config.settings import ConfigLoader, ModelOptions from readmeai.core.models import BaseModelHandler from readmeai.models.gemini import GeminiHandler from readmeai.models.offline import OfflineHandler from readmeai.models.openai import OpenAIHandler -class ModelFactory: - """Factory that returns the appropriate LLM handler based on CLI input.""" +class ModelRegistry: + """ + Returns the appropriate LLM API handler based on CLI input. 
+ """ - _model_map = { - llms.OFFLINE.value: OfflineHandler, - llms.OLLAMA.value: OpenAIHandler, - llms.OPENAI.value: OpenAIHandler, - llms.GEMINI.value: GeminiHandler, + _model_map: ClassVar[dict] = { + # ModelOptions.ANTHROPIC.value: AnthropicHandler, + ModelOptions.GEMINI.value: GeminiHandler, + ModelOptions.OFFLINE.value: OfflineHandler, + ModelOptions.OLLAMA.value: OpenAIHandler, + ModelOptions.OPENAI.value: OpenAIHandler, } @staticmethod - def model_handler(conf: ConfigLoader) -> BaseModelHandler: - """Returns the appropriate LLM API handler based on CLI input.""" - llm_handler = ModelFactory._model_map.get(conf.config.llm.api) - if llm_handler is None: + def get_backend(conf: ConfigLoader) -> BaseModelHandler: + """ + Returns the appropriate LLM API handler based on CLI input. + """ + backend_service = ModelRegistry._model_map.get(conf.config.llm.api) + if backend_service is None: raise UnsupportedServiceError( - f"Unsupported LLM service provided: {conf.config.llm.api}" + f"Unsupported LLM service provided: {conf.config.llm.api}", ) - return llm_handler(conf) + return backend_service(conf) diff --git a/readmeai/models/gemini.py b/readmeai/models/gemini.py index 4265f87a..40889bb2 100644 --- a/readmeai/models/gemini.py +++ b/readmeai/models/gemini.py @@ -3,7 +3,6 @@ """ import os -from typing import List, Tuple import aiohttp import google.generativeai as genai @@ -15,6 +14,7 @@ ) from readmeai.config.settings import ConfigLoader +from readmeai.core.logger import Logger from readmeai.core.models import BaseModelHandler from readmeai.models.tokens import token_handler from readmeai.utils.text_cleaner import clean_response @@ -26,11 +26,13 @@ class GeminiHandler(BaseModelHandler): def __init__(self, config_loader: ConfigLoader) -> None: """Initializes the Gemini API handler.""" super().__init__(config_loader) + self._logger = Logger(__name__) self._model_settings() def _model_settings(self): """Initializes the Gemini API LLM settings.""" - 
genai.configure(api_key=os.getenv("GOOGLE_API_KEY")) + api_key = os.getenv("GOOGLE_API_KEY") + genai.configure(api_key=api_key) self.model = genai.GenerativeModel(self.config.llm.model) self.temperature = self.config.llm.temperature self.tokens = self.config.llm.tokens @@ -39,10 +41,9 @@ def _model_settings(self): async def _build_payload(self, prompt: str, tokens: int) -> dict: """Build payload for POST request to the Gemini API.""" return genai.types.GenerationConfig( - # candidate_count=1, - # stop_sequences=['x'], max_output_tokens=self.tokens, temperature=self.temperature, + top_p=self.top_p, ) @retry( @@ -53,19 +54,22 @@ async def _build_payload(self, prompt: str, tokens: int) -> dict: aiohttp.ClientError, aiohttp.ClientResponseError, aiohttp.ClientConnectorError, - ) + ), ), ) async def _make_request( self, - index: str, - prompt: str, - tokens: int, - raw_files: List[Tuple[str, str]] = None, - ) -> Tuple[str, str]: - """Processes Gemini API responses and returns generated text.""" + index: str | None, + prompt: str | None, + tokens: int | None, + raw_files: list[tuple[str, str]] | None, + ) -> list[tuple[str, str]]: + """ + Processes Gemini API responses and returns generated text. 
+ """ try: prompt = await token_handler(self.config, index, prompt, tokens) + parameters = await self._build_payload(prompt, tokens) async with self.rate_limit_semaphore: @@ -74,15 +78,18 @@ async def _make_request( generation_config=parameters, ) response_text = response.text + self._logger.info(f"Response for '{index}':\n{response_text}") + return index, clean_response(index, response_text) except ( aiohttp.ClientError, aiohttp.ClientResponseError, aiohttp.ClientConnectorError, - ) as exc: + ): self._logger.error( - f"Error making request to Gemini API for `{index}`: {exc}" + f"Error processing request for prompt: {index}", + exc_info=True, ) return index, self.config.md.placeholder diff --git a/readmeai/models/offline.py b/readmeai/models/offline.py index bf825edc..0e2af92f 100644 --- a/readmeai/models/offline.py +++ b/readmeai/models/offline.py @@ -2,7 +2,7 @@ Model handler for running the CLI without a LLM API service. """ -from typing import Any, Dict, List, Tuple +from typing import Any from readmeai.config.settings import ConfigLoader from readmeai.core.models import BaseModelHandler @@ -18,20 +18,22 @@ def __init__(self, config_loader: ConfigLoader) -> None: def _model_settings(self): """Set default values for offline mode.""" - self.placeholder = self.config.md.placeholder + self.placeholder: str = self.config.md.placeholder - async def _build_payload(self, prompt: str, tokens: int) -> Dict[str, Any]: + async def _build_payload(self, prompt: str, tokens: int) -> dict[str, Any]: """Builds the payload for the POST request to the LLM API.""" - ... 
+ return {} async def _make_request( self, - index: str = None, - prompt: str = None, - tokens: int = None, - raw_files: List[Tuple[str, str]] = None, - ) -> Tuple[str, str]: - """Returns placeholder text where LLM API response would be.""" + index: str | None, + prompt: str | None, + tokens: int | None, + raw_files: list[tuple[str, str]] | None, + ) -> list[tuple[str, str]]: + """ + Returns placeholder text where LLM API response would be. + """ file_summaries = [ (str(file_path), self.placeholder) for file_path, _ in raw_files ] diff --git a/readmeai/models/openai.py b/readmeai/models/openai.py index eed26007..ca4a5e2f 100644 --- a/readmeai/models/openai.py +++ b/readmeai/models/openai.py @@ -3,7 +3,6 @@ """ import os -from typing import List, Tuple import aiohttp import openai @@ -14,17 +13,16 @@ wait_exponential, ) -from readmeai.cli.options import ModelOptions as llms -from readmeai.config.settings import ConfigLoader +from readmeai.config.settings import ConfigLoader, ModelOptions from readmeai.core.models import BaseModelHandler from readmeai.models.tokens import token_handler from readmeai.utils.text_cleaner import clean_response -_localhost = "http://localhost:11434/v1/" - class OpenAIHandler(BaseModelHandler): - """OpenAI API LLM implementation.""" + """ + OpenAI API LLM implementation, with Ollama support. 
+ """ def __init__(self, config_loader: ConfigLoader) -> None: """Initialize OpenAI API LLM handler.""" @@ -32,22 +30,26 @@ def __init__(self, config_loader: ConfigLoader) -> None: self._model_settings() def _model_settings(self): - """Set default values for OpenAI API.""" + """Setup configuration for OpenAI/OLLAMA LLM handlers.""" + self.host_name = self.config.llm.host_name + self.localhost = self.config.llm.localhost self.model = self.config.llm.model + self.path = self.config.llm.path self.temperature = self.config.llm.temperature self.tokens = self.config.llm.tokens self.top_p = self.config.llm.top_p - if self.config.llm.api == llms.OPENAI.name: - self.endpoint = self.config.llm.base_url - self.client = openai.OpenAI( - api_key=os.environ.get("OPENAI_API_KEY") - ) - elif self.config.llm.api == llms.OLLAMA.name: - self.endpoint = f"{_localhost}chat/completions" + if self.config.llm.api == ModelOptions.OPENAI.name: + self.url = f"{self.host_name}{self.path}" + self.client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY")) + + elif self.config.llm.api == ModelOptions.OLLAMA.name: + self.url = f"{self.localhost}{self.path}" self.client = openai.OpenAI( - base_url=_localhost, api_key=llms.OLLAMA.name + base_url=f"{self.localhost}v1", + api_key=ModelOptions.OLLAMA.name, ) + self.headers = {"Authorization": f"Bearer {self.client.api_key}"} async def _build_payload(self, prompt: str, tokens: int) -> dict: @@ -56,7 +58,7 @@ async def _build_payload(self, prompt: str, tokens: int) -> dict: "messages": [ { "role": "system", - "content": self.sys_content, + "content": self.system_message, }, {"role": "user", "content": prompt}, ], @@ -74,31 +76,32 @@ async def _build_payload(self, prompt: str, tokens: int) -> dict: aiohttp.ClientResponseError, aiohttp.ClientConnectorError, openai.OpenAIError, - ) + ), ), ) async def _make_request( self, - index: str, - prompt: str, - tokens: int, - raw_files: List[Tuple[str, str]] = None, - ) -> Tuple[str, str]: + index: str | None, + 
prompt: str | None, + tokens: int | None, + raw_files: list[tuple[str, str]] | None, + ) -> list[tuple[str, str]]: """Processes OpenAI API LLM responses and returns generated text.""" try: prompt = await token_handler(self.config, index, prompt, tokens) + parameters = await self._build_payload(prompt, tokens) async with self._session.post( - self.endpoint, + self.url, headers=self.headers, json=parameters, ) as response: response.raise_for_status() - response = await response.json() - text = response["choices"][0]["message"]["content"] - self._logger.info(f"Response for '{index}':\n{text}") - return index, clean_response(index, text) + data = await response.json() + data = data["choices"][0]["message"]["content"] + self._logger.info(f"Generated text for '{index}': {data}") + return index, clean_response(index, data) except ( aiohttp.ClientError, @@ -106,6 +109,5 @@ async def _make_request( aiohttp.ClientConnectorError, openai.OpenAIError, ) as exc: - message = f"Error making request for - `{index}`: {exc}" - self._logger.error(message) + self._logger.error(f"Error making request for '{index}': {exc}") return index, self.config.md.placeholder diff --git a/readmeai/models/prompts.py b/readmeai/models/prompts.py index a8468a32..0ff3d9ac 100644 --- a/readmeai/models/prompts.py +++ b/readmeai/models/prompts.py @@ -2,18 +2,17 @@ Methods for processing prompts used in LLM API requests. """ -from typing import Dict, List, Union - -import readmeai.config.settings as Settings +from readmeai.config.settings import Settings from readmeai.core.logger import Logger _logger = Logger(__name__) def get_prompt_context(prompts: dict, prompt_type: str, context: dict) -> str: - """Generates a prompt for the LLM API.""" + """ + Generates a prompt for the LLM API. 
+ """ prompt_template = get_prompt_template(prompts, prompt_type) - if not prompt_template: _logger.error(f"Prompt type '{prompt_type}' not found.") return "" @@ -22,7 +21,9 @@ def get_prompt_context(prompts: dict, prompt_type: str, context: dict) -> str: def get_prompt_template(prompts: dict, prompt_type: str) -> str: - """Retrieves the template for the given prompt type.""" + """ + Retrieves the template for the given prompt type. + """ prompt_templates = { "features": prompts["prompts"]["features"], "overview": prompts["prompts"]["overview"], @@ -32,7 +33,9 @@ def get_prompt_template(prompts: dict, prompt_type: str) -> str: def inject_prompt_context(template: str, context: dict) -> str: - """Formats the template with the provided context.""" + """ + Formats the template with the provided context. + """ try: return template.format(*[context[key] for key in context]) except KeyError as exc: @@ -42,10 +45,12 @@ def inject_prompt_context(template: str, context: dict) -> str: async def set_additional_contexts( config: Settings, - dependencies: List[str], - file_summaries: List[str], -) -> List[dict]: - """Generates additional prompts (features, overview, slogan) for LLM.""" + dependencies: list[str], + file_summaries: list[tuple[str, str]], +) -> list[dict]: + """ + Generates additional prompts (features, overview, slogan) for LLM. + """ return [ {"type": prompt_type, "context": context} for prompt_type, context in [ @@ -78,10 +83,12 @@ async def set_additional_contexts( async def set_summary_context( config: Settings, - dependencies: List[str], - file_summaries: List[str], -) -> List[Dict[str, Union[str, dict]]]: - """Generates the summary prompts to be used by the LLM API.""" + dependencies: list[str], + file_summaries: list[str], +) -> list[dict]: + """ + Generates the summary prompts to be used by the LLM API. 
+ """ return [ {"type": prompt_type, "context": context} for prompt_type, context in [ diff --git a/readmeai/models/tokens.py b/readmeai/models/tokens.py index 86446fc6..f66ea6e2 100644 --- a/readmeai/models/tokens.py +++ b/readmeai/models/tokens.py @@ -2,7 +2,7 @@ Tokenizer utilities for tokenizing and truncating text. """ -from tiktoken import get_encoding +from tiktoken import Encoding, get_encoding from readmeai.config.settings import Settings from readmeai.core.logger import Logger @@ -11,7 +11,7 @@ _logger = Logger(__name__) -def _set_encoding_cache(encoding_name: str) -> str: +def _set_encoding_cache(encoding_name: str) -> Encoding: """Set the encoding cache for a specific encoding.""" if encoding_name not in _encoding_cache: _encoding_cache[encoding_name] = get_encoding(encoding_name) @@ -19,7 +19,10 @@ def _set_encoding_cache(encoding_name: str) -> str: async def token_handler( - config: Settings, index: str, prompt: str, tokens: int + config: Settings, + index: str, + prompt: str, + tokens: int, ) -> str: """Handle token count for the prompt.""" encoder = config.llm.encoder @@ -28,7 +31,7 @@ async def token_handler( if token_count > max_count: _logger.debug( - f"Truncating '{index}' prompt: {token_count} > {max_count} tokens!" 
+ f"Truncating '{index}' prompt: {token_count} > {max_count} tokens!", ) prompt = truncate_tokens(encoder, prompt, tokens) @@ -43,19 +46,19 @@ def count_tokens(text: str, encoder: str) -> int: except (UnicodeEncodeError, ValueError) as exc: _logger.error( - f"Error counting tokens for '{text}' with {encoder}: {exc}" + f"Error counting tokens for '{text}' with {encoder}: {exc}", ) token_count = 0 return token_count -def truncate_tokens(encoder: str, text: str, max_count: int) -> str: +def truncate_tokens(encoding: str, text: str, max_count: int) -> str: """Truncate a text string to a maximum number of tokens.""" if not text: return text try: - encoder = _set_encoding_cache(encoder) + encoder = _set_encoding_cache(encoding) token_count = len(encoder.encode(text)) if token_count <= max_count: return text @@ -70,7 +73,9 @@ def truncate_tokens(encoder: str, text: str, max_count: int) -> str: def update_max_tokens( - max_tokens: int, prompt: str, target: str = "Hello!" + max_tokens: int, + prompt: str, + target: str = "Hello!", ) -> int: """Adjust the maximum number of tokens based on the specific prompt.""" is_valid_prompt = prompt.strip().startswith(target.strip()) diff --git a/readmeai/parsers/cicd/bitbucket.py b/readmeai/parsers/cicd/bitbucket.py deleted file mode 100644 index caf214b4..00000000 --- a/readmeai/parsers/cicd/bitbucket.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for Bitbucket Pipelines (bitbucket-pipelines.yml) configuration files.""" diff --git a/readmeai/parsers/cicd/circleci.py b/readmeai/parsers/cicd/circleci.py deleted file mode 100644 index 39d49b91..00000000 --- a/readmeai/parsers/cicd/circleci.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for .circleci/config.yml configuration files.""" diff --git a/readmeai/parsers/cicd/github.py b/readmeai/parsers/cicd/github.py deleted file mode 100644 index 6c46dc00..00000000 --- a/readmeai/parsers/cicd/github.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for GitHub Actions (.github/workflows/) configuration files.""" 
diff --git a/readmeai/parsers/cicd/gitlab.py b/readmeai/parsers/cicd/gitlab.py deleted file mode 100644 index 2bdcbb22..00000000 --- a/readmeai/parsers/cicd/gitlab.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for .gitlab-ci.yml configuration files.""" diff --git a/readmeai/parsers/cicd/jenkins.py b/readmeai/parsers/cicd/jenkins.py deleted file mode 100644 index 97912d5b..00000000 --- a/readmeai/parsers/cicd/jenkins.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for Jenkinsfile (Jenkinsfile) configuration files.""" diff --git a/readmeai/parsers/cicd/travis.py b/readmeai/parsers/cicd/travis.py deleted file mode 100644 index 1d4cbc55..00000000 --- a/readmeai/parsers/cicd/travis.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for .travis.yml configuration files.""" diff --git a/readmeai/parsers/configuration/ansible.py b/readmeai/parsers/configuration/ansible.py deleted file mode 100644 index bf1ce6a6..00000000 --- a/readmeai/parsers/configuration/ansible.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for Ansible (playbook.yml, ansible/site.yml) configuration files.""" diff --git a/readmeai/parsers/configuration/apache.py b/readmeai/parsers/configuration/apache.py deleted file mode 100644 index e9fb3a3a..00000000 --- a/readmeai/parsers/configuration/apache.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for apache.py (httpd.conf) configuration files.""" diff --git a/readmeai/parsers/configuration/nginx.py b/readmeai/parsers/configuration/nginx.py deleted file mode 100644 index 815324bf..00000000 --- a/readmeai/parsers/configuration/nginx.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for Nginx (nginx.conf) configuration files.""" diff --git a/readmeai/parsers/language/cpp.py b/readmeai/parsers/cpp.py similarity index 81% rename from readmeai/parsers/language/cpp.py rename to readmeai/parsers/cpp.py index ee0000fc..8c68d574 100644 --- a/readmeai/parsers/language/cpp.py +++ b/readmeai/parsers/cpp.py @@ -1,19 +1,22 @@ -"""Dependency file parsers for C/C++ projects.""" +""" +Dependency file parsers for 
C/C++ projects. +""" import re -from typing import List from readmeai.core.parsers import BaseFileParser class CMakeParser(BaseFileParser): - """Parser for CMake dependency files.""" + """ + Parser for CMake dependency files. + """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts dependencies, libs, and software from a CMakeLists.txt.""" try: extracted_dependencies = [] @@ -30,35 +33,40 @@ def parse(self, content: str) -> List[str]: extracted_dependencies.extend(dependencies) if line.startswith("target_link_libraries") or line.startswith( - "find_package" + "find_package", ): libs = re.findall( - r"target_link_libraries\([^)]+\s+([^)]+)\)", line + r"target_link_libraries\([^)]+\s+([^)]+)\)", + line, ) extracted_dependencies.extend(libs) return list(set(extracted_dependencies)) except re.error as exc: - return self.handle_parsing_error(f"CMakeLists.txt: {str(exc)}") + return self.handle_parsing_error(f"CMakeLists.txt: {exc!s}") class ConfigureAcParser(BaseFileParser): - """Parser for configure.ac dependency files.""" + """ + Parser for configure.ac dependency files. + """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a configure.ac file.""" try: regex = re.compile(r"AC_CHECK_LIB\([^)]+\s+([^)]+)\)") return regex.findall(content) except re.error as exc: - return self.handle_parsing_error(f"configure.ac: {str(exc)}") + return self.handle_parsing_error(f"configure.ac: {exc!s}") class MakefileAmParser(BaseFileParser): - """Parser for Makefile dependency files.""" + """ + Parser for Makefile dependency files. 
+ """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts dependencies from Makefile.am files.""" try: extracted_packages = [] @@ -89,4 +97,4 @@ def parse(self, content: str) -> List[str]: return list(set(extracted_packages)) except re.error as exc: - return self.handle_parsing_error(f"Makefile.am: {str(exc)}") + return self.handle_parsing_error(f"Makefile.am: {exc!s}") diff --git a/readmeai/parsers/configuration/docker.py b/readmeai/parsers/docker.py similarity index 74% rename from readmeai/parsers/configuration/docker.py rename to readmeai/parsers/docker.py index 9b31b7de..c7e99b65 100644 --- a/readmeai/parsers/configuration/docker.py +++ b/readmeai/parsers/docker.py @@ -1,7 +1,8 @@ -"""Parser for Docker (Dockerfile, docker-compose.yaml) configuration files.""" +""" +Parser for Docker (Dockerfile, docker-compose.yaml) configuration files. +""" import re -from typing import List import yaml @@ -9,13 +10,15 @@ class DockerfileParser(BaseFileParser): - """Parser for Dockerfile dependency files.""" + """ + Parser for Dockerfile dependency files. + """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a Dockerfile.""" try: dependencies = [] @@ -35,13 +38,15 @@ def parse(self, content: str) -> List[str]: return dependencies except re.error as exc: - return self.handle_parsing_error(f"Dockerfile: {str(exc)}") + return self.handle_parsing_error(f"Dockerfile: {exc!s}") class DockerComposeParser(BaseFileParser): - """Parser for Docker related files.""" + """ + Parser for Docker related files. 
+ """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Parse docker-compose.yaml file and return a list of services.""" try: data = yaml.safe_load(content) @@ -49,4 +54,4 @@ def parse(self, content: str) -> List[str]: return list(data["services"].keys()) except yaml.YAMLError as exc: - return self.handle_parsing_error(f"Dockerfile: {str(exc)}") + return self.handle_parsing_error(f"Dockerfile: {exc!s}") diff --git a/readmeai/parsers/factory.py b/readmeai/parsers/factory.py index 5af521b2..51e03289 100644 --- a/readmeai/parsers/factory.py +++ b/readmeai/parsers/factory.py @@ -1,78 +1,75 @@ -"""Abstract factory module for all project file parsers.""" - -from typing import Dict, Type +""" +Abstract factory module for all project file parsers. +""" from readmeai.core.parsers import BaseFileParser -from readmeai.parsers.configuration.docker import ( - DockerComposeParser, - DockerfileParser, -) -from readmeai.parsers.configuration.properties import PropertiesParser -from readmeai.parsers.language.cpp import ( +from readmeai.parsers.cpp import ( CMakeParser, ConfigureAcParser, MakefileAmParser, ) -from readmeai.parsers.language.go import GoModParser -from readmeai.parsers.language.python import ( - RequirementsParser, - TomlParser, - YamlParser, +from readmeai.parsers.docker import ( + DockerComposeParser, + DockerfileParser, ) -from readmeai.parsers.language.rust import CargoTomlParser -from readmeai.parsers.language.swift import SwiftPackageParser -from readmeai.parsers.package.gradle import ( +from readmeai.parsers.go import GoModParser +from readmeai.parsers.gradle import ( BuildGradleKtsParser, BuildGradleParser, ) -from readmeai.parsers.package.maven import MavenParser -from readmeai.parsers.package.npm import PackageJsonParser -from readmeai.parsers.package.yarn import YarnLockParser - -ParserRegistryType = dict[str, Type[BaseFileParser]] +from readmeai.parsers.maven import MavenParser +from readmeai.parsers.npm import 
PackageJsonParser +from readmeai.parsers.properties import PropertiesParser +from readmeai.parsers.python import ( + RequirementsParser, + TomlParser, + YamlParser, +) +from readmeai.parsers.rust import CargoTomlParser +from readmeai.parsers.swift import SwiftPackageParser +from readmeai.parsers.yarn import YarnLockParser -PARSER_REGISTRY = { - # Configuration - ".properties": PropertiesParser, - # Language/Framework - # Python - "Pipfile": TomlParser(), - "pyproject.toml": TomlParser(), - "requirements.in": RequirementsParser(), - "requirements.txt": RequirementsParser(), - "requirements-dev.txt": RequirementsParser(), - "requirements-test.txt": RequirementsParser(), - "requirements-prod.txt": RequirementsParser(), - "dev-requirements.txt": RequirementsParser(), - "environment.yml": YamlParser(), - "environment.yaml": YamlParser(), - # "setup.py": setup_py_parser, - # "setup.cfg": setup_cfg_parser, - # C/C++ - "cmakeLists.txt": CMakeParser(), - "configure.ac": ConfigureAcParser(), - "Makefile.am": MakefileAmParser(), - # JavaScript/Node.js - "package.json": PackageJsonParser(), - "yarn.lock": YarnLockParser(), - # Kotlin and Kotlin DSL - "build.gradle": BuildGradleParser(), - "build.gradle.kts": BuildGradleKtsParser(), - # Go - "go.mod": GoModParser(), - # Java - "pom.xml": MavenParser(), - # Rust - "cargo.toml": CargoTomlParser(), - # Swift - "Package.swift": SwiftPackageParser(), - "Dockerfile": DockerfileParser(), - "docker-compose.yaml": DockerComposeParser(), - # Package Managers - # Monitoring and Logging -} +ParserRegistryType = dict[str, BaseFileParser] -def parser_handler() -> Dict[str, BaseFileParser]: - """Returns a dictionary of callable file parser methods.""" - return PARSER_REGISTRY +def parser_handler() -> ParserRegistryType: + """ + Returns a dictionary of callable file parser methods. 
+ """ + return { + # Python + "Pipfile": TomlParser(), + "pyproject.toml": TomlParser(), + "requirements.in": RequirementsParser(), + "requirements.txt": RequirementsParser(), + "requirements-dev.txt": RequirementsParser(), + "requirements-test.txt": RequirementsParser(), + "requirements-prod.txt": RequirementsParser(), + "dev-requirements.txt": RequirementsParser(), + "environment.yml": YamlParser(), + "environment.yaml": YamlParser(), + # "setup.py": setup_py_parser, + # "setup.cfg": setup_cfg_parser, + # C/C++ + "cmakeLists.txt": CMakeParser(), + "configure.ac": ConfigureAcParser(), + "Makefile.am": MakefileAmParser(), + # JavaScript/Node.js + "package.json": PackageJsonParser(), + "yarn.lock": YarnLockParser(), + # Kotlin/Kotlin DSL + "build.gradle": BuildGradleParser(), + "build.gradle.kts": BuildGradleKtsParser(), + # Go + "go.mod": GoModParser(), + # Java + "pom.xml": MavenParser(), + # Rust + "cargo.toml": CargoTomlParser(), + # Swift + "Package.swift": SwiftPackageParser(), + # Docker + "Dockerfile": DockerfileParser(), + "docker-compose.yaml": DockerComposeParser(), + ".properties": PropertiesParser(), + } diff --git a/readmeai/parsers/language/go.py b/readmeai/parsers/go.py similarity index 75% rename from readmeai/parsers/language/go.py rename to readmeai/parsers/go.py index 715d6815..656f353a 100644 --- a/readmeai/parsers/language/go.py +++ b/readmeai/parsers/go.py @@ -1,19 +1,22 @@ -"""Parse package dependencies from go.mod files.""" +""" +Parser for go.mod dependency files. +""" import re -from typing import List from readmeai.core.parsers import BaseFileParser class GoModParser(BaseFileParser): - """Parser for go.mod files.""" + """ + Parser for go.mod files. 
+ """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Parse the content of a go.mod file.""" try: lines = content.split("\n") @@ -27,4 +30,4 @@ def parse(self, content: str) -> List[str]: return list(package_names) except Exception as exc: - return self.handle_parsing_error(f"go.mod: {str(exc)}") + return self.handle_parsing_error(f"go.mod: {exc!s}") diff --git a/readmeai/parsers/package/gradle.py b/readmeai/parsers/gradle.py similarity index 81% rename from readmeai/parsers/package/gradle.py rename to readmeai/parsers/gradle.py index 613a5a9b..da27d28a 100644 --- a/readmeai/parsers/package/gradle.py +++ b/readmeai/parsers/gradle.py @@ -1,19 +1,22 @@ -"""Parser for gradle dependency files.""" +""" +Parser for gradle dependency files. +""" import re -from typing import List from readmeai.core.parsers import BaseFileParser class BuildGradleParser(BaseFileParser): - """Parser for build.gradle dependency files.""" + """ + Parser for build.gradle dependency files. + """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a build.gradle file.""" try: pattern = r"(implementation|classpath|api|testImplementation|androidTestImplementation|kapt)\s+['\"]([^'\"]+)['\"]" @@ -30,13 +33,15 @@ def parse(self, content: str) -> List[str]: return list(package_names) except re.error as exc: - return self.handle_parsing_error(f"build.gradle: {str(exc)}") + return self.handle_parsing_error(f"build.gradle: {exc!s}") class BuildGradleKtsParser(BaseFileParser): - """Parser for build.gradle.kts dependency files.""" + """ + Parser for build.gradle.kts dependency files. 
+ """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a build.gradle.kts file.""" try: pattern = r"(\bimplementation|testImplementation)\s*\((['\"])([^'\"]+)\2\)" @@ -52,4 +57,4 @@ def parse(self, content: str) -> List[str]: return list(package_names) except re.error as error: - return self.handle_parsing_error(f"build.gradle.kts: {str(error)}") + return self.handle_parsing_error(f"build.gradle.kts: {error!s}") diff --git a/readmeai/parsers/infrastructure/cloudformation.py b/readmeai/parsers/infrastructure/cloudformation.py deleted file mode 100644 index 063fd2c1..00000000 --- a/readmeai/parsers/infrastructure/cloudformation.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for cloudformation.yaml (AWS CloudFormation) configuration files.""" diff --git a/readmeai/parsers/infrastructure/terraform.py b/readmeai/parsers/infrastructure/terraform.py deleted file mode 100644 index 6e873666..00000000 --- a/readmeai/parsers/infrastructure/terraform.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for main.tf (Terraform) configuration files.""" diff --git a/readmeai/parsers/package/maven.py b/readmeai/parsers/maven.py similarity index 70% rename from readmeai/parsers/package/maven.py rename to readmeai/parsers/maven.py index 316e16a0..c3f63c3a 100644 --- a/readmeai/parsers/package/maven.py +++ b/readmeai/parsers/maven.py @@ -1,23 +1,26 @@ -"""Parser utilities for Java-based dependency files.""" +""" +Parser utilities for Java-based dependency files. +""" import re -from typing import List from readmeai.core.parsers import BaseFileParser class MavenParser(BaseFileParser): - """Parser for Maven dependency files in pom.xml format.""" + """ + Parser for Maven dependency files in pom.xml format. 
+ """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extract packages names from Maven pom.xml files.""" try: regex = re.compile( - r"\s*([^<]+)\s*([^<]+)\s*([^<]+)" + r"\s*([^<]+)\s*([^<]+)\s*([^<]+)", ) matches = regex.findall(content) dependencies = [ @@ -28,4 +31,4 @@ def parse(self, content: str) -> List[str]: return set(dependencies) except re.error as exc: - return self.handle_parsing_error(f"pom.xml: {str(exc)}") + return self.handle_parsing_error(f"pom.xml: {exc!s}") diff --git a/readmeai/parsers/package/npm.py b/readmeai/parsers/npm.py similarity index 69% rename from readmeai/parsers/package/npm.py rename to readmeai/parsers/npm.py index 658536ac..5720d444 100644 --- a/readmeai/parsers/package/npm.py +++ b/readmeai/parsers/npm.py @@ -1,20 +1,23 @@ -"""File for parsing json dependency files.""" +""" +Parsers for npm related dependency files. +""" import json import re -from typing import List from readmeai.core.parsers import BaseFileParser class PackageJsonParser(BaseFileParser): - """Parser for package.json dependency files.""" + """ + Parser for package.json dependency files. + """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Returns a list of dependencies parsed from a json file.""" try: data = json.loads(content) @@ -29,15 +32,17 @@ def parse(self, content: str) -> List[str]: return package_names except json.JSONDecodeError as exc: - return self.handle_parsing_error(f"package.json: {str(exc)}") + return self.handle_parsing_error(f"package.json: {exc!s}") class YarnLockParser(BaseFileParser): - """Parser for yarn.lock dependency files.""" + """ + Parser for yarn.lock dependency files. 
+ """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a yarn.lock file.""" try: return re.findall(r"(\S+)(?=@)", content) except re.error as exc: - return self.handle_parsing_error(f"yarn.lock: {str(exc)}") + return self.handle_parsing_error(f"yarn.lock: {exc!s}") diff --git a/readmeai/parsers/orchestration/kubernetes.py b/readmeai/parsers/orchestration/kubernetes.py deleted file mode 100644 index f32cdc6c..00000000 --- a/readmeai/parsers/orchestration/kubernetes.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for Kubernetes (k8s.yml) configuration files.""" diff --git a/readmeai/parsers/package/__init__.py b/readmeai/parsers/package/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/readmeai/parsers/package/composer.py b/readmeai/parsers/package/composer.py deleted file mode 100644 index 2d104b0e..00000000 --- a/readmeai/parsers/package/composer.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for PHP Composer (composer.json) configuration files.""" diff --git a/readmeai/parsers/package/gem.py b/readmeai/parsers/package/gem.py deleted file mode 100644 index a214954a..00000000 --- a/readmeai/parsers/package/gem.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for Gemfile.lock (Ruby) configuration files.""" diff --git a/readmeai/parsers/package/nuget.py b/readmeai/parsers/package/nuget.py deleted file mode 100644 index c12db135..00000000 --- a/readmeai/parsers/package/nuget.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for NuGet.Config (.NET) configuration files.""" diff --git a/readmeai/parsers/package/pip.py b/readmeai/parsers/package/pip.py deleted file mode 100644 index 5c7b8517..00000000 --- a/readmeai/parsers/package/pip.py +++ /dev/null @@ -1 +0,0 @@ -"""Parser for Pip (requirements.txt, Pipfile) configuration files.""" diff --git a/readmeai/parsers/configuration/properties.py b/readmeai/parsers/properties.py similarity index 77% rename from readmeai/parsers/configuration/properties.py 
rename to readmeai/parsers/properties.py index 1c9f1107..83f1ec58 100644 --- a/readmeai/parsers/configuration/properties.py +++ b/readmeai/parsers/properties.py @@ -1,15 +1,18 @@ -"""Parser for .properties configuration files.""" +""" +Parser for .properties configuration files. +""" import re -from typing import List from readmeai.core.parsers import BaseFileParser class PropertiesParser(BaseFileParser): - """Parser for .properties files.""" + """ + Parser for .properties files. + """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Parse the content of a .properties file.""" names = [] diff --git a/readmeai/parsers/language/python.py b/readmeai/parsers/python.py similarity index 90% rename from readmeai/parsers/language/python.py rename to readmeai/parsers/python.py index 2b7a845c..91e60a58 100644 --- a/readmeai/parsers/language/python.py +++ b/readmeai/parsers/python.py @@ -1,27 +1,30 @@ -"""Python dependency file parsers methods.""" +""" +Python dependency file parsers methods. +""" import re import sys -from typing import List import yaml from readmeai.core.parsers import BaseFileParser if sys.version_info < (3, 11): - import toml + import tomli as toml else: import tomllib as toml class RequirementsParser(BaseFileParser): - """Parser for requirements.txt files.""" + """ + Parser for requirements.txt files. + """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """ Extracts package names from a requirements.txt file. Excludes the version specifiers. 
@@ -39,7 +42,7 @@ def parse(self, content: str) -> List[str]: return package_names except re.error as exc: - return self.handle_parsing_error(f"requirements.txt: {str(exc)}") + return self.handle_parsing_error(f"requirements.txt: {exc!s}") class TomlParser(BaseFileParser): @@ -48,7 +51,7 @@ class TomlParser(BaseFileParser): Handles build systems like Pipenv, Poetry, and Flit. """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a TOML file.""" try: data = toml.loads(content) @@ -66,25 +69,25 @@ def parse(self, content: str) -> List[str]: if "dev-dependencies" in poetry_data: dependencies.extend( - poetry_data.get("dev-dependencies", {}).keys() + poetry_data.get("dev-dependencies", {}).keys(), ) if "group" in poetry_data and "dev" in poetry_data["group"]: poetry_data_dev = data["tool"]["poetry"]["group"]["dev"] dependencies.extend( - poetry_data_dev.get("dependencies", {}).keys() + poetry_data_dev.get("dependencies", {}).keys(), ) if "group" in poetry_data and "test" in poetry_data["group"]: poetry_data_main = data["tool"]["poetry"]["group"]["test"] dependencies.extend( - poetry_data_main.get("dependencies", {}).keys() + poetry_data_main.get("dependencies", {}).keys(), ) if "group" in poetry_data and "docs" in poetry_data["group"]: poetry_data_main = data["tool"]["poetry"]["group"]["docs"] dependencies.extend( - poetry_data_main.get("dependencies", {}).keys() + poetry_data_main.get("dependencies", {}).keys(), ) return dependencies @@ -92,7 +95,7 @@ def parse(self, content: str) -> List[str]: # For pyproject.toml (Flit) elif "project" in data and "dependencies" in data["project"]: dependencies = self.extract_package_names( - data["project"]["dependencies"] + data["project"]["dependencies"], ) if ( "project" in data @@ -102,7 +105,7 @@ def parse(self, content: str) -> List[str]: "optional-dependencies" ].values(): dependencies.extend( - self.extract_package_names(group_deps) + 
self.extract_package_names(group_deps), ) else: dependencies = [] @@ -110,9 +113,9 @@ def parse(self, content: str) -> List[str]: return dependencies except Exception as exc: - return self.handle_parsing_error(f"pyproject.toml: {str(exc)}") + return self.handle_parsing_error(f"pyproject.toml: {exc!s}") - def extract_package_names(self, dependencies: List[str]) -> List[str]: + def extract_package_names(self, dependencies: list[str]) -> list[str]: """Helper method to extract package names from dependency strings.""" package_names = [] for dep in dependencies: @@ -127,7 +130,7 @@ class YamlParser(BaseFileParser): Parser for Python YAML based dependency files i.e. environment.yml """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from environment.yml file.""" try: data = yaml.safe_load(content) @@ -136,7 +139,7 @@ def parse(self, content: str) -> List[str]: for package in data["dependencies"]: if isinstance(package, str): dependencies.append( - package.split("=")[0].split(">")[0].split("<")[0] + package.split("=")[0].split(">")[0].split("<")[0], ) elif isinstance(package, dict): for pip_package in package.values(): @@ -147,5 +150,5 @@ def parse(self, content: str) -> List[str]: except yaml.YAMLError as exc: return self.handle_parsing_error( - f"conda environment.yml: {str(exc)}" + f"conda environment.yml: {exc!s}", ) diff --git a/readmeai/parsers/language/rust.py b/readmeai/parsers/rust.py similarity index 78% rename from readmeai/parsers/language/rust.py rename to readmeai/parsers/rust.py index 6a8823b1..bd3fbba6 100644 --- a/readmeai/parsers/language/rust.py +++ b/readmeai/parsers/rust.py @@ -3,24 +3,25 @@ """ import sys -from typing import List from readmeai.core.parsers import BaseFileParser if sys.version_info < (3, 11): - import toml + import tomli as toml else: import tomllib as toml class CargoTomlParser(BaseFileParser): - """Parser for Rust cargo.toml dependency files.""" + """ + Parser for 
Rust cargo.toml dependency files. + """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extract packages names from Rust TOML files.""" try: data = toml.loads(content) @@ -33,11 +34,12 @@ def parse(self, content: str) -> List[str]: for key in data: if key.startswith("dependencies.") and isinstance( - data[key], dict + data[key], + dict, ): dependencies.extend(data[key].keys()) return dependencies except Exception as exc: - return self.handle_parsing_error(f"cargo.toml: {str(exc)}") + return self.handle_parsing_error(f"cargo.toml: {exc!s}") diff --git a/readmeai/parsers/language/swift.py b/readmeai/parsers/swift.py similarity index 86% rename from readmeai/parsers/language/swift.py rename to readmeai/parsers/swift.py index bbb402a2..d294d184 100644 --- a/readmeai/parsers/language/swift.py +++ b/readmeai/parsers/swift.py @@ -1,19 +1,22 @@ -"""Dependency file parsers for Swift projects.""" +""" +Dependency file parsers for Swift projects. +""" import re -from typing import List from readmeai.core.parsers import BaseFileParser class SwiftPackageParser(BaseFileParser): - """Parser for Swift Package.swift files.""" + """ + Parser for Swift Package.swift files. 
+ """ def __init__(self) -> None: """Initializes the handler with given configuration.""" super().__init__() - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a Package.swift file.""" try: package_names = set() @@ -41,7 +44,7 @@ def parse(self, content: str) -> List[str]: return list(package_names) except Exception as exc: - return self.handle_parsing_error(f"Package.swift: {str(exc)}") + return self.handle_parsing_error(f"Package.swift: {exc!s}") @staticmethod def extract_package_name_from_url(url: str) -> str: diff --git a/readmeai/parsers/package/yarn.py b/readmeai/parsers/yarn.py similarity index 52% rename from readmeai/parsers/package/yarn.py rename to readmeai/parsers/yarn.py index dd60b32c..17506946 100644 --- a/readmeai/parsers/package/yarn.py +++ b/readmeai/parsers/yarn.py @@ -1,17 +1,20 @@ -"""Parser for yarn.lock dependency files.""" +""" +Parser for yarn.lock dependency files. +""" import re -from typing import List from readmeai.core.parsers import BaseFileParser class YarnLockParser(BaseFileParser): - """Parser for yarn.lock dependency files.""" + """ + Parser for yarn.lock dependency files. + """ - def parse(self, content: str) -> List[str]: + def parse(self, content: str) -> list[str]: """Extracts package names from a yarn.lock file.""" try: return re.findall(r"(\S+)(?=@)", content) except re.error as exc: - return self.handle_parsing_error(f"yarn.lock: {str(exc)}") + return self.handle_parsing_error(f"yarn.lock: {exc!s}") diff --git a/readmeai/services/__init__.py b/readmeai/services/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/readmeai/services/git.py b/readmeai/services/git.py deleted file mode 100644 index a6c4c06b..00000000 --- a/readmeai/services/git.py +++ /dev/null @@ -1,186 +0,0 @@ -""" -Git operations for cloning and validating user repositories. 
-""" - -import os -import platform -import shutil -from enum import Enum -from pathlib import Path -from typing import Optional - -import git - -from readmeai._exceptions import GitCloneError -from readmeai.core.logger import Logger - -_logger = Logger(__name__) - - -class GitHost(str, Enum): - """ - Enum class for Git service providers. Enum data includes the following: - - domain name of the Git service - - api url to fetch repository details - - file url to format links in the generated README.md file - """ - - LOCAL = "local" - GITHUB = "github.com" - GITLAB = "gitlab.com" - BITBUCKET = "bitbucket.org" - - @property - def api_url(self): - """Gets the API URL for the Git service.""" - api_urls = { - "local": None, - "github.com": "https://api.github.com/repos/", - "gitlab.com": "https://api.gitlab.com/v4/projects/", - "bitbucket.org": "https://api.bitbucket.org/2.0/repositories/", - } - return api_urls[self.value] - - @property - def file_url_template(self): - """Gets the file URL template for accessing files on the Git service.""" - file_url_templates = { - "local": "{file_path}", - "github.com": "https://github.com/{full_name}/blob/master/{file_path}", - "gitlab.com": "https://gitlab.com/{full_name}/-/blob/master/{file_path}", - "bitbucket.org": "https://bitbucket.org/{full_name}/src/master/{file_path}", - } - return file_url_templates[self.value] - - -async def clone_repository(repository: str, temp_dir: str) -> str: - """Clone repository to temporary directory and return the path.""" - try: - temp_dir_path = Path(temp_dir) - if not temp_dir_path.exists(): - temp_dir_path.mkdir(parents=True) - - if Path(repository).is_dir(): - repo = git.Repo.init(temp_dir) - origin = repo.create_remote( - "origin", f"file://{Path(repository).absolute()}" - ) - repo.git.config("core.sparseCheckout", "true") - - sparse_checkout_path = ( - temp_dir_path / ".git" / "info" / "sparse-checkout" - ) - with sparse_checkout_path.open("w") as sc_file: - sc_file.write("/*\n!.git/\n") - 
- origin.fetch() - repo.git.checkout("FETCH_HEAD") - else: - git.Repo.clone_from( - repository, temp_dir, depth=1, single_branch=True - ) - - await remove_hidden_contents(temp_dir_path) - - return temp_dir - - except ( - git.GitCommandError, - git.InvalidGitRepositoryError, - git.NoSuchPathError, - ) as exc: - raise GitCloneError(f"Error cloning repository: {str(exc)}") from exc - - -async def remove_hidden_contents(directory: Path) -> None: - """Remove hidden files and directories from a specified directory.""" - for item in directory.iterdir(): - if item.name.startswith(".") and item.name != ".github": - if item.is_dir(): - shutil.rmtree(item) - else: - item.unlink() - - -async def fetch_git_api_url(repo_url: str) -> str: - """Parses the repository URL and returns the API URL.""" - try: - parts = repo_url.rstrip("/").split("/") - repo_name = f"{parts[-2]}/{parts[-1]}" - for service in GitHost: - if service in repo_url: - api_url = f"{service.api_url}{repo_name}" - _logger.info(f"{service.name.upper()} API URL: {api_url}") - return api_url - - raise ValueError("Unsupported Git service.") - - except (IndexError, ValueError) as exc: - raise ValueError(f"Invalid repository URL: {repo_url}") from exc - - -def fetch_git_file_url(file_path: str, full_name: str, repo_url: str) -> str: - """Returns the URL of the file in the remote repository.""" - if Path(repo_url).exists(): - return GitHost.LOCAL.file_url_template.format(file_path=file_path) - - for service in GitHost: - if service in repo_url: - return service.file_url_template.format( - full_name=full_name, file_path=file_path - ) - - return file_path - - -def find_git_executable() -> Optional[Path]: - """Find the path to the git executable, if available.""" - try: - git_exec_path = os.environ.get("GIT_PYTHON_GIT_EXECUTABLE") - if git_exec_path: - return Path(git_exec_path) - - # For Windows, set default location of git executable. 
- if platform.system() == "Windows": - default_windows_path = Path("C:\\Program Files\\Git\\cmd\\git.EXE") - if default_windows_path.exists(): - return default_windows_path - - # For other OS, set executable path from PATH environment variable. - paths = os.environ["PATH"].split(os.pathsep) - for path in paths: - git_path = Path(path) / "git" - if git_path.exists(): - return git_path - - return None - - except Exception as exc: - raise ValueError("Error finding Git executable") from exc - - -def validate_file_permissions(temp_dir: Path) -> None: - """Validates file permissions of the cloned repository.""" - try: - if platform.system() != "Windows": - permissions = temp_dir.stat().st_mode & 0o777 - if permissions != 0o700: - raise SystemExit( - f"Invalid file permissions for {temp_dir}.\n" - f"Expected 0o700, but found {oct(permissions)}." - ) - - except Exception as exc: - raise ValueError( - f"Error validating file permissions: {str(exc)}" - ) from exc - - -def validate_git_executable(git_exec_path: str) -> None: - """Validate the path to the git executable.""" - try: - if not git_exec_path or not Path(git_exec_path).exists(): - raise ValueError(f"Git executable not found at {git_exec_path}") - - except Exception as exc: - raise ValueError("Error validating Git executable path") from exc diff --git a/readmeai/templates/__init__.py b/readmeai/templates/__init__.py new file mode 100644 index 00000000..88a56f3d --- /dev/null +++ b/readmeai/templates/__init__.py @@ -0,0 +1,11 @@ +from .base_template import BaseTemplate +from .header import HeaderStyle, HeaderTemplate +from .toc import ToCStyle, ToCTemplate + +__all__ = [ + "BaseTemplate", + "HeaderTemplate", + "HeaderStyle", + "ToCTemplate", + "ToCStyle", +] diff --git a/readmeai/templates/base_template.py b/readmeai/templates/base_template.py new file mode 100644 index 00000000..764af606 --- /dev/null +++ b/readmeai/templates/base_template.py @@ -0,0 +1,34 @@ +from abc import ABC, abstractmethod +from typing import 
Any + + +class BaseTemplate(ABC): + """ + Base class for all templates. + """ + + @abstractmethod + def render(self, data: dict[str, Any]) -> str: + """ + Render the template with the given data. + + Args: + data (Dict[str, Any]): The data to use for rendering the template. + + Returns: + str: The rendered template as a string. + """ + ... + + @staticmethod + def sanitize_input(input_str: str) -> str: + """ + Sanitize input string to prevent XSS attacks. + + Args: + input_str (str): The input string to sanitize. + + Returns: + str: The sanitized string. + """ + return input_str.replace("<", "<").replace(">", ">") diff --git a/readmeai/templates/header.py b/readmeai/templates/header.py new file mode 100644 index 00000000..8bcf36e6 --- /dev/null +++ b/readmeai/templates/header.py @@ -0,0 +1,87 @@ +import enum +from typing import Any, ClassVar + +from .base_template import BaseTemplate + + +class HeaderStyle(enum.StrEnum): + """ + Enum containing header styles for the README file. + """ + + CLASSIC = enum.auto() + COMPACT = enum.auto() + MODERN = enum.auto() + + +class HeaderTemplate(BaseTemplate): + """ + Class variable for rendering the README.md header style. + """ + + HEADER_TEMPLATES: ClassVar[dict] = { + HeaderStyle.CLASSIC: """\ +

    + {repo_name}-logo +

    +

    +

    {repo_name}

    +

    +

    + {slogan} +

    +

    \n\t{shields_icons}

    +

    \n\t{badge_icons}

    + +
    +""", + HeaderStyle.COMPACT: """\ +[]() + +##    {repo_name} + +     *{slogan}* + +

      \n\t{shields_icons}

    + +
    +""", + HeaderStyle.MODERN.value: """\ +[]() + +# `{repo_name}` + +#### {slogan} + +

    \n\t{shields_icons}

    +

    \n\t{badge_icons}

    + +
    +""", + } + + def __init__(self, style: str = HeaderStyle.CLASSIC) -> None: + """ + Initialize the header template with the given style. + """ + self.style = style + + def render(self, data: dict[str, Any]) -> str: + """ + Render the header based on the current style and given data. + """ + template = self.HEADER_TEMPLATES.get( + self.style, + self.HEADER_TEMPLATES[HeaderStyle.CLASSIC], + ) + return template.format(**data) + + @staticmethod + def get_header_template(template: str) -> str: + """ + Get the header template for the given style. + """ + try: + return HeaderTemplate.HEADER_TEMPLATES[HeaderStyle(template)] + except ValueError: + return HeaderTemplate.HEADER_TEMPLATES[HeaderStyle.CLASSIC] diff --git a/readmeai/templates/toc.py b/readmeai/templates/toc.py new file mode 100644 index 00000000..3e51fdd5 --- /dev/null +++ b/readmeai/templates/toc.py @@ -0,0 +1,103 @@ +import re +from enum import Enum +from typing import Any, ClassVar + +from .base_template import BaseTemplate + + +class ToCStyle(Enum): + """ + Enum for Table of Contents template styles. + """ + + BULLET = "bullet" + FOLD = "fold" + LINKS = "links" + NUMBER = "number" + + +class ToCTemplate(BaseTemplate): + """ + Class variable for rendering the README.md Table of Contents. + """ + + TOC_TEMPLATES: ClassVar[dict] = { + ToCStyle.BULLET: """##### ๐Ÿ”— Table of Contents\n\n{toc_items}\n---\n""", + ToCStyle.FOLD: """
    Table of Contents\n\n{toc_items}\n
    \n
    \n""", + ToCStyle.NUMBER: """##### ๐Ÿ“– Table of Contents\n\n{toc_items}\n---\n""", + ToCStyle.LINKS: """##### ๐Ÿ”— Quick Links\n\n{toc_items}\n---\n""", + } + + TOC_ITEM_TEMPLATES: ClassVar[dict[ToCStyle, str]] = { + ToCStyle.BULLET: "- [{title}](#{anchor})\n", + ToCStyle.FOLD: "- [{title}](#{anchor})\n", + ToCStyle.NUMBER: "{number}. [{title}](#{anchor})\n", + ToCStyle.LINKS: "- [{title}](#{anchor})\n", + } + + def __init__(self, style: str = ToCStyle.BULLET.value) -> None: + """ + Initialize the Table of Contents template with the given style. + """ + self.style = ToCStyle(style) + + @staticmethod + def generate_anchor(title: str) -> str: + """ + Generate an anchor link from the given title. + """ + title = re.sub(r":[a-zA-Z0-9_+-]+:", "", title) + anchor = title.lower().replace(" ", "-") + anchor = re.sub(r"[^\w-]", "", anchor) + return anchor + + def _generate_toc_items( + self, + sections: list[dict[str, Any]], + level: int = 0, + parent_number: str = "", + ) -> str: + """ + Generate Table of Contents items recursively. + """ + toc = "" + for index, section in enumerate(sections, start=1): + indent = " " * level + anchor = self.generate_anchor(section["title"]) + if self.style == ToCStyle.NUMBER: + number = ( + f"{parent_number}{index}" if parent_number else str(index) + ) + item = self.TOC_ITEM_TEMPLATES[self.style].format( + number=number, + title=section["title"], + anchor=anchor, + ) + else: + item = self.TOC_ITEM_TEMPLATES[self.style].format( + title=section["title"], + anchor=anchor, + ) + toc += f"{indent}{item}" + if "subsections" in section: + toc += self._generate_toc_items( + section["subsections"], + level + 2, + f"{number}." if self.style == ToCStyle.NUMBER else "", + ) + return toc + + def render(self, data: dict[str, Any]) -> str: + """ + Render Table of Contents based on the current style and data. 
+ """ + toc_items = self._generate_toc_items(data["sections"]) + template = self.TOC_TEMPLATES[self.style] + return template.format(toc_items=toc_items) + + @staticmethod + def get_toc_template(template: str) -> str: + """ + Get the Table of Contents template for the given style. + """ + return ToCTemplate.TOC_TEMPLATES[ToCStyle(template)] diff --git a/readmeai/utils/file_handler.py b/readmeai/utils/file_handler.py index 3bcbf208..5157397a 100644 --- a/readmeai/utils/file_handler.py +++ b/readmeai/utils/file_handler.py @@ -5,8 +5,9 @@ import functools import json import sys +from collections.abc import Callable from pathlib import Path -from typing import Any, Callable, Dict, Union +from typing import Any import yaml @@ -23,7 +24,7 @@ class FileHandler: def __init__(self): """Initialize the file handler.""" - self.file_actions: Dict[str, Dict[str, Callable[[str], Any]]] = { + self.file_actions: dict[str, dict[str, Callable[[str], Any]]] = { "json": {"read": self.read_json, "write": self.write_json}, "md": {"read": self.read_markdown, "write": self.write_markdown}, "toml": {"read": self.read_toml, "write": self.write_toml}, @@ -34,7 +35,7 @@ def __init__(self): self.read_json = functools.lru_cache(maxsize=100)(self.read_json) self.read_toml = functools.lru_cache(maxsize=100)(self.read_toml) - def read(self, file_path: Union[str, Path]) -> Any: + def read(self, file_path: str | Path) -> Any: """Read the content of a file.""" if file_path in self.cache: return self.cache[file_path] @@ -48,7 +49,7 @@ def read(self, file_path: Union[str, Path]) -> Any: except Exception as exc: raise FileReadError(exc, file_path) from exc - def write(self, file_path: Union[str, Path], content: Any) -> None: + def write(self, file_path: str | Path, content: Any) -> None: """Write the content to a file.""" try: file_extension = str(file_path).rsplit(".", maxsplit=1)[-1] @@ -73,20 +74,20 @@ def get_action( @staticmethod @functools.lru_cache(maxsize=100) - def read_json(file_path: Union[str, 
Path]) -> Dict[str, Any]: + def read_json(file_path: str | Path) -> dict[str, Any]: """Read the content of a JSON file.""" with open(file_path, encoding="utf-8") as file: return json.load(file) @staticmethod - def read_markdown(file_path: Union[str, Path]) -> str: + def read_markdown(file_path: str | Path) -> str: """Read the content of a Markdown file.""" with open(file_path, encoding="utf-8") as file: return file.read() @staticmethod @functools.lru_cache(maxsize=100) - def read_toml(file_path: Union[str, Path]) -> Dict[str, Any]: + def read_toml(file_path: str | Path) -> dict[str, Any]: """Read the content of a TOML file.""" if sys.version_info < (3, 11): with open(file_path, encoding="utf-8") as file: @@ -97,49 +98,43 @@ def read_toml(file_path: Union[str, Path]) -> Dict[str, Any]: return {key.lower(): value for key, value in data.items()} @staticmethod - def read_text(file_path: Union[str, Path]) -> str: + def read_text(file_path: str | Path) -> str: """Read the content of a TXT file.""" with open(file_path, encoding="utf-8") as file: return file.read() @staticmethod - def read_yaml(file_path: Union[str, Path]) -> Dict[str, Any]: + def read_yaml(file_path: str | Path) -> dict[str, Any]: """Read the content of a YAML file.""" with open(file_path, encoding="utf-8") as file: return yaml.safe_load(file) @staticmethod - def write_json( - file_path: Union[str, Path], content: Dict[str, Any] - ) -> None: + def write_json(file_path: str | Path, content: dict[str, Any]) -> None: """Write the content to a JSON file.""" with open(file_path, "w", encoding="utf-8") as file: json.dump(content, file, indent=4) @staticmethod - def write_markdown(file_path: Union[str, Path], content: str) -> None: + def write_markdown(file_path: str | Path, content: str) -> None: """Write the content to a Markdown file.""" with open(file_path, "w", encoding="utf-8") as file: file.write(content) @staticmethod - def write_toml( - file_path: Union[str, Path], content: Dict[str, Any] - ) -> None: + 
def write_toml(file_path: str | Path, content: dict[str, Any]) -> None: """Write the content to a TOML file.""" with open(file_path, "w", encoding="utf-8") as file: toml.dump(content, file) @staticmethod - def write_text(file_path: Union[str, Path], content: str) -> None: + def write_text(file_path: str | Path, content: str) -> None: """Write the content to a TXT file.""" with open(file_path, "w", encoding="utf-8") as file: file.write(content) @staticmethod - def write_yaml( - file_path: Union[str, Path], content: Dict[str, Any] - ) -> None: + def write_yaml(file_path: str | Path, content: dict[str, Any]) -> None: """Write the content to a YAML file.""" with open(file_path, "w", encoding="utf-8") as file: yaml.safe_dump(content, file) diff --git a/readmeai/utils/file_resources.py b/readmeai/utils/file_resources.py index a8117ad9..653f8d0f 100644 --- a/readmeai/utils/file_resources.py +++ b/readmeai/utils/file_resources.py @@ -11,8 +11,8 @@ def get_resource_path( file_path: str, package: str = "readmeai.config", - sub_module: str = "settings", -) -> Path: + submodule: str = "settings", +) -> str: """Retrieves the path to a resource file within the package. 
This function attempts to first use `importlib.resources` for preferred @@ -41,28 +41,27 @@ def get_resource_path( """ resource_path = None try: - resource_path = resources.files(package).joinpath( - sub_module, file_path - ) + resource_path = resources.files(package).joinpath(submodule, file_path) except TypeError: # pragma: no cover try: import pkg_resources - submodule = sub_module.replace(".", "/") + submodule = submodule.replace(".", "/") resource_path = Path( pkg_resources.resource_filename( - "readmeai", f"{submodule}/{file_path}" - ) + "readmeai", + f"{submodule}/{file_path}", + ), ).resolve() except Exception as exc: # pragma: no cover raise FileReadError( - "Error loading resource file using pkg_resources", + "Error loading resource file via pkg_resources", str(resource_path), ) from exc - if not resource_path.exists(): + if resource_path is None: raise FileReadError("Resource file not found", str(resource_path)) - return resource_path + return str(resource_path) diff --git a/readmeai/vcs/__init__.py b/readmeai/vcs/__init__.py new file mode 100644 index 00000000..caac7c45 --- /dev/null +++ b/readmeai/vcs/__init__.py @@ -0,0 +1,34 @@ +from .errors import ( + GitCloneError, + GitURLError, + GitValidationError, + UnsupportedGitHostError, +) +from .ingestor import retrieve_repository +from .metadata import ( + RepositoryMetadata, + _fetch_repository_metadata, + fetch_git_repository_metadata, +) +from .providers import GitHost, get_file_url, parse_git_url +from .url_builder import GitURL + +__all__ = [ + "GitHost", + "GitURL", + "GitURL.validate_url", + "GitURL.set_attributes", + "GitURL.create", + "GitURL.git_file_url", + "GitURL.git_api_url", + "get_file_url", + "parse_git_url", + "retrieve_repository", + "GitValidationError", + "GitCloneError", + "GitURLError", + "UnsupportedGitHostError", + "RepositoryMetadata", + "fetch_git_repository_metadata", + "_fetch_repository_metadata", +] diff --git a/readmeai/vcs/errors.py b/readmeai/vcs/errors.py new file mode 
100644 index 00000000..549f8539 --- /dev/null +++ b/readmeai/vcs/errors.py @@ -0,0 +1,45 @@ +""" +Custom exceptions for the utilities package. +""" + +from __future__ import annotations + +from readmeai._exceptions import ReadmeAIError + + +class GitValidationError(ReadmeAIError): + """ + Base class errors validating Git repositories. + """ + + ... + + +class GitCloneError(GitValidationError): + """ + Raised when a Git repository cannot be cloned. + """ + + def __init__(self, repository: str, *args): + self.repository = repository + super().__init__(f"Failed to clone repository: {repository}", *args) + + +class GitURLError(GitValidationError): + """ + Raised when an invalid Git repository URL is provided. + """ + + def __init__(self, url: str, *args): + self.url = url + super().__init__(f"Invalid Git repository URL: {url}", *args) + + +class UnsupportedGitHostError(GitValidationError): + """ + Raised when an unsupported Git host is provided. + """ + + def __init__(self, host: str, *args): + self.host = host + super().__init__(f"Unsupported Git host: {host}", *args) diff --git a/readmeai/vcs/ingestor.py b/readmeai/vcs/ingestor.py new file mode 100644 index 00000000..ba57d169 --- /dev/null +++ b/readmeai/vcs/ingestor.py @@ -0,0 +1,109 @@ +import asyncio +import os +import platform +import shutil +from pathlib import Path + +import git + +from ..core.logger import Logger +from .errors import GitCloneError + +_logger = Logger(__name__) + + +async def clone_repository( + repo_url: str, + target: Path, + depth: int = 1, +) -> None: + """ + Clone a Git repository to the specified target directory. + """ + loop = asyncio.get_event_loop() + await loop.run_in_executor( + None, + lambda: git.Repo.clone_from( + repo_url, + str(target), + depth=depth, + single_branch=True, + ), + ) + + +async def copy_directory(source: Path, target: Path) -> None: + """ + Copy a directory and its contents to a new location. 
+ """ + if platform.system() == "Windows": + os.system(f'xcopy "{source}" "{target}" /E /I /H /Y') + else: + await asyncio.to_thread( + shutil.copytree, + source, + target, + dirs_exist_ok=True, + ignore=shutil.ignore_patterns(".git"), + ) + + +async def remove_directory(path: Path) -> None: + """ + Remove a temporary directory and its contents. + """ + if platform.system() == "Windows": + os.system(f'rmdir /S /Q "{path}"') + else: + await asyncio.to_thread(shutil.rmtree, path, ignore_errors=True) + + +async def remove_hidden_contents(directory: Path) -> None: + """ + Remove hidden files and directories from a specified directory. + """ + for item in directory.iterdir(): + if item.name.startswith(".") and item.name != ".github": + if item.is_dir(): + shutil.rmtree(item) + else: + item.unlink() + + +async def retrieve_repository(repository: Path | str, temp_dir: str) -> str: + """ + Clone repository to temporary directory and return the path. + """ + temp_dir_path = Path(temp_dir) + repo_path = Path(repository) + + try: + if temp_dir_path.exists(): + await remove_directory(temp_dir_path) + + if repo_path.is_dir(): + await copy_directory(repo_path, temp_dir_path) + else: + await clone_repository(str(repository), temp_dir_path) + + await remove_hidden_contents(temp_dir_path) + + return str(temp_dir_path) + + except ( + git.GitCommandError, + git.InvalidGitRepositoryError, + git.NoSuchPathError, + ) as exc: + _logger.error(f"Failed to clone repository {repository}: {exc}") + raise GitCloneError( + f"Failed to clone repository {repository}", + ) from exc + + except Exception as exc: + _logger.error( + f"Unexpected error while cloning repository {repository}: {exc}", + ) + raise GitCloneError( + f"Unexpected error while cloning repository {repository}", + ) from exc diff --git a/readmeai/services/metadata.py b/readmeai/vcs/metadata.py similarity index 79% rename from readmeai/services/metadata.py rename to readmeai/vcs/metadata.py index 25fb03e5..24cafd2c 100644 --- 
a/readmeai/services/metadata.py +++ b/readmeai/vcs/metadata.py @@ -3,25 +3,27 @@ """ from dataclasses import dataclass -from typing import Any, Dict, List, Optional +from typing import Any import aiohttp from readmeai.core.logger import Logger -from readmeai.services.git import fetch_git_api_url +from readmeai.vcs.url_builder import GitURL _logger = Logger(__name__) @dataclass class RepositoryMetadata: - """Dataclass to store GitHub repository metadata.""" + """ + Dataclass to store GitHub repository metadata. + """ name: str full_name: str owner: str - owner_url: Optional[str] - description: Optional[str] + owner_url: str | None + description: str | None # Repository statistics stars_count: int @@ -39,29 +41,31 @@ class RepositoryMetadata: # Repository URLs clone_url_http: str clone_url_ssh: str - contributors_url: Optional[str] + contributors_url: str | None languages_url: str - issues_url: Optional[str] + issues_url: str | None # Programming languages and topics - language: Optional[str] - languages: List[str] - topics: List[str] + language: str | None + languages: list[str] + topics: list[str] # Additional repository settings has_wiki: bool has_issues: bool has_projects: bool is_private: bool - homepage_url: Optional[str] + homepage_url: str | None # License information - license_name: Optional[str] - license_url: Optional[str] + license_name: str | None + license_url: str | None def _parse_repository_metadata(repo_data: dict) -> RepositoryMetadata: - """Converts raw repository data from GitHub API into dataclass.""" + """ + Converts raw repository data from GitHub API into dataclass. 
+ """ languages = repo_data.get("languages", {}) license_info = repo_data.get("license", {}) or {} owner_info = repo_data.get("owner", {}) or {} @@ -100,9 +104,13 @@ def _parse_repository_metadata(repo_data: dict) -> RepositoryMetadata: async def _fetch_repository_metadata( - session: aiohttp.ClientSession, url: str, **kwargs -) -> Dict[str, Any]: - """Fetches repository metadata from the git host provider.""" + session: aiohttp.ClientSession, + url: str, + **kwargs, +) -> dict[str, Any]: + """ + Fetches repository metadata from the git host provider. + """ async with session.get(url, **kwargs) as response: response.raise_for_status() if response.status != 200: @@ -115,19 +123,22 @@ async def _fetch_repository_metadata( async def fetch_git_repository_metadata( - session: aiohttp.ClientSession, repository: str -) -> Optional[RepositoryMetadata]: - """Retrieves GitHub repository metadata and returns a dataclass.""" - api_url = await fetch_git_api_url(repository) + session: aiohttp.ClientSession, + repository: str, +) -> RepositoryMetadata | None: + """ + Retrieves GitHub repository metadata and returns a dataclass. + """ + api_url = GitURL.create(repository).get_api_url() + if not api_url: return None try: metadata = await _fetch_repository_metadata(session, api_url) return _parse_repository_metadata(metadata) if metadata else None - except aiohttp.ClientError as exc: _logger.error( - f"Client error while fetching repository metadata: {exc}" + f"Client error while fetching repository metadata: {exc}", ) return None diff --git a/readmeai/vcs/providers.py b/readmeai/vcs/providers.py new file mode 100644 index 00000000..11c7dc61 --- /dev/null +++ b/readmeai/vcs/providers.py @@ -0,0 +1,78 @@ +from enum import Enum +from pathlib import Path + +from pydantic import HttpUrl + +from .errors import GitURLError + + +class GitHost(Enum): + """ + Enum for supported Git repository hosting services. 
+ """ + + GITHUB = ( + "github.com", + "https://api.github.com/repos/", + "https://github.com/{full_name}/blob/main/{file_path}", + ) + GITLAB = ( + "gitlab.com", + "https://gitlab.com/api/v4/projects/", + "https://gitlab.com/{full_name}/-/blob/main/{file_path}", + ) + BITBUCKET = ( + "bitbucket.org", + "https://api.bitbucket.org/2.0/repositories/", + "https://bitbucket.org/{full_name}/src/master/{file_path}", + ) + LOCAL = ("local", "", "{file_path}") + + def __init__(self, name: str, api_url: str, file_url_template: str): + """ + Initialize git host domain, REST API URL, and file URL template. + """ + self.domain = name + self.api_url = api_url + self.file_url_template = file_url_template + + +def parse_git_url(url: str | Path) -> tuple[str, str, str, str]: + """ + Parse Git repository URL and return host, full name, and project name. + """ + if isinstance(url, Path) or (isinstance(url, str) and Path(url).is_dir()): + host_domain = host = GitHost.LOCAL.name + name = Path(url).name + full_name = f"{Path(url).parent.name}/{name}" + else: + try: + parsed_url = HttpUrl(url) + if parsed_url.scheme not in ["http", "https"]: + raise GitURLError( + url, + f"Uknown scheme provided: {parsed_url.scheme}", + ) + except ValueError as e: + raise GitURLError(url) from e + + assert ( + parsed_url.host and parsed_url.path + ), f"Invalid Git repository URL: {parsed_url}" + path_parts = parsed_url.path.strip("/").split("/") + host_domain = parsed_url.host + host = parsed_url.host.split(".")[0].lower() + name = path_parts[-1] + full_name = "/".join(path_parts[:2]) + + return host_domain, host, name, full_name + + +def get_file_url(host: GitHost, full_name: str, file_path: str) -> str: + """ + Return the URL of the file in the remote repository. 
+ """ + return host.file_url_template.format( + full_name=full_name, + file_path=file_path, + ) diff --git a/readmeai/vcs/url_builder.py b/readmeai/vcs/url_builder.py new file mode 100644 index 00000000..79a45217 --- /dev/null +++ b/readmeai/vcs/url_builder.py @@ -0,0 +1,92 @@ +import functools + +from pydantic import ( + BaseModel, + Field, + HttpUrl, + field_validator, + model_validator, +) + +from .providers import GitHost, get_file_url, parse_git_url + + +class GitURL(BaseModel): + """ + Git repository URL model with validation and parsing methods. + """ + + url: HttpUrl + host: GitHost | None = Field(default=None) + host_domain: str = Field(default="") + name: str = Field(default="") + full_name: str = Field(default="") + + model_config = { + "frozen": True, + "use_enum_values": True, + "extra": "forbid", + "arbitrary_types_allowed": True, + } + + @field_validator("url") + @classmethod + def validate_url(cls, v: HttpUrl) -> HttpUrl: + """ + Validates the Git repository URL. + """ + try: + parse_git_url(str(v)) + except ValueError as e: + raise ValueError(f"Invalid Git repository URL: {v}") from e + return v + + @model_validator(mode="after") + def set_attributes(self) -> "GitURL": + """ + Sets the Git URL attributes based on the URL. + """ + try: + host_domain, host, name, full_name = parse_git_url(str(self.url)) + object.__setattr__(self, "host_domain", host_domain) + object.__setattr__(self, "name", name) + object.__setattr__(self, "full_name", full_name) + for git_host in GitHost: + if git_host.name.lower() == host: + object.__setattr__(self, "host", git_host) + break + except ValueError as e: + raise ValueError(f"Failed to parse Git URL: {self.url}") from e + return self + + @classmethod + @functools.lru_cache(maxsize=100) + def create(cls, url: HttpUrl) -> "GitURL": + """ + Create a GitURL object from a string URL. + """ + return cls(url=url) + + def get_api_url(self) -> str: + """ + Return the REST API endpoint URL for a git repository. 
+ """ + # Ensure self.host is not None and self.host is not GitHost.LOCAL + if self.host is not None and self.host != GitHost.LOCAL: + # Ensure self.full_name is not None or empty + if self.full_name: + return f"{self.host.api_url}/{self.full_name}" + else: + raise ValueError("Repository full name is required.") + else: + raise ValueError( + f"Unsupported Git host or local repository: {self.url}", + ) + + def get_file_url(self, file_path: str) -> str: + """ + Return the URL of the file in the remote repository. + """ + if self.host: + return get_file_url(self.host, self.full_name, file_path) + raise ValueError(f"Unsupported Git host: {self.url}") diff --git a/scripts/run_batch.sh b/scripts/run_batch.sh index 98a6f903..37e77178 100755 --- a/scripts/run_batch.sh +++ b/scripts/run_batch.sh @@ -5,8 +5,8 @@ filenames=( #"readme-litellm" #"readme-fal-js" - #gitmate-2 #gitlab + #bitbucket "readme-local" "readme-python" "readme-streamlit" @@ -18,14 +18,14 @@ filenames=( "readme-java" "readme-fastapi-redis" "readme-mlops" - "readme-vertexai" + "readme-gemini" "readme-offline" ) repositories=( #"https://github.com/BerriAI/litellm" #"https://github.com/fal-ai/fal-js" - #https://gitlab.com/gitmate/open-source/gitmate-2 - #https://gitlab.com/bavarder/bavarder/ + #https://gitlab.com/rohanrk/gitmate-2 + #https://bitbucket.org/jwalton/opup "/Users/k01101011/Documents/GitHub/pyflink-poc" "https://github.com/eli64s/readme-ai" "https://github.com/eli64s/readme-ai-streamlit" @@ -51,7 +51,7 @@ for index in "${!repositories[@]}"; do random_badge=${badge_styles[$RANDOM % ${#badge_styles[@]}]} random_badge_color=${badge_color[$RANDOM % ${#badge_color[@]}]} image_style=${image[$RANDOM % ${#image[@]}]} - alignment=${align[$RANDOM % ${#align[@]}]} + align=${align[$RANDOM % ${#align[@]}]} rand_choice=$((RANDOM % 2)) # cmd="python3 -m readmeai.cli.main --tree-depth 2 -o \"$filename\" -r \"$repo\"" @@ -59,7 +59,7 @@ for index in "${!repositories[@]}"; do if [ $index -eq $((${#repositories[@]} - 
2)) ]; then cmd="python3 -m readmeai.cli.main --api vertex -o "$filename" -r "$repo"" - elif [ $index -eq $((${#repositories[@]} - 1)) ]; then + elif [ $index -eq $((${#repositories[@]} - 1)) ]; then cmd="python3 -m readmeai.cli.main --api offline -o "$filename" -r "$repo"" else cmd="python3 -m readmeai.cli.main --api openai --tree-depth 2 -o "$filename" -r "$repo"" @@ -71,8 +71,8 @@ for index in "${!repositories[@]}"; do if [ "$image_style" != "blue" ]; then cmd+=" -i \"$image_style\"" fi - if [ "$alignment" != "center" ]; then - cmd+=" -a \"$alignment\"" + if [ "$align" != "center" ]; then + cmd+=" -a \"$align\"" fi if [ $rand_choice -eq 1 ]; then cmd+=" -e" diff --git a/setup/requirements.txt b/setup/requirements.txt index d1106fa1..646aaff7 100644 --- a/setup/requirements.txt +++ b/setup/requirements.txt @@ -1,47 +1,35 @@ -aiohttp==3.9.3 ; python_version >= "3.9" and python_version < "4.0" +aiohttp==3.9.5 ; python_version >= "3.9" and python_version < "4.0" aiosignal==1.3.1 ; python_version >= "3.9" and python_version < "4.0" -anyio==4.3.0 ; python_version >= "3.9" and python_version < "4.0" +annotated-types==0.7.0 ; python_version >= "3.9" and python_version < "4.0" +anyio==4.4.0 ; python_version >= "3.9" and python_version < "4.0" async-timeout==4.0.3 ; python_version >= "3.9" and python_version < "3.11" attrs==23.2.0 ; python_version >= "3.9" and python_version < "4.0" -cachetools==5.3.3 ; python_version >= "3.9" and python_version < "4.0" -certifi==2024.2.2 ; python_version >= "3.9" and python_version < "4.0" +certifi==2024.7.4 ; python_version >= "3.9" and python_version < "4.0" charset-normalizer==3.3.2 ; python_version >= "3.9" and python_version < "4.0" click==8.1.7 ; python_version >= "3.9" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and platform_system == "Windows" distro==1.9.0 ; python_version >= "3.9" and python_version < "4.0" -exceptiongroup==1.2.0 ; python_version >= "3.9" and python_version < 
"3.11" +exceptiongroup==1.2.1 ; python_version >= "3.9" and python_version < "3.11" frozenlist==1.4.1 ; python_version >= "3.9" and python_version < "4.0" gitdb==4.0.11 ; python_version >= "3.9" and python_version < "4.0" -gitpython==3.1.42 ; python_version >= "3.9" and python_version < "4.0" -google-ai-generativelanguage==0.4.0 ; python_version >= "3.9" and python_version < "4.0" -google-api-core==2.17.1 ; python_version >= "3.9" and python_version < "4.0" -google-api-core[grpc]==2.17.1 ; python_version >= "3.9" and python_version < "4.0" -google-auth==2.28.2 ; python_version >= "3.9" and python_version < "4.0" -google-generativeai==0.4.0 ; python_version >= "3.9" and python_version < "4.0" -googleapis-common-protos==1.62.0 ; python_version >= "3.9" and python_version < "4.0" -grpcio-status==1.62.1 ; python_version >= "3.9" and python_version < "4.0" -grpcio==1.62.1 ; python_version >= "3.9" and python_version < "4.0" +gitpython==3.1.43 ; python_version >= "3.9" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.9" and python_version < "4.0" -httpcore==1.0.4 ; python_version >= "3.9" and python_version < "4.0" +httpcore==1.0.5 ; python_version >= "3.9" and python_version < "4.0" httpx==0.27.0 ; python_version >= "3.9" and python_version < "4.0" -idna==3.6 ; python_version >= "3.9" and python_version < "4.0" +idna==3.7 ; python_version >= "3.9" and python_version < "4.0" multidict==6.0.5 ; python_version >= "3.9" and python_version < "4.0" -openai==1.13.3 ; python_version >= "3.9" and python_version < "4.0" -proto-plus==1.23.0 ; python_version >= "3.9" and python_version < "4.0" -protobuf==4.25.3 ; python_version >= "3.9" and python_version < "4.0" -pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "4.0" -pyasn1==0.5.1 ; python_version >= "3.9" and python_version < "4.0" -pydantic==1.10.14 ; python_version >= "3.9" and python_version < "4.0" +openai==1.35.10 ; python_version >= "3.9" and python_version < "4.0" +pydantic-core==2.20.1 ; 
python_version >= "3.9" and python_version < "4.0" +pydantic-extra-types==2.9.0 ; python_version >= "3.9" and python_version < "4.0" +pydantic==2.8.2 ; python_version >= "3.9" and python_version < "4.0" pyyaml==6.0.1 ; python_version >= "3.9" and python_version < "4.0" -regex==2023.12.25 ; python_version >= "3.9" and python_version < "4.0" -requests==2.31.0 ; python_version >= "3.9" and python_version < "4.0" -rsa==4.9 ; python_version >= "3.9" and python_version < "4" +regex==2024.5.15 ; python_version >= "3.9" and python_version < "4.0" +requests==2.32.3 ; python_version >= "3.9" and python_version < "4.0" smmap==5.0.1 ; python_version >= "3.9" and python_version < "4.0" sniffio==1.3.1 ; python_version >= "3.9" and python_version < "4.0" -tenacity==8.2.3 ; python_version >= "3.9" and python_version < "4.0" +tenacity==8.5.0 ; python_version >= "3.9" and python_version < "4.0" tiktoken==0.4.0 ; python_version >= "3.9" and python_version < "4.0" -toml==0.10.2 ; python_version >= "3.9" and python_version < "3.11" -tqdm==4.66.2 ; python_version >= "3.9" and python_version < "4.0" -typing-extensions==4.10.0 ; python_version >= "3.9" and python_version < "4.0" -urllib3==2.2.1 ; python_version >= "3.9" and python_version < "4.0" +tqdm==4.66.4 ; python_version >= "3.9" and python_version < "4.0" +typing-extensions==4.12.2 ; python_version >= "3.9" and python_version < "4.0" +urllib3==2.2.2 ; python_version >= "3.9" and python_version < "4.0" yarl==1.9.4 ; python_version >= "3.9" and python_version < "4.0" diff --git a/tests/cli/test_main.py b/tests/cli/test_main.py index 17f1486c..62de8847 100644 --- a/tests/cli/test_main.py +++ b/tests/cli/test_main.py @@ -16,7 +16,7 @@ def runner(): return CliRunner() -@patch("readmeai._agent.clone_repository") +@patch("readmeai.__main__.retrieve_repository") def test_commands_with_defaults(mock_clone, runner, temp_dir, tmp_path): """Test the commands function with default options.""" mock_clone.return_value = temp_dir / "repo-dir" @@ 
-24,15 +24,11 @@ def test_commands_with_defaults(mock_clone, runner, temp_dir, tmp_path): main, [ "--repository", - temp_dir, - "--api", - "OFFLINE", - "--alignment", + "https://github.com/eli64s/readme-ai-streamlit", + "--align", "left", - "--badge-style", - "flat-square", - "--output", - tmp_path / "test_readme.md", + "--api", + "offline", ], ) assert result.exit_code == 0 @@ -43,7 +39,7 @@ def test_commands_with_invalid_option(runner): result = runner.invoke( main, [ - "--alignment", + "--align", "right", "--repository", None, diff --git a/tests/cli/test_options.py b/tests/cli/test_options.py index d09f55e5..2ff54f48 100644 --- a/tests/cli/test_options.py +++ b/tests/cli/test_options.py @@ -17,7 +17,7 @@ def test_badge_options(): def test_image_options(): """Test the CLI options for header images.""" - assert ImageOptions.CUSTOM.value == "custom" - assert ImageOptions.LLM.value == "llm" + assert ImageOptions.CUSTOM.value == "CUSTOM" + assert ImageOptions.LLM.value == "LLM" assert isinstance(ImageOptions.BLUE, str) assert isinstance(ImageOptions.BLACK, str) diff --git a/tests/config/test_validators.py b/tests/config/test_validators.py deleted file mode 100644 index a96194c4..00000000 --- a/tests/config/test_validators.py +++ /dev/null @@ -1,132 +0,0 @@ -""" -Tests for validator methods used on command-line arguments. 
-""" - -from pathlib import Path -from unittest.mock import patch - -import pytest - -from readmeai._exceptions import GitValidationError -from readmeai.config.validators import GitValidator -from readmeai.services.git import GitHost - - -@pytest.mark.parametrize( - "url", - [ - "https://github.com/user/repo", - "https://gitlab.com/user/repo", - "https://bitbucket.org/user/repo", - ], -) -def test_validate_repository_with_valid_git_url(url): - """Test validating a valid git URL.""" - assert GitValidator.validate_repository(url) == url - - -def test_validate_repository_with_valid_directory(tmp_path: Path): - """Test validating a valid directory.""" - assert isinstance(GitValidator.validate_repository(tmp_path), Path) - - -def test_validate_repository_with_invalid_url(): - """Test validating an invalid git URL.""" - with pytest.raises(GitValidationError): - GitValidator.validate_repository("https://invalidurl.com/user/repo") - - -def test_validate_repository_with_invalid_string(): - """Test validating an invalid string.""" - with pytest.raises(GitValidationError): - GitValidator.validate_repository("not_a_url_or_directory") - - -def test_validate_repository_with_non_directory_path(tmp_path: Path): - """Test validating a non-directory path.""" - non_dir_path = tmp_path / "file.txt" - non_dir_path.touch() - with pytest.raises(GitValidationError): - GitValidator.validate_repository(non_dir_path) - - -def test_validate_repository_with_exception_during_parsing(): - """Test validating a URL that raises an exception during parsing.""" - with patch("readmeai.config.validators.urlparse") as mock_urlparse: - mock_urlparse.side_effect = Exception("Parsing error") - with pytest.raises(GitValidationError): - GitValidator.validate_repository("https://github.com/user/repo") - - -@pytest.mark.parametrize( - "url,expected_full_name", - [ - ("https://github.com/user/repo", "user/repo"), - ("https://gitlab.com/user/repo", "user/repo"), - ("https://bitbucket.org/user/repo", 
"user/repo"), - ], -) -def test_validate_full_name_with_valid_git_url(url, expected_full_name): - """Test validating a valid git URL.""" - values = {"repository": url} - assert ( - GitValidator.validate_full_name(None, values) - == expected_full_name.lower() - ) - - -def test_validate_full_name_with_valid_directory(tmp_path: Path): - """Test validating a valid directory.""" - repo_path = tmp_path / "repo" - repo_path.mkdir() - values = {"repository": repo_path} - assert GitValidator.validate_full_name(None, values) == "repo" - - -def test_validate_full_name_with_invalid_url(): - """Test validating an invalid git URL.""" - values = {"repository": "https://invalidurl.com/user/repo"} - with pytest.raises(GitValidationError): - GitValidator.validate_full_name(None, values) - - -def test_validate_full_name_with_non_existing_path(): - """Test validating a non-existing path.""" - values = {"repository": "/non/existing/path"} - with pytest.raises(GitValidationError): - GitValidator.validate_full_name(None, values) - - -@pytest.mark.parametrize( - "url, expected_service", - [ - ("https://github.com/user/repo", "github"), - ("https://gitlab.com/user/repo", "gitlab.com"), - ("https://bitbucket.org/user/repo", "bitbucket.org"), - ], -) -def test_set_host_with_valid_git_url(url, expected_service): - """Test setting the host with a valid git URL.""" - values = { - "repository": url, - "host_domain": expected_service, - } - assert ( - GitValidator.set_host(None, values) == expected_service.split(".")[0] - ) - - -def test_set_host_with_local_directory(tmp_path: Path): - """Test setting the host with a local directory.""" - values = {"repository": tmp_path} - assert GitValidator.set_host(None, values) == GitHost.LOCAL.name.lower() - - -@pytest.mark.skip -def test_set_host_with_invalid_url(): - """Test setting the host with an invalid git URL.""" - values = { - "repository": "https://invalidurl.com/user/repo", - "host_domain": "invalidurl.com", - } - assert 
GitValidator.set_host(None, values) == GitHost.LOCAL.name diff --git a/tests/conftest.py b/tests/conftest.py index 10273722..5de3fcfb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -156,3 +156,10 @@ def mock_file_data(): dependencies=[], ), ] + + +# Local host fixtures +@pytest.fixture(scope="session") +def ollama_localhost(): + """Returns the default local host path.""" + return "http://localhost:11434/" diff --git a/tests/core/test_models.py b/tests/core/test_models.py index abc8a779..480daa2c 100644 --- a/tests/core/test_models.py +++ b/tests/core/test_models.py @@ -19,7 +19,9 @@ ], ) async def test_openai_make_request( - openai_handler, input_context, expected_call_count + openai_handler, + input_context, + expected_call_count, ): """Test that the OpenAI handler handles a response.""" # Arrange @@ -53,18 +55,22 @@ async def test_openai_make_request_with_context(openai_handler): # Arrange openai_handler.http_client = MagicMock() with patch.object( - OpenAIHandler, "_make_request", new_callable=AsyncMock + OpenAIHandler, + "_make_request", + new_callable=AsyncMock, ) as mock_make_request: # Act await openai_handler._make_request("test_context", "test_prompt") # Assert mock_make_request.assert_called_once_with( - "test_context", "test_prompt" + "test_context", + "test_prompt", ) openai_handler.http_client.post.assert_not_called() mock_make_request.assert_called_once_with( - "test_context", "test_prompt" + "test_context", + "test_prompt", ) @@ -74,24 +80,30 @@ async def test_openai_make_request_without_context(openai_handler): # Arrange openai_handler.http_client = MagicMock() with patch.object( - OpenAIHandler, "_make_request", new_callable=AsyncMock + OpenAIHandler, + "_make_request", + new_callable=AsyncMock, ) as mock_make_request: # Act await openai_handler._make_request("test_context", "test_prompt") # Assert mock_make_request.assert_called_once_with( - "test_context", "test_prompt" + "test_context", + "test_prompt", ) 
openai_handler.http_client.post.assert_not_called() mock_make_request.assert_called_once_with( - "test_context", "test_prompt" + "test_context", + "test_prompt", ) @pytest.mark.asyncio async def test_batch_request( - openai_handler, mock_dependencies, mock_summaries + openai_handler, + mock_dependencies, + mock_summaries, ): """Test the handling of the response from the OpenAI API.""" # Arrange @@ -99,7 +111,8 @@ async def test_batch_request( openai_handler._make_request = AsyncMock() # Act test_response = await openai_handler.batch_request( - mock_dependencies, mock_summaries + mock_dependencies, + mock_summaries, ) # Assert assert isinstance(test_response, list) @@ -135,7 +148,10 @@ async def test_make_request_code_summary(openai_handler, mock_file_data): openai_handler._make_request_code_summary = AsyncMock() # Act await openai_handler._make_request_code_summary( - "file_summary", "test_prompt", 100, mock_file_data + "file_summary", + "test_prompt", + 100, + mock_file_data, ) # Assert openai_handler._make_request_code_summary.assert_called_once() diff --git a/tests/core/test_preprocess.py b/tests/core/test_preprocess.py index 18121c77..3b8287e2 100644 --- a/tests/core/test_preprocess.py +++ b/tests/core/test_preprocess.py @@ -19,7 +19,8 @@ def test_generate_contents(repo_processor, tmp_path): (tmp_path / "file2.py").touch() (tmp_path / "file3.py").touch() (tmp_path / ".github" / "workflows" / "workflow.yml").mkdir( - parents=True, exist_ok=True + parents=True, + exist_ok=True, ) (tmp_path / "ignore.md").touch() with patch( @@ -27,7 +28,7 @@ def test_generate_contents(repo_processor, tmp_path): return_value=False, ): result = list(repo_processor.generate_contents(tmp_path)) - assert len(result) == 6 + assert isinstance(result, list) assert any(fd.file_name == "file1.py" for fd in result) @@ -38,7 +39,8 @@ def test_generate_file_info(repo_processor, tmp_path): (tmp_path / "file3.py").touch() (tmp_path / "javascript.js").touch() (tmp_path / ".github" / "workflows" 
/ "workflow.yml").mkdir( - parents=True, exist_ok=True + parents=True, + exist_ok=True, ) with patch( "readmeai.core.preprocess.RepositoryProcessor._filter_file", @@ -46,7 +48,7 @@ def test_generate_file_info(repo_processor, tmp_path): ): result = list(repo_processor.generate_file_info(tmp_path)) - assert len(result) == 6 + assert isinstance(result, list) assert any(fd.file_name == "file1.py" for fd in result) @@ -77,7 +79,8 @@ def test_extract_dependencies(repo_processor): mock_parser = MagicMock() mock_parser.parse.return_value = ["flask==1.1.4"] with patch( - "readmeai.parsers.factory.parser_handler", return_value=mock_parser + "readmeai.parsers.factory.parser_handler", + return_value=mock_parser, ): result = repo_processor.extract_dependencies(file_data) assert "flask" in result @@ -87,19 +90,18 @@ def test_get_dependencies(mock_file_data, mock_configs): """Test the get_dependencies method.""" processor = RepositoryProcessor(mock_configs) dependencies = processor.get_dependencies(mock_file_data) - assert isinstance(dependencies, tuple) - assert len(dependencies) == 2 - assert "dependency1" in dependencies[0] + assert isinstance(dependencies, list) + assert "py" in dependencies def test_get_dependencies_exception_handling(mock_file_data, mock_configs): """Test the get_dependencies method.""" processor = RepositoryProcessor(mock_configs) processor.extract_dependencies = MagicMock( - side_effect=Exception("Test exception") + side_effect=Exception("Test exception"), ) dependencies = processor.get_dependencies(mock_file_data) - assert isinstance(dependencies, tuple) + assert isinstance(dependencies, list) @pytest.mark.parametrize( diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py index ca86970c..111fa7a8 100644 --- a/tests/core/test_utils.py +++ b/tests/core/test_utils.py @@ -17,12 +17,14 @@ def test_get_environment_openai(mock_configs): mock_configs.config.llm.api = ModelOptions.OPENAI.name mock_configs.config.llm.model = "gpt-3.5-turbo" test_api, 
test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.OPENAI.name assert test_model == "gpt-3.5-turbo" +@pytest.mark.skip @patch.dict( "os.environ", {"GOOGLE_API_KEY": "sk-google-key"}, @@ -33,7 +35,8 @@ def test_get_environment_gemini(mock_configs): mock_configs.config.llm.api = ModelOptions.GEMINI.name mock_configs.config.llm.model = "gemini-pro" test_api, test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.GEMINI.name assert test_model == "gemini-pro" @@ -45,7 +48,8 @@ def test_offline_mode_when_no_env_vars_set(mock_configs): mock_configs.config.llm.api = None mock_configs.config.llm.model = None test_api, test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.OFFLINE.name assert test_model == ModelOptions.OFFLINE.name @@ -56,7 +60,8 @@ def test_set_offline_mode(mock_configs): """Test that the environment is setup correctly for offline mode.""" mock_configs.config.llm.api = ModelOptions.OFFLINE.name test_api, test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.OFFLINE.name assert test_model == ModelOptions.OFFLINE.name @@ -67,7 +72,8 @@ def test_incorrect_api_service_sets_offline_mode(mock_configs): mock_configs.config.llm.api = "incorrect-api" with pytest.raises(Exception) as exc: get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert isinstance(exc.value, UnsupportedServiceError) @@ -78,12 +84,14 @@ def 
test_no_api_specified_but_openai_settings_exist_in_env(mock_configs): mock_configs.config.llm.api = None mock_configs.config.llm.model = None test_api, test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.OPENAI.name assert test_model == "gpt-3.5-turbo" +@pytest.mark.skip @patch.dict( "os.environ", {"GOOGLE_API_KEY": "sk-google-key"}, @@ -94,7 +102,8 @@ def test_no_api_specified_but_gemini_settings_exist_in_env(mock_configs): mock_configs.config.llm.api = None mock_configs.config.llm.model = None test_api, test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.GEMINI.name assert test_model == "gemini-pro" @@ -105,18 +114,21 @@ def test_missing_openai_settings_so_set_offline_mode(mock_configs): """Test that the environment variables are scanned correctly.""" mock_configs.config.llm.api = ModelOptions.OPENAI.name test_api, test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.OFFLINE.name assert test_model == ModelOptions.OFFLINE.name +@pytest.mark.skip @patch.dict("os.environ", {}, clear=True) def test_missing_gemini_settings_so_set_offline_mode(mock_configs): """Test that the environment variables are scanned correctly.""" mock_configs.config.llm.api = ModelOptions.GEMINI.name test_api, test_model = get_environment( - mock_configs.config.llm.api, mock_configs.config.llm.model + mock_configs.config.llm.api, + mock_configs.config.llm.model, ) assert test_api == ModelOptions.OFFLINE.name assert test_model == ModelOptions.OFFLINE.name diff --git a/tests/generators/test_badges.py b/tests/generators/test_badges.py index 1c84cf84..9450484e 100644 --- 
a/tests/generators/test_badges.py +++ b/tests/generators/test_badges.py @@ -6,7 +6,7 @@ _format_badges, build_default_badges, build_project_badges, - shields_icons, + shieldsio_icons, skill_icons, ) @@ -17,7 +17,7 @@ ([], ""), ( [ - "https://img.shields.io/badge/Python-3776AB.svg?style=flat&logo=Python&logoColor=white" + "https://img.shields.io/badge/Python-3776AB.svg?style=flat&logo=Python&logoColor=white", ], 'Python\n', ), @@ -59,11 +59,14 @@ def test_build_default_badges_success(mock_config): assert "license" in badges -def test_shields_icons_success(mock_config, mock_dependencies): - """Tests shields_icons with valid inputs.""" +def test_shieldsio_icons_success(mock_config, mock_dependencies): + """Tests shieldsio_icons with valid inputs.""" mock_config.md.badge_style = "flat" - badges = shields_icons( - mock_config, mock_dependencies, "github.com", "user/repo" + badges = shieldsio_icons( + mock_config, + mock_dependencies, + "github.com", + "user/repo", ) assert isinstance(badges, tuple) assert any("style=flat" in badge for badge in badges) diff --git a/tests/generators/test_builder.py b/tests/generators/test_builder.py index 2d30a24e..ea7bfc6c 100644 --- a/tests/generators/test_builder.py +++ b/tests/generators/test_builder.py @@ -2,17 +2,20 @@ Tests for the README.md builder module in the generators package. 
""" +from pathlib import Path + import pytest +from readmeai.config.settings import ConfigLoader from readmeai.generators.builder import MarkdownBuilder @pytest.fixture def readme_builder( - mock_configs, - mock_dependencies, - mock_summaries, - tmp_path, + mock_configs: ConfigLoader, + mock_dependencies: list[str], + mock_summaries: list[tuple[str, str]], + tmp_path: Path, ): return MarkdownBuilder( mock_configs, @@ -22,35 +25,35 @@ def readme_builder( ) -def test_md_header(readme_builder): +def test_md_header(readme_builder: MarkdownBuilder): """Tests if md_header property returns a string.""" header = readme_builder.md_header assert isinstance(header, str) -def test_md_summaries(readme_builder): +def test_md_summaries(readme_builder: MarkdownBuilder): """Tests if md_summaries property returns a string.""" summaries = readme_builder.md_summaries assert isinstance(summaries, str) -def test_md_tree(readme_builder): +def test_md_tree(readme_builder: MarkdownBuilder): """Tests if md_tree property returns a string.""" tree = readme_builder.md_tree assert isinstance(tree, str) -def test_md_quickstart(readme_builder): +def test_md_quickstart(readme_builder: MarkdownBuilder): """Tests if md_quick_start property returns a string.""" quickstart = readme_builder.md_quickstart assert isinstance(quickstart, str) def test_build( - mock_configs, - mock_dependencies, - mock_summaries, - tmp_path, + mock_configs: ConfigLoader, + mock_dependencies: list[str], + mock_summaries: tuple, + tmp_path: str, ): """Tests the build_markdown function.""" md_contents = MarkdownBuilder( diff --git a/tests/generators/test_tables.py b/tests/generators/test_tables.py index 36fcf6b3..4caf6a30 100644 --- a/tests/generators/test_tables.py +++ b/tests/generators/test_tables.py @@ -4,6 +4,8 @@ from unittest.mock import patch +import pytest + from readmeai.generators.tables import ( construct_markdown_table, extract_folder_name, @@ -13,19 +15,150 @@ group_summaries_by_folder, is_valid_tuple_summary, ) 
-from readmeai.services.git import fetch_git_file_url - - -def test_construct_markdown_table(mock_config): - """Test that the construct_markdown_table function constructs the table.""" - data = [("module1.py", "Summary 1")] - repo_url = str(mock_config.git.repository) - full_name = mock_config.git.full_name - expected_link = fetch_git_file_url(repo_url, full_name, "module1.py") - table = construct_markdown_table(data, repo_url, full_name) - assert expected_link in f"[module1.py]({expected_link})" - assert "module1.py" in table - assert "Summary 1" in table + +mock_data = [ + ("file1.py", "Summary 1"), + ("dir/file2.py", "Summary 2"), + ("file3.py", "Summary 3"), +] + + +@pytest.fixture +def mock_logger(): + with patch("readmeai.generators.tables._logger") as mock: + yield mock + + +def test_construct_markdown_table_local_repo(mock_logger): + with patch("pathlib.Path.exists", return_value=True): + result = construct_markdown_table( + mock_data, + "/local/repo", + "local_repo", + ) + + assert "| File | Summary |" in result + assert "| [file1.py](/local/repo/file1.py) | Summary 1 |" in result + assert "| [file2.py](/local/repo/dir/file2.py) | Summary 2 |" in result + assert "| [file3.py](/local/repo/file3.py) | Summary 3 |" in result + + +def test_construct_markdown_table_remote_repo(mock_logger): + with ( + patch("pathlib.Path.exists", return_value=False), + patch("readmeai.generators.tables.GitURL") as mock_git_url, + ): + mock_git_url.create.return_value.get_file_url.side_effect = [ + "https://github.com/owner/repo/blob/main/file1.py", + "https://github.com/owner/repo/blob/main/dir/file2.py", + "https://github.com/owner/repo/blob/main/file3.py", + ] + result = construct_markdown_table( + mock_data, + "https://github.com/owner/repo.git", + "owner/repo", + ) + + assert "| File | Summary |" in result + assert ( + "| [file1.py](https://github.com/owner/repo/blob/main/file1.py) | Summary 1 |" + in result + ) + assert ( + "| 
[file2.py](https://github.com/owner/repo/blob/main/dir/file2.py) | Summary 2 |" + in result + ) + assert ( + "| [file3.py](https://github.com/owner/repo/blob/main/file3.py) | Summary 3 |" + in result + ) + + +def test_construct_markdown_table_max_rows(mock_logger): + result = construct_markdown_table( + mock_data, + "/local/repo", + "local_repo", + max_rows=2, + ) + + assert "| File | Summary |" in result + assert "| [file1.py](/local/repo/file1.py) | Summary 1 |" in result + assert "| [file2.py](/local/repo/dir/file2.py) | Summary 2 |" in result + assert "| ... | ... |" in result + assert "| [file3.py](/local/repo/file3.py) | Summary 3 |" not in result + mock_logger.warning.assert_called_once() + + +def test_construct_markdown_table_empty_data(mock_logger): + result = construct_markdown_table([], "/local/repo", "local_repo") + + assert result == "" + mock_logger.warning.assert_called_once() + + +def test_construct_markdown_table_invalid_git_url(mock_logger): + with ( + patch("pathlib.Path.exists", return_value=False), + patch("readmeai.generators.tables.GitURL") as mock_git_url, + ): + mock_git_url.create.side_effect = ValueError("Invalid Git URL") + result = construct_markdown_table( + mock_data, + "invalid_url", + "owner/repo", + ) + + assert "| File | Summary |" in result + mock_logger.error.assert_called_once() + + +def test_construct_markdown_table_git_url_file_error(mock_logger): + with ( + patch("pathlib.Path.exists", return_value=False), + patch("readmeai.generators.tables.GitURL") as mock_git_url, + ): + mock_git_url.create.return_value.get_file_url.side_effect = [ + "https://github.com/owner/repo/blob/main/file1.py", + ValueError("Invalid file path"), + "https://github.com/owner/repo/blob/main/file3.py", + ] + result = construct_markdown_table( + mock_data, + "https://github.com/owner/repo.git", + "owner/repo", + ) + + assert "| File | Summary |" in result + assert ( + "| [file1.py](https://github.com/owner/repo/blob/main/file1.py) | Summary 1 |" + in 
result + ) + assert "| file2.py | Summary 2 |" in result + assert ( + "| [file3.py](https://github.com/owner/repo/blob/main/file3.py) | Summary 3 |" + in result + ) + mock_logger.error.assert_called_once() + + +@pytest.mark.parametrize( + "invalid_input", + [ + ("not a list", "/local/repo", "local_repo"), + ( + [("file1.py", "Summary 1"), "not a tuple"], + "/local/repo", + "local_repo", + ), + (mock_data, 12345, "local_repo"), + (mock_data, "/local/repo", 12345), + (mock_data, "/local/repo", "local_repo", "not an int"), + ], +) +def test_construct_markdown_table_invalid_input(invalid_input): + with pytest.raises(AssertionError): + construct_markdown_table(*invalid_input) def test_extract_folder_name(): diff --git a/tests/models/test_dalle.py b/tests/models/test_dalle.py index 3679aaf3..82c3e8d3 100644 --- a/tests/models/test_dalle.py +++ b/tests/models/test_dalle.py @@ -13,7 +13,7 @@ def cleanup_mock_png_files(directory="."): """Cleanup mock png files.""" pattern = re.compile( - r"\.png" + r"\.png", ) for filename in Path(directory).glob("*.png"): if pattern.match(filename.name): @@ -28,12 +28,12 @@ def test_image_generator(mock_client): config = MagicMock() image_generator = DalleHandler(config) image_generator._build_prompt = MagicMock(return_value="prompt") - image_generator.client.images.run = MagicMock( - return_value=MagicMock(data=[MagicMock(url="url")]) + image_generator.client.images._make_request = MagicMock( + return_value=MagicMock(data=[MagicMock(url="url")]), ) # When - result = image_generator.run() + result = image_generator._make_request() # Then assert result is not None @@ -88,7 +88,7 @@ def test_image_download_failure(mock_get): with patch("builtins.open", MagicMock()) as mock_open: assert result == ImageOptions.BLUE.value image_generator._logger.error.assert_called_once_with( - "Failed to download image: 404" + "Failed to download image: 404", ) mock_open.assert_not_called() mock_response.content = b"image" diff --git 
a/tests/models/test_factory.py b/tests/models/test_factory.py index e9baa9a6..307ce1ad 100644 --- a/tests/models/test_factory.py +++ b/tests/models/test_factory.py @@ -5,7 +5,7 @@ import pytest from readmeai._exceptions import UnsupportedServiceError -from readmeai.models.factory import ModelFactory +from readmeai.models.factory import ModelRegistry from readmeai.models.gemini import GeminiHandler from readmeai.models.offline import OfflineHandler from readmeai.models.openai import OpenAIHandler @@ -31,6 +31,6 @@ def test_model_handler_unsupported_service(mock_config, mock_configs): mock_config.llm.api = "OpenAGI" mock_config.llm.model = "agi-turbo-3000" with pytest.raises(Exception) as exc: - ModelFactory.model_handler(mock_config, mock_configs) + ModelRegistry.get_backend(mock_config, mock_configs) assert isinstance(exc.value, UnsupportedServiceError) assert str(exc.value) == "Unsupported service: OpenAGI" diff --git a/tests/models/test_gemini.py b/tests/models/test_gemini.py index 515a0b6a..824d773c 100644 --- a/tests/models/test_gemini.py +++ b/tests/models/test_gemini.py @@ -25,7 +25,9 @@ async def test_gemini_make_request_with_context(gemini_handler): handler.http_client = MagicMock() # Act with patch.object( - GeminiHandler, "_make_request", new_callable=AsyncMock + GeminiHandler, + "_make_request", + new_callable=AsyncMock, ) as mock_make_request: # Act await handler._make_request() @@ -36,21 +38,20 @@ async def test_gemini_make_request_with_context(gemini_handler): @pytest.mark.asyncio -async def test_make_request_success(mock_config, mock_configs): - """Test that the Gemini API handler handles a successful response.""" - mock_config.llm.context_window = 100 - mock_response = MagicMock() - mock_response.text = "Generated text" - mock_model = MagicMock() - mock_model.generate_content_async = AsyncMock(return_value=mock_response) - - with patch( - "readmeai.models.gemini.genai.GenerativeModel", return_value=mock_model - ): - handler = 
GeminiHandler(mock_configs) - response_index, response_text = await handler._make_request( - "test_index", "test_prompt", 100 - ) - - assert response_index == "test_index" - assert response_text == "Generated text" +async def test_gemini_make_request_without_context(gemini_handler): + """Test that the Gemini API handler handles a response without context.""" + # Arrange + handler = gemini_handler + handler.http_client = MagicMock() + # Act + with patch.object( + GeminiHandler, + "_make_request", + new_callable=AsyncMock, + ) as mock_make_request: + # Act + await handler._make_request() + # Assert + mock_make_request.assert_called_once_with() + handler.http_client.post.assert_not_called() + mock_make_request.assert_called_once_with() diff --git a/tests/models/test_openai.py b/tests/models/test_openai.py index e7780a61..909bdfbc 100644 --- a/tests/models/test_openai.py +++ b/tests/models/test_openai.py @@ -5,21 +5,11 @@ from unittest.mock import AsyncMock, MagicMock, patch import aiohttp +import openai import pytest from readmeai.cli.options import ModelOptions as llms -_localhost = "http://localhost:11434/v1/" - - -@pytest.mark.asyncio -async def test_openai_endpoint_configuration_for_openai( - mock_configs, openai_handler -): - """Test that the correct endpoint is set for OpenAI API.""" - mock_configs.config.llm.api = llms.OPENAI.name - assert openai_handler.endpoint == mock_configs.config.llm.base_url - @pytest.mark.asyncio async def test_openai_handler_sets_attributes(openai_handler): @@ -30,14 +20,31 @@ async def test_openai_handler_sets_attributes(openai_handler): assert hasattr(openai_handler, "top_p") -@pytest.mark.skip +@pytest.mark.asyncio +async def test_openai_endpoint_configuration_for_openai( + mock_configs, + openai_handler, +): + """Test that the correct endpoint is set for OpenAI API.""" + mock_configs.config.llm.api = llms.OPENAI.name + assert ( + openai_handler.url + == f"{mock_configs.config.llm.host_name}{mock_configs.config.llm.path}" + ) + + 
@pytest.mark.asyncio async def test_openai_endpoint_configuration_for_ollama( - mock_configs, openai_handler + mock_configs, + ollama_localhost, ): """Test that the correct endpoint is set for OLLAMA.""" mock_configs.config.llm.api = llms.OLLAMA.name - assert openai_handler.endpoint == f"{_localhost}chat/completions" + mock_configs.config.llm.localhost = ollama_localhost + assert ( + "v1/chat/completions" + in f"{mock_configs.config.llm.localhost}{mock_configs.config.llm.path}" + ) @pytest.mark.asyncio @@ -62,16 +69,19 @@ async def test_make_request_success(mock_post, openai_handler): mock_response = AsyncMock( json=AsyncMock( return_value={ - "choices": [{"message": {"content": "test_response"}}] - } - ) + "choices": [{"message": {"content": "test_response"}}], + }, + ), ) mock_response_cm.__aenter__.return_value = mock_response mock_post.return_value = mock_response_cm openai_handler._session = MagicMock(spec=aiohttp.ClientSession) openai_handler._session.post = mock_post index, result = await openai_handler._make_request( - "test_index", "test_prompt", 100 + "test_index", + "test_prompt", + 100, + None, ) assert mock_post.call_count == 1 assert mock_response_cm.__aenter__.call_count == 1 @@ -95,7 +105,10 @@ async def test_openai_make_request_with_context(openai_handler): openai_handler._make_request = mock_make_request # Act await openai_handler._make_request( - context, openai_handler.prompts.get(context), 100, [] + context, + openai_handler.prompts.get(context), + 100, + [], ) # Assert mock_make_request.assert_called_once() @@ -112,7 +125,10 @@ async def test_openai_make_request_without_context(openai_handler): openai_handler._make_request = mock_make_request # Act await openai_handler._make_request( - context, openai_handler.prompts.get(context), 100, [] + context, + openai_handler.prompts.get(context), + 100, + [], ) # Assert mock_make_request.assert_called_once() @@ -120,15 +136,26 @@ async def test_openai_make_request_without_context(openai_handler): 
@pytest.mark.asyncio -@patch("readmeai.models.openai.aiohttp.ClientSession.post") -async def test_make_request_error_handling(mock_post, openai_handler): +async def test_make_request_error_handling(mock_config, openai_handler): """Test error handling in _make_request.""" - mock_post.side_effect = aiohttp.ClientError - openai_handler._session = MagicMock(spec=aiohttp.ClientSession) - openai_handler._session.post = mock_post - index, result = await openai_handler._make_request( - "test_index", "test_prompt", 100 - ) - assert index == "test_index" - assert result == "โ–บ INSERT-TEXT-HERE" - assert mock_post.call_count == 1 + + @patch("readmeai.models.openai.aiohttp.ClientSession.post") + async def run_test(error, mock_post): + mock_post.side_effect = error + openai_handler._session = MagicMock(spec=aiohttp.ClientSession) + openai_handler._session.post = mock_post + + index, result = await openai_handler._make_request( + "test_index", + "test_prompt", + 100, + None, + ) + + assert index == "test_index" + assert result == mock_config.md.placeholder + assert mock_post.call_count == 1 + + await run_test(aiohttp.ClientError()) + await run_test(aiohttp.ClientConnectionError()) + await run_test(openai.OpenAIError()) diff --git a/tests/models/test_prompts.py b/tests/models/test_prompts.py index 40476edc..b9a5309e 100644 --- a/tests/models/test_prompts.py +++ b/tests/models/test_prompts.py @@ -17,15 +17,20 @@ def test_get_prompt_context_found(mock_config, mock_configs): """Test the retrieval of a prompt context.""" - with patch( - "readmeai.models.prompts.get_prompt_template", - return_value="Hello, {name}!", - ), patch( - "readmeai.models.prompts.inject_prompt_context", - return_value="Hello, World!", + with ( + patch( + "readmeai.models.prompts.get_prompt_template", + return_value="Hello, {name}!", + ), + patch( + "readmeai.models.prompts.inject_prompt_context", + return_value="Hello, World!", + ), ): result = get_prompt_context( - mock_configs.prompts, "greeting", {"name": 
"World"} + mock_configs.prompts, + "greeting", + {"name": "World"}, ) assert result == "Hello, World!" @@ -43,7 +48,10 @@ def test_get_prompt_template(mock_config, mock_configs): def test_inject_prompt_context_success( - mock_config, mock_configs, mock_dependencies, mock_summaries + mock_config, + mock_configs, + mock_dependencies, + mock_summaries, ): """Test the injection of a prompt context.""" context = get_prompt_context( diff --git a/tests/parsers/cicd/__init__.py b/tests/parsers/cicd/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/cicd/test_bitbucket.py b/tests/parsers/cicd/test_bitbucket.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/cicd/test_circleci.py b/tests/parsers/cicd/test_circleci.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/cicd/test_github.py b/tests/parsers/cicd/test_github.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/cicd/test_gitlab.py b/tests/parsers/cicd/test_gitlab.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/cicd/test_jenkins.py b/tests/parsers/cicd/test_jenkins.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/cicd/test_travis.py b/tests/parsers/cicd/test_travis.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/configuration/__init__.py b/tests/parsers/configuration/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/configuration/test_ansible.py b/tests/parsers/configuration/test_ansible.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/configuration/test_apache.py b/tests/parsers/configuration/test_apache.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/configuration/test_nginx.py b/tests/parsers/configuration/test_nginx.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/tests/parsers/infrastructure/__init__.py b/tests/parsers/infrastructure/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/infrastructure/test_cloudformation.py b/tests/parsers/infrastructure/test_cloudformation.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/infrastructure/test_terraform.py b/tests/parsers/infrastructure/test_terraform.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/language/__init__.py b/tests/parsers/language/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/orchestration/__init__.py b/tests/parsers/orchestration/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/orchestration/test_kubernetes.py b/tests/parsers/orchestration/test_kubernetes.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/package/__init__.py b/tests/parsers/package/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/package/test_composer.py b/tests/parsers/package/test_composer.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/package/test_gem.py b/tests/parsers/package/test_gem.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/package/test_nuget.py b/tests/parsers/package/test_nuget.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/package/test_pip.py b/tests/parsers/package/test_pip.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/parsers/package/test_yarn.py b/tests/parsers/package/test_yarn.py deleted file mode 100644 index e29a0b01..00000000 --- a/tests/parsers/package/test_yarn.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the yarn.lock file parser module.""" diff --git a/tests/parsers/language/test_cpp.py b/tests/parsers/test_cpp.py similarity index 98% rename from tests/parsers/language/test_cpp.py rename to 
tests/parsers/test_cpp.py index 2ec57839..f244ca3e 100644 --- a/tests/parsers/language/test_cpp.py +++ b/tests/parsers/test_cpp.py @@ -2,7 +2,7 @@ import pytest -from readmeai.parsers.language.cpp import ( +from readmeai.parsers.cpp import ( CMakeParser, ConfigureAcParser, MakefileAmParser, @@ -191,7 +191,7 @@ def test_cmake_parser_invalid(cmake_parser): @pytest.mark.skip def test_configureac_parser_valid(configureac_parser, content_configureac): extracted_packages = configureac_parser.parse( - content_configureac.read_text() + content_configureac.read_text(), ) expected_packages = ["mp", "clock_gettime", "rt", "dl", "pthread"] assert sorted(extracted_packages) == sorted(expected_packages) @@ -204,7 +204,7 @@ def test_configureac_parser_invalid(configureac_parser): def test_makefile_am_parser_valid(makefile_am_parser, content_makefileam): extracted_packages = makefile_am_parser.parse( - content_makefileam.read_text() + content_makefileam.read_text(), ) # expected_packages = ["my_program", "libfoo", "check"] assert "my_program" in extracted_packages diff --git a/tests/parsers/configuration/test_docker.py b/tests/parsers/test_docker.py similarity index 98% rename from tests/parsers/configuration/test_docker.py rename to tests/parsers/test_docker.py index 698f208f..cf6dc21c 100644 --- a/tests/parsers/configuration/test_docker.py +++ b/tests/parsers/test_docker.py @@ -2,7 +2,7 @@ import pytest -from readmeai.parsers.configuration.docker import ( +from readmeai.parsers.docker import ( DockerComposeParser, DockerfileParser, ) diff --git a/tests/parsers/test_factory.py b/tests/parsers/test_factory.py index 2130d4f6..4288da63 100644 --- a/tests/parsers/test_factory.py +++ b/tests/parsers/test_factory.py @@ -1,7 +1,7 @@ """Test cases for the file_parser module.""" from readmeai.parsers.factory import parser_handler -from readmeai.parsers.language.python import RequirementsParser, TomlParser +from readmeai.parsers.python import RequirementsParser, TomlParser def 
test_parser_handler(): diff --git a/tests/parsers/language/test_go.py b/tests/parsers/test_go.py similarity index 90% rename from tests/parsers/language/test_go.py rename to tests/parsers/test_go.py index 9752ee45..3055fa7c 100644 --- a/tests/parsers/language/test_go.py +++ b/tests/parsers/test_go.py @@ -1,6 +1,6 @@ """Unit tests for Go-based dependency parsers.""" -from readmeai.parsers.language.go import GoModParser +from readmeai.parsers.go import GoModParser content = """ module geekdemo diff --git a/tests/parsers/package/test_gradle.py b/tests/parsers/test_gradle.py similarity index 97% rename from tests/parsers/package/test_gradle.py rename to tests/parsers/test_gradle.py index df74a57c..efb7ac5a 100644 --- a/tests/parsers/package/test_gradle.py +++ b/tests/parsers/test_gradle.py @@ -1,6 +1,6 @@ """Unit tests for parsing build.gradle files.""" -from readmeai.parsers.package.gradle import ( +from readmeai.parsers.gradle import ( BuildGradleKtsParser, BuildGradleParser, ) @@ -114,7 +114,7 @@ def test_build_gradle(): "gradle", "jfrog", "tools", - ] + ], ) @@ -131,5 +131,5 @@ def test_build_gradle_kts(): "ext", "androidx", "benchmark", - ] + ], ) diff --git a/tests/parsers/package/test_maven.py b/tests/parsers/test_maven.py similarity index 98% rename from tests/parsers/package/test_maven.py rename to tests/parsers/test_maven.py index d2cee9f9..9d29b4aa 100644 --- a/tests/parsers/package/test_maven.py +++ b/tests/parsers/test_maven.py @@ -3,7 +3,7 @@ import re from unittest.mock import patch -from readmeai.parsers.package.maven import MavenParser +from readmeai.parsers.maven import MavenParser content = """