diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..160d3e6 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,21 @@ +# Check http://editorconfig.org for more information +# This is the main config file for this project: +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +[*.py] +indent_size = 4 + +[*.txt] +indent_style = tab +indent_size = 4 + +[*.{diff,md}] +trim_trailing_whitespace = false diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..1c10065 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* stanislav.khlud@saritasa.com sergey.shirokov@saritasa.com leonid.malin@saritasa.com diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..08ed0f1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,16 @@ +# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file +version: 2 +updates: +- package-ecosystem: pip + directory: "/" + schedule: + interval: daily + time: "00:00" + open-pull-requests-limit: 10 + +- package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + time: "00:00" + open-pull-requests-limit: 10 diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml new file mode 100644 index 0000000..7376074 --- /dev/null +++ b/.github/workflows/checks.yaml @@ -0,0 +1,52 @@ +name: CR + +on: + push: + branches: + - main + pull_request: + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + cr: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11", "3.12"] + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ 
matrix.python-version }} + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: latest + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + - uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-pre-commit-${{ matrix.python-version }}-${{ hashFiles('**/.pre-commit-config.yaml') }} + restore-keys: | + ${{ runner.os }}-pre-commit- + - name: Cache poetry dependencies + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: ${{ runner.os }}-poetry-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }} + # Install dependencies if cache does not exist + - name: Install local dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --all-extras + - name: Run checks + run: | + poetry run inv pre-commit.run-hooks diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c4328d3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,162 @@ +### https://github.com/github/gitignore/blob/main/Python.gitignore +### VisualStudioCode template +.vscode/* +!.vscode/recommended_settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +### Python template +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ +pytest.xml + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal +# Local settings +local.py +# Media folder +media/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.venv +config/settings/.env +config/settings/.env.tmp +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Ignore idea folder +.idea + +# Ignore tmp folder +.tmp/ + +# sql files +*.sql + +# Local invoke config +.invoke + +# pyenv's file for specifying local Python version +.python-version diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..adb783c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,53 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + - id: check-toml + - id: check-json + exclude: ".*\\.vscode/" + - id: end-of-file-fixer + - id: trailing-whitespace + - id: detect-aws-credentials + args: [--allow-missing-credentials] + - id: debug-statements + - id: check-merge-conflict + - id: detect-private-key + + - repo: https://github.com/python-poetry/poetry + rev: 1.8.0 + hooks: + - id: poetry-check + args: ["--lock"] + + - repo: https://github.com/asottile/add-trailing-comma + rev: v3.1.0 + hooks: + - id: add-trailing-comma + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.2 + hooks: + - id: ruff + args: [ --fix ] + - id: ruff-format + + - repo: https://github.com/pycqa/docformatter + rev: v1.7.5 + hooks: + - id: docformatter + args: [ + --wrap-descriptions=0, + --in-place, + --blank + ] + + - repo: local + hooks: + - id: mypy + name: Run mypy + entry: inv mypy.run + language: system + pass_filenames: false + types: [ file ] + stages: [ push ] diff --git a/.vscode/recommended_settings.json 
b/.vscode/recommended_settings.json new file mode 100644 index 0000000..8d97d58 --- /dev/null +++ b/.vscode/recommended_settings.json @@ -0,0 +1,23 @@ +{ + "files.exclude": { + "**/__pycache__": true, + "**/.mypy_cache": true + }, + + "editor.rulers": [79], + + "editor.bracketPairColorization.enabled": true, + + "python.analysis.typeCheckingMode": "basic", + + "python.analysis.inlayHints.functionReturnTypes": true, + "mypy.enabled": false, + + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + + "[python]": { + "editor.formatOnSave": true, + "editor.defaultFormatter": "charliermarsh.ruff" + } + } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..e774650 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,54 @@ +# How to contribute + +## Dependencies + +We use [poetry](https://github.com/python-poetry/poetry) to manage the dependencies. + +To install them you would need to run `install` command: + +```bash +poetry install +``` + +To activate your `virtualenv` run `poetry shell`. + +## Style checks + +We use `pre-commit` for quality control. + +To run checks: + +```bash +inv pre-commit.run-hooks +``` + +Note: we also have flake8 dependencies for proper support of flake8 vscode +plugin. + +## Submitting your code + +We use [trunk based](https://trunkbaseddevelopment.com/) development. + +What is the point of this method? + +1. We use protected `main` branch, + so the only way to push your code is via pull request +2. We use issue branches: to implement a new feature or to fix a bug + create a new branch named `issue-$TASKNUMBER` +3. Then create a pull request to `main` branch +4. We use `git tag`s to make releases, so we can track what has changed + since the latest release + +So, this way we achieve an easy and scalable development process +which frees us from merging hell and long-living branches. + +In this method, the latest version of the app is always in the `main` branch. 
+ +### Before submitting + +Before submitting your code please do the following steps: + +1. Add any changes you want +2. Edit documentation if you have changed something significant +3. Update `CHANGELOG.md` with a quick summary of your changes +4. Run `pre-commit` to ensure that style is correct diff --git a/LICENCE b/LICENCE new file mode 100644 index 0000000..8616e51 --- /dev/null +++ b/LICENCE @@ -0,0 +1,21 @@ + + The MIT License (MIT) + Copyright (c) 2024 Saritasa + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE + OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..ad7eda5 --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,20 @@ +version: '3.7' +name: "saritasa-sqlachemy-tools" + +services: + postgres: + image: postgres:15 + ports: + - "5432:5432" + healthcheck: + test: [ + "CMD-SHELL", + "pg_isready -h postgres -t 5 -U ${COMPOSE_PROJECT_NAME}-user || false" + ] + interval: 1s + timeout: 5s + retries: 10 + environment: + - POSTGRES_DB=${COMPOSE_PROJECT_NAME}-dev + - POSTGRES_USER=${COMPOSE_PROJECT_NAME}-user + - POSTGRES_PASSWORD=manager diff --git a/invocations/__init__.py b/invocations/__init__.py new file mode 100644 index 0000000..351a3ad --- /dev/null +++ b/invocations/__init__.py @@ -0,0 +1 @@ +from . import project diff --git a/invocations/project.py b/invocations/project.py new file mode 100644 index 0000000..462e1b0 --- /dev/null +++ b/invocations/project.py @@ -0,0 +1,21 @@ +import invoke +import saritasa_invocations + + +@invoke.task +def build( + context: invoke.Context, +) -> None: + """Build python environ.""" + saritasa_invocations.poetry.install(context) + + +@invoke.task +def init( + context: invoke.Context, +) -> None: + """Prepare env for working with project.""" + saritasa_invocations.git.setup(context) + saritasa_invocations.system.copy_vscode_settings(context) + build(context) + saritasa_invocations.docker.up(context) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..21ae784 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,946 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = true +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "factory-boy" +version = "3.3.0" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "factory_boy-3.3.0-py2.py3-none-any.whl", hash = "sha256:a2cdbdb63228177aa4f1c52f4b6d83fab2b8623bf602c7dedd7eb83c0f69c04c"}, + {file = "factory_boy-3.3.0.tar.gz", hash = "sha256:bc76d97d1a65bbd9842a6d722882098eb549ec8ee1081f9fb2e8ff29f0c300f1"}, +] + +[package.dependencies] +Faker = ">=0.7.0" + +[package.extras] +dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "sqlalchemy-utils", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] +doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] + +[[package]] +name = "faker" +version = "24.2.0" +description = "Faker is a Python package that generates fake data for you." +optional = true +python-versions = ">=3.8" +files = [ + {file = "Faker-24.2.0-py3-none-any.whl", hash = "sha256:dce4754921f9fa7e2003c26834093361b8f45072e0f46f172d6ca1234774ecd4"}, + {file = "Faker-24.2.0.tar.gz", hash = "sha256:87d5e7730426e7b36817921679c4eaf3d810cedb8c81194f47adc3df2122ca18"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = 
"greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = 
"greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = 
"sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "identify" +version = "2.5.35" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "invoke" +version = "2.2.0" +description = "Pythonic task execution" +optional = false +python-versions = ">=3.6" +files = [ + {file = "invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820"}, + {file = "invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5"}, +] + +[[package]] +name = "ipdb" +version = "0.13.13" +description = "IPython-enabled pdb" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"}, + {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"}, +] + +[package.dependencies] +decorator = {version = "*", markers = "python_version >= \"3.11\""} +ipython = {version = ">=7.31.1", markers = "python_version >= \"3.11\""} + +[[package]] +name = "ipython" +version = "8.22.2" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ipython-8.22.2-py3-none-any.whl", hash = "sha256:3c86f284c8f3d8f2b6c662f885c4889a91df7cd52056fd02b7d8d6195d7f56e9"}, + {file = "ipython-8.22.2.tar.gz", hash = 
"sha256:2dcaad9049f9056f1fef63514f176c7d41f930daa78d05b82a176202818f2c14"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5.13.0" + +[package.extras] +all = ["ipython[black,doc,kernel,nbconvert,nbformat,notebook,parallel,qtconsole,terminal]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = 
["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pre-commit" +version = "3.6.2" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.2-py2.py3-none-any.whl", hash = "sha256:ba637c2d7a670c10daedc059f5c49b5bd0aadbccfcd7ec15592cf9665117532c"}, + {file = "pre_commit-3.6.2.tar.gz", hash = "sha256:c3ef34f463045c88658c5b99f38c1e297abdcc0ff13f98d3370055fbbfabc67e"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prompt-toolkit" +version = "3.0.43" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pydantic" +version = "2.6.4" +description = "Data validation using Python type hints" +optional = true +python-versions = ">=3.8" +files = [ + 
{file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.3" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.3" +description = "" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = 
"pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash 
= "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + 
{file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = true +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = 
"sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "saritasa-invocations" +version = "1.1.0" +description = "Collection of invoke commands used by Saritasa" +optional = false 
+python-versions = ">=3.10,<4.0" +files = [ + {file = "saritasa_invocations-1.1.0-py3-none-any.whl", hash = "sha256:af01659c51084d27f115d5aaf9f6075b75b9d4a71b2e1f95102c7e8288bae2b4"}, + {file = "saritasa_invocations-1.1.0.tar.gz", hash = "sha256:67559974fce5ea221416f15d78eecc9cb47fbc11e035d62b4673ec0a91d23d16"}, +] + +[package.dependencies] +invoke = ">=2,<3" +rich = ">=13,<14" + +[package.extras] +env-settings = ["python-decouple (>=3,<4)"] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" 
+version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.28" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0b148ab0438f72ad21cb004ce3bdaafd28465c4276af66df3b9ecd2037bf252"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbda76961eb8f27e6ad3c84d1dc56d5bc61ba8f02bd20fcf3450bd421c2fcc9c"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feea693c452d85ea0015ebe3bb9cd15b6f49acc1a31c28b3c50f4db0f8fb1e71"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da98815f82dce0cb31fd1e873a0cb30934971d15b74e0d78cf21f9e1b05953f"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5adf383c73f2d49ad15ff363a8748319ff84c371eed59ffd0127355d6ea1da"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56856b871146bfead25fbcaed098269d90b744eea5cb32a952df00d542cdd368"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win32.whl", hash = "sha256:943aa74a11f5806ab68278284a4ddd282d3fb348a0e96db9b42cb81bf731acdc"}, + {file = "SQLAlchemy-2.0.28-cp310-cp310-win_amd64.whl", hash = "sha256:c6c4da4843e0dabde41b8f2e8147438330924114f541949e6318358a56d1875a"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46a3d4e7a472bfff2d28db838669fc437964e8af8df8ee1e4548e92710929adc"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:0d3dd67b5d69794cfe82862c002512683b3db038b99002171f624712fa71aeaa"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61e2e41656a673b777e2f0cbbe545323dbe0d32312f590b1bc09da1de6c2a02"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0315d9125a38026227f559488fe7f7cee1bd2fbc19f9fd637739dc50bb6380b2"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af8ce2d31679006e7b747d30a89cd3ac1ec304c3d4c20973f0f4ad58e2d1c4c9"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81ba314a08c7ab701e621b7ad079c0c933c58cdef88593c59b90b996e8b58fa5"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win32.whl", hash = "sha256:1ee8bd6d68578e517943f5ebff3afbd93fc65f7ef8f23becab9fa8fb315afb1d"}, + {file = "SQLAlchemy-2.0.28-cp311-cp311-win_amd64.whl", hash = "sha256:ad7acbe95bac70e4e687a4dc9ae3f7a2f467aa6597049eeb6d4a662ecd990bb6"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3499008ddec83127ab286c6f6ec82a34f39c9817f020f75eca96155f9765097"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b66fcd38659cab5d29e8de5409cdf91e9986817703e1078b2fdaad731ea66f5"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea30da1e76cb1acc5b72e204a920a3a7678d9d52f688f087dc08e54e2754c67"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124202b4e0edea7f08a4db8c81cc7859012f90a0d14ba2bf07c099aff6e96462"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e23b88c69497a6322b5796c0781400692eca1ae5532821b39ce81a48c395aae9"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b6303bfd78fb3221847723104d152e5972c22367ff66edf09120fcde5ddc2e2"}, + {file = 
"SQLAlchemy-2.0.28-cp312-cp312-win32.whl", hash = "sha256:a921002be69ac3ab2cf0c3017c4e6a3377f800f1fca7f254c13b5f1a2f10022c"}, + {file = "SQLAlchemy-2.0.28-cp312-cp312-win_amd64.whl", hash = "sha256:b4a2cf92995635b64876dc141af0ef089c6eea7e05898d8d8865e71a326c0385"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e91b5e341f8c7f1e5020db8e5602f3ed045a29f8e27f7f565e0bdee3338f2c7"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c7b78dfc7278329f27be02c44abc0d69fe235495bb8e16ec7ef1b1a17952db"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eba73ef2c30695cb7eabcdb33bb3d0b878595737479e152468f3ba97a9c22a4"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5df5d1dafb8eee89384fb7a1f79128118bc0ba50ce0db27a40750f6f91aa99d5"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2858bbab1681ee5406650202950dc8f00e83b06a198741b7c656e63818633526"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win32.whl", hash = "sha256:9461802f2e965de5cff80c5a13bc945abea7edaa1d29360b485c3d2b56cdb075"}, + {file = "SQLAlchemy-2.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a6bec1c010a6d65b3ed88c863d56b9ea5eeefdf62b5e39cafd08c65f5ce5198b"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:843a882cadebecc655a68bd9a5b8aa39b3c52f4a9a5572a3036fb1bb2ccdc197"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dbb990612c36163c6072723523d2be7c3eb1517bbdd63fe50449f56afafd1133"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7e4baf9161d076b9a7e432fce06217b9bd90cfb8f1d543d6e8c4595627edb9"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0a5354cb4de9b64bccb6ea33162cb83e03dbefa0d892db88a672f5aad638a75"}, + {file = 
"SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fffcc8edc508801ed2e6a4e7b0d150a62196fd28b4e16ab9f65192e8186102b6"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca7b6d99a4541b2ebab4494f6c8c2f947e0df4ac859ced575238e1d6ca5716b"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win32.whl", hash = "sha256:8c7f10720fc34d14abad5b647bc8202202f4948498927d9f1b4df0fb1cf391b7"}, + {file = "SQLAlchemy-2.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:243feb6882b06a2af68ecf4bec8813d99452a1b62ba2be917ce6283852cf701b"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc4974d3684f28b61b9a90fcb4c41fb340fd4b6a50c04365704a4da5a9603b05"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87724e7ed2a936fdda2c05dbd99d395c91ea3c96f029a033a4a20e008dd876bf"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68722e6a550f5de2e3cfe9da6afb9a7dd15ef7032afa5651b0f0c6b3adb8815d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328529f7c7f90adcd65aed06a161851f83f475c2f664a898af574893f55d9e53"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:df40c16a7e8be7413b885c9bf900d402918cc848be08a59b022478804ea076b8"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:426f2fa71331a64f5132369ede5171c52fd1df1bd9727ce621f38b5b24f48750"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win32.whl", hash = "sha256:33157920b233bc542ce497a81a2e1452e685a11834c5763933b440fedd1d8e2d"}, + {file = "SQLAlchemy-2.0.28-cp39-cp39-win_amd64.whl", hash = "sha256:2f60843068e432311c886c5f03c4664acaef507cf716f6c60d5fde7265be9d7b"}, + {file = "SQLAlchemy-2.0.28-py3-none-any.whl", hash = "sha256:78bb7e8da0183a8301352d569900d9d3594c48ac21dc1c2ec6b3121ed8b6c986"}, + {file = "SQLAlchemy-2.0.28.tar.gz", hash = 
"sha256:dd53b6c4e6d960600fd6532b79ee28e2da489322fcf6648738134587faf767b6"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +mypy = {version = ">=0.910", optional = true, markers = "extra == \"mypy\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] 
+asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "traitlets" +version = "5.14.2" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, + {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "virtualenv" +version = "20.25.1" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", 
"coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[extras] +auto-schema = ["pydantic"] +factories = ["factory-boy"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "631ce82692e7ea81db4a9c084b8def42262c4b5ee9a43d2459fba55631c70758" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..2a514de --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,212 @@ +[tool.poetry] +name = "saritasa-sqlalchemy-tools" +version = "0.1.0" +description = "Tools for sqlalchemy used by saritasa" +authors = [ + "Saritasa ", +] +maintainers = [ + "Stanislav Khlud ", +] +homepage = "https://pypi.org/project/saritasa-sqlachemy-tools/" +repository = "https://github.com/saritasa-nest/saritasa-sqlachemy-tools/" +keywords = [ + "python", + "sqlachemy" +] +license = "MIT" +readme = "README.md" +packages = [ + { include = "saritasa_sqlalchemy_tools" } +] + +classifiers = [ + "Development Status :: 4 - Beta", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Intended Audience :: Developers", + "Operating System :: OS Independent", + "Topic :: Software Development :: Libraries :: Python Modules", +] + +[tool.poetry.dependencies] +python = "^3.11" +# The Python SQL Toolkit and Object Relational Mapper +# https://docs.sqlalchemy.org/ +sqlalchemy = {extras = ["mypy"], 
version = "<3"} +# Fast creating of model instances. May be used in tests +# https://factoryboy.readthedocs.io/en/stable/ +factory-boy = {version= "<4", optional = true} +# Data validation using Python type hints +# https://docs.pydantic.dev/latest/ +pydantic = {version= "<3", optional = true} + +[tool.poetry.extras] +factories = ["factory-boy"] +auto_schema = ["pydantic"] + +[tool.poetry.group.dev.dependencies] +# Improved REPL +ipdb = "^0.13.13" +ipython = "^8.22.1" +# A framework for managing and maintaining multi-language pre-commit hooks. +# https://pre-commit.com/ +pre-commit = "^3.6.2" +# Collection of invoke commands used by Saritasa +# Invoke is a Python (2.7 and 3.4+) task execution tool & library, +# drawing inspiration from various sources to arrive at a powerful & clean feature set. +# http://www.pyinvoke.org/ +# https://github.com/saritasa-nest/saritasa-python-invocations/ +saritasa_invocations = "^1.1.0" + +[tool.poetry.group.linters.dependencies] +# Mypy is a static type checker for Python. 
+# https://mypy.readthedocs.io/en/stable/ +mypy = "^1.9.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +line-length = 79 +indent-width = 4 +target-version = "py311" + +[tool.ruff.lint] +extend-select = [ + # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w + "W", + "E", + # https://docs.astral.sh/ruff/rules/#mccabe-c90 + "C90", + # https://docs.astral.sh/ruff/rules/#isort-i + "I", + # https://docs.astral.sh/ruff/rules/#pep8-naming-n + "N", + # https://docs.astral.sh/ruff/rules/#pydocstyle-d + "D", + # https://docs.astral.sh/ruff/rules/#pyupgrade-up + "UP", + # https://docs.astral.sh/ruff/rules/#flake8-annotations-ann + "ANN", + # https://docs.astral.sh/ruff/rules/#flake8-bandit-s + "S", + # https://docs.astral.sh/ruff/rules/#flake8-bugbear-b + "B", + # https://docs.astral.sh/ruff/rules/#flake8-builtins-a + "A", + # https://docs.astral.sh/ruff/rules/#flake8-commas-com + "COM", + # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 + "C4", + # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz + "DTZ", + # https://docs.astral.sh/ruff/rules/#flake8-debugger-t10 + "T10", + # https://docs.astral.sh/ruff/rules/#flake8-django-dj + "DJ", + # https://docs.astral.sh/ruff/rules/#flake8-print-t20 + "T20", + # https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt + "PT", + # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim + "SIM", + # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth + "PTH", + # https://docs.astral.sh/ruff/rules/#flake8-todos-td + "TD", + # https://docs.astral.sh/ruff/rules/#eradicate-era + "ERA", + # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf + "RUF" +] +ignore = [ + # https://docs.astral.sh/ruff/rules/ANN101 + "ANN101", + # https://docs.astral.sh/ruff/rules/ANN102 + "ANN102", + # https://docs.astral.sh/ruff/rules/ANN401 + "ANN401", + # https://docs.astral.sh/ruff/rules/ANN003 + "ANN003", + # https://docs.astral.sh/ruff/rules/D100 + "D100", + # 
https://docs.astral.sh/ruff/rules/D104 + "D104", + # https://docs.astral.sh/ruff/rules/D106 + "D106", + # https://docs.astral.sh/ruff/rules/D107 + "D107", +] +[tool.ruff.lint.per-file-ignores] +"__init__.py" = [ + # https://docs.astral.sh/ruff/rules/F401 + "F401", +] +"**/tests/*" = [ + # https://docs.astral.sh/ruff/rules/S101 + "S101", + # https://docs.astral.sh/ruff/rules/S106 + "S106", + # https://docs.astral.sh/ruff/rules/S311 + "S311", +] +"**/test_*" = [ + # https://docs.astral.sh/ruff/rules/ANN201 + "ANN201", +] +[tool.ruff.lint.isort] +force-wrap-aliases = true +split-on-trailing-comma = true +section-order = [ + "future", + "standard-library", + "sqlalchemy", + "third-party", + "first-party", + "local-folder", +] +[tool.ruff.lint.isort.sections] +sqlalchemy = ["sqlalchemy"] + +[tool.ruff.lint.flake8-pytest-style] +fixture-parentheses = true +parametrize-names-type = "list" +parametrize-values-type = "list" +parametrize-values-row-type = "list" + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" + +[tool.mypy] +plugins = [ + "sqlalchemy.ext.mypy.plugin", +] +# https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports +ignore_missing_imports = true +strict = false +warn_no_return = false +check_untyped_defs = true +disallow_any_generics = true +exclude = [ + "venv", + ".venv", +] + +[[tool.mypy.overrides]] +module = "tests/*" +disable_error_code = [ + "attr-defined", + "operator", +] + +[tool.pyright] +ignore = [ + # Disabled for tests because of dynamic nature of pytest + "**/*test_*.py", + "invocations/**" +] diff --git a/saritasa_sqlalchemy_tools/__init__.py b/saritasa_sqlalchemy_tools/__init__.py new file mode 100644 index 0000000..c4a0852 --- /dev/null +++ b/saritasa_sqlalchemy_tools/__init__.py @@ -0,0 +1,52 @@ +from .auto_schema import ( + ModelAutoSchema, + ModelAutoSchemaError, + ModelAutoSchemaT, +) +from .models import ( + BaseIDModel, + BaseModel, + BaseModelT, 
+ BaseSoftDeleteModel, + BaseSoftDeleteModelT, + BaseTimeStampedModel, + FieldEnumT, + M2MFilterConfig, + ModelAttribute, + ModelAttributeSequence, + ModelType, + SelectStatement, + SoftDeleteBaseIDModel, + SoftDeleteMixin, + SQLAlchemyModel, + TimeStampedBaseIDModel, + TimeStampedMixin, +) +from .repositories import ( + Annotation, + AnnotationSequence, + BaseRepository, + BaseRepositoryT, + BaseSoftDeleteRepository, + ComparisonOperator, + Filter, + LazyLoaded, + LazyLoadedSequence, + OrderingClausesT, + OrderingEnum, + OrderingEnumMeta, + SQLWhereFilter, + SubQueryReturnT, + WhereFilter, + WhereFilters, + transform_search_filter, +) +from .session import ( + Session, + SessionFactory, + get_async_db_session, + get_async_db_session_context, + get_async_engine, + get_async_session_factory, +) +from .testing import AsyncSQLAlchemyModelFactory, AsyncSQLAlchemyOptions diff --git a/saritasa_sqlalchemy_tools/auto_schema.py b/saritasa_sqlalchemy_tools/auto_schema.py new file mode 100644 index 0000000..0d58257 --- /dev/null +++ b/saritasa_sqlalchemy_tools/auto_schema.py @@ -0,0 +1,526 @@ +import collections.abc +import datetime +import decimal +import types +import typing + +import pydantic +import pydantic_core +import sqlalchemy.dialects.postgresql.ranges + +from . 
import models + +PydanticFieldConfig: typing.TypeAlias = tuple[ + types.UnionType + | type[typing.Any] + | pydantic_core.PydanticUndefinedType + | typing.Annotated, # type: ignore + typing.Any, +] +MetaField: typing.TypeAlias = str | tuple[str, type] +MetaExtraFieldConfig: typing.TypeAlias = dict[str, typing.Any] +PydanticValidator: typing.TypeAlias = collections.abc.Callable[ + [typing.Any, typing.Any, pydantic.ValidationInfo], + typing.Any, +] + + +class ModelAutoSchemaError(Exception): + """Base exception for auto schema generator.""" + + +class UnableProcessTypeError(ModelAutoSchemaError): + """Raised when we don't know how to handle db type.""" + + +class UnableToExtractEnumClassError(ModelAutoSchemaError): + """Raised when we can't extract python enum class from model.""" + + +class ModelAutoSchema: + """Class for generating pydantic models based on sqlalchemy models. + + Create pydantic model based on fields of model specified in meta class. + + Example: + ------- + class User(sqlalchemy.orm.DeclarativeBase): + + __tablename__ = "users" + + name: orm.Mapped[str] = orm.mapped_column( + String(50), + nullable=False, + ) + location_id: orm.Mapped[int] = orm.mapped_column( + ForeignKey("locations.id"), + nullable=False, + ) + location = orm.relationship( + "Location", + back_populates="users", + ) + + class UserAutoSchema(ModelAutoSchema): + class Meta: + model = User + fields = ( + "id", + "name", + ("location", LocationSchema), # Use tuple to specify custom + # type or for relationship + # fields. 
+ ) + model_config = pydantic.ConfigDict(from_attributes=True) + extra_fields_config = { # Use this to add extra field constraints + "name": { + "min_length": 1, + } + } + extra_fields_validators = { # Use this to assign custom validators + "name": (validator_func1, validator_func2), + } + + """ + + class Meta: + model: models.SQLAlchemyModel + fields: collections.abc.Sequence[MetaField] = () + model_config: pydantic.ConfigDict + base_model: pydantic.BaseModel + extra_fields_config: dict[str, MetaExtraFieldConfig] + extra_fields_validators: dict[ + str, + collections.abc.Sequence[PydanticValidator], + ] + + @classmethod + def get_schema_name(cls) -> str: + """Generate name for new schema class.""" + return cls.__name__.replace("AutoSchema", "") + + @classmethod + def get_schema( + cls, + cls_kwargs: dict[str, typing.Any] | None = None, + ) -> type[pydantic.BaseModel]: + """Generate schema from model.""" + base_model = getattr( + cls.Meta, + "base_model", + None, + ) + model_config = getattr( + cls.Meta, + "model_config", + pydantic.ConfigDict(from_attributes=True), + ) + # Only config or base model could be passed to create_model + if base_model: + raise ValueError( + "Only config or base model could be passed to create_model", + ) + + extra_fields_config = getattr( + cls.Meta, + "extra_fields_config", + {}, + ) + extra_fields_validators = getattr( + cls.Meta, + "extra_fields_validators", + {}, + ) + generated_fields: dict[str, PydanticFieldConfig] = {} + validators: dict[str, typing.Any] = {} + for field in cls.Meta.fields: + extra_field_config = extra_fields_config.get(field, {}) + if isinstance(field, str): + generated_fields[field] = cls._generate_field( + model=cls.Meta.model, + field=field, + extra_field_config=extra_field_config, + ) + for index, validator in enumerate( + extra_fields_validators.get(field, ()), + ): + validators[f"{field}_validator_{index}"] = ( + pydantic.field_validator(field)(validator) + ) + continue + if isinstance(field, tuple): 
field_name, field_type = field + generated_fields[field_name] = ( + cls._generate_field_with_custom_type( + model=cls.Meta.model, + field=field_name, + field_type=field_type, + extra_field_config=extra_field_config, + ) + ) + for index, validator in enumerate( + extra_fields_validators.get(field, ()), + ): + validators[f"{field_name}_validator_{index}"] = ( + pydantic.field_validator(field_name)(validator) + ) + continue + raise UnableProcessTypeError( + f"Can't process the following field {field}", + ) + return pydantic.create_model( + cls.get_schema_name(), + __base__=base_model, + __config__=model_config, + __validators__=validators, + __cls_kwargs__=cls_kwargs, + **generated_fields, # type: ignore + ) # type: ignore[call-overload] + + @classmethod + def _generate_field( + cls, + model: models.SQLAlchemyModel, + field: str, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate field for pydantic model.""" + model_attribute: models.ModelAttribute = getattr( + model, + field, + ) + types_mapping = cls._get_db_types_mapping() + if isinstance(model_attribute, property): + return cls._generate_property_field( + model, + field, + model_attribute, + extra_field_config, + ) + if model_attribute.type.__class__ not in types_mapping: + raise UnableProcessTypeError( + "Can't generate generate type for" + f" {model_attribute.type.__class__}" + f" for field {field}", + ) + return types_mapping[model_attribute.type.__class__]( + model, + field, + model_attribute, + model_attribute.type, + extra_field_config, + ) + + @classmethod + def _generate_field_with_custom_type( + cls, + model: models.SQLAlchemyModel, + field: str, + field_type: typing.Any, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate field with custom type.""" + if isinstance( + field_type, + type, + ) and issubclass( + field_type, + ModelAutoSchema, + ): + model_attribute: models.ModelAttribute = getattr( + model, + field, + ) + 
field_type_generated = field_type.get_schema() + if isinstance(model_attribute, property): + return cls._generate_property_custom_field( + field_type_generated=field_type_generated, + model=model, + field=field, + model_attribute=model_attribute, + extra_field_config=extra_field_config, + ) + if model_attribute.property.uselist: + field_type_generated = list[field_type_generated] # type: ignore + is_nullable = next( + iter(model_attribute.property.local_columns), + ).nullable + return ( + ( + field_type_generated | None + if is_nullable + else field_type_generated + ), + pydantic_core.PydanticUndefined, + ) + return field_type, pydantic_core.PydanticUndefined + + @classmethod + def _get_db_types_mapping( + cls, + ) -> dict[ + type[typing.Any], + collections.abc.Callable[ + [ + models.SQLAlchemyModel, + str, + models.ModelAttribute, + models.ModelType, + MetaExtraFieldConfig, + ], + PydanticFieldConfig, + ], + ]: + """Get mapping of types and field generators.""" + return { + sqlalchemy.String: cls._generate_string_field, + sqlalchemy.Text: cls._generate_string_field, + sqlalchemy.Integer: cls._generate_integer_field, + sqlalchemy.SmallInteger: cls._generate_small_integer_field, + sqlalchemy.Enum: cls._generate_enum_field, + sqlalchemy.Date: cls._generate_date_field, + sqlalchemy.DateTime: cls._generate_datetime_field, + sqlalchemy.Boolean: cls._generate_bool_field, + sqlalchemy.Numeric: cls._generate_numeric_field, + sqlalchemy.Interval: cls._generate_interval_field, + sqlalchemy.ARRAY: cls._generate_array_field, + } + + @classmethod + def _generate_property_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: property, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate property field.""" + return ( + model_attribute.fget.__annotations__["return"], + pydantic_core.PydanticUndefined, + ) + + @classmethod + def _generate_property_custom_field( + cls, + field_type_generated: type, + model: 
models.SQLAlchemyModel, + field: str, + model_attribute: property, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate property for custom field.""" + annotation = model_attribute.fget.__annotations__["return"] + is_nullable = False + if typing.get_origin(annotation) == types.UnionType: + annotation, _ = typing.get_args(annotation) + is_nullable = True + if isinstance(annotation, collections.abc.Iterable): + field_type_generated = list[field_type_generated] # type: ignore + return ( + ( + field_type_generated | None + if is_nullable + else field_type_generated + ), + pydantic_core.PydanticUndefined, + ) + + @classmethod + def _generate_string_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate string field.""" + constraints: dict[str, typing.Any] = { + "strip_whitespace": True, + "max_length": model_type.length, # type: ignore + } + constraints.update(**extra_field_config) + return ( # type: ignore + typing.Annotated[ + str | None if model_attribute.nullable else str, + pydantic.StringConstraints(**constraints), + ], + pydantic_core.PydanticUndefined, + ) + + @classmethod + def _generate_integer_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate integer field.""" + constraints: dict[str, typing.Any] = { + "ge": -2147483648, + "le": 2147483647, + } + constraints.update(**extra_field_config) + int_type = typing.Annotated[int, pydantic.Field(**constraints)] + return ( + int_type | None if model_attribute.nullable else int_type, + pydantic_core.PydanticUndefined, + ) + + @classmethod + def _generate_small_integer_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: 
models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate small integer field.""" + constraints: MetaExtraFieldConfig = { + "ge": -32768, + "le": 32767, + } + constraints.update(**extra_field_config) + int_type = typing.Annotated[int, pydantic.Field(**constraints)] + return ( + int_type | None if model_attribute.nullable else int_type, + pydantic_core.PydanticUndefined, + ) + + @classmethod + def _generate_numeric_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate numeric field.""" + constraints: MetaExtraFieldConfig = {**extra_field_config} + decimal_type = typing.Annotated[ + decimal.Decimal, + pydantic.Field( + json_schema_extra={ + "precision": model_type.precision, # type: ignore + "scale": model_type.scale, # type: ignore + }, + **constraints, + ), + ] + return ( + decimal_type | None if model_attribute.nullable else decimal_type, + pydantic_core.PydanticUndefined, + ) + + @classmethod + def _generate_bool_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate boolean field.""" + return ( + bool | None if model_attribute.nullable else bool, + pydantic_core.PydanticUndefined, + ) + + @classmethod + def _generate_enum_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate enum field.""" + if model_type.enum_class is None: # type: ignore + raise UnableToExtractEnumClassError( + f"Can't extract enum for {field} in {model}", + ) + return ( + model_type.enum_class | None # type: ignore + if 
model_attribute.nullable + else model_type.enum_class # type: ignore + ), pydantic_core.PydanticUndefined + + @classmethod + def _generate_datetime_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: dict[str, typing.Any], + ) -> PydanticFieldConfig: + """Generate datetime field.""" + return ( + datetime.datetime | None + if model_attribute.nullable + else datetime.datetime + ), pydantic_core.PydanticUndefined + + @classmethod + def _generate_date_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate date field.""" + return ( + datetime.date | None if model_attribute.nullable else datetime.date + ), pydantic_core.PydanticUndefined + + @classmethod + def _generate_interval_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate interval field.""" + return ( + datetime.timedelta | None + if model_attribute.nullable + else datetime.timedelta + ), pydantic_core.PydanticUndefined + + @classmethod + def _generate_array_field( + cls, + model: models.SQLAlchemyModel, + field: str, + model_attribute: models.ModelAttribute, + model_type: models.ModelType, + extra_field_config: MetaExtraFieldConfig, + ) -> PydanticFieldConfig: + """Generate array field.""" + list_type, _ = cls._get_db_types_mapping()[ + model_type.item_type.__class__ # type: ignore + ]( + model, + field, + model_attribute, + model_type.item_type, # type: ignore + extra_field_config, + ) + return list[list_type], pydantic_core.PydanticUndefined # type: ignore + + +ModelAutoSchemaT = typing.TypeVar( + "ModelAutoSchemaT", + bound=ModelAutoSchema, +) diff --git 
a/saritasa_sqlalchemy_tools/models/__init__.py b/saritasa_sqlalchemy_tools/models/__init__.py new file mode 100644 index 0000000..5c2e2d0 --- /dev/null +++ b/saritasa_sqlalchemy_tools/models/__init__.py @@ -0,0 +1,21 @@ +from .core import ( + BaseIDModel, + BaseModel, + BaseSoftDeleteModel, + BaseTimeStampedModel, + M2MFilterConfig, + SoftDeleteBaseIDModel, + SoftDeleteMixin, + TimeStampedBaseIDModel, + TimeStampedMixin, +) +from .types import ( + BaseModelT, + BaseSoftDeleteModelT, + FieldEnumT, + ModelAttribute, + ModelAttributeSequence, + ModelType, + SelectStatement, + SQLAlchemyModel, +) diff --git a/saritasa_sqlalchemy_tools/models/core.py b/saritasa_sqlalchemy_tools/models/core.py new file mode 100644 index 0000000..72e0daf --- /dev/null +++ b/saritasa_sqlalchemy_tools/models/core.py @@ -0,0 +1,103 @@ +import dataclasses +import datetime +import typing + +import sqlalchemy +import sqlalchemy.ext.asyncio +import sqlalchemy.orm + + +class TimeStampedMixin: + """A mixin that adds timestamped fields to the model.""" + + created: sqlalchemy.orm.Mapped[datetime.datetime] = ( + sqlalchemy.orm.mapped_column( + nullable=False, + server_default=sqlalchemy.sql.func.now(), + ) + ) + modified: sqlalchemy.orm.Mapped[datetime.datetime] = ( + sqlalchemy.orm.mapped_column( + nullable=False, + server_default=sqlalchemy.sql.func.now(), + onupdate=sqlalchemy.sql.func.now(), + ) + ) + + +class SoftDeleteMixin: + """A mixin which supports soft delete.""" + + deleted: sqlalchemy.orm.Mapped[datetime.datetime | None] = ( + sqlalchemy.orm.mapped_column( + nullable=True, + ) + ) + + +class IDMixin: + """A mixin which adds id field to model.""" + + pk_field: str = "id" + id: sqlalchemy.orm.Mapped[int] = sqlalchemy.orm.mapped_column( + primary_key=True, + ) + + +@dataclasses.dataclass +class M2MFilterConfig: + """Configuration for m2m filter.""" + + relation_field: str + filter_field: str + match_field: str + + +class BaseModel( + sqlalchemy.ext.asyncio.AsyncAttrs, + 
sqlalchemy.orm.DeclarativeBase, +): + """Base model class.""" + + __abstract__ = True + pk_field: str + m2m_filters: typing.ClassVar[dict[str, M2MFilterConfig]] = {} + + @property + def as_dict(self) -> dict[str, typing.Any]: + """Convert model to dict.""" + return { + column_name: getattr(self, column_name) + for column in self.__table__.columns + if (column_name := column.name) + } + + +class BaseIDModel(IDMixin, BaseModel): + """Base model with id.""" + + __abstract__ = True + + +class BaseTimeStampedModel(TimeStampedMixin, BaseModel): + """Base model with timestamps.""" + + __abstract__ = True + + +class BaseSoftDeleteModel(SoftDeleteMixin, BaseTimeStampedModel): + """Base model with support for soft deletion.""" + + __abstract__ = True + + +class TimeStampedBaseIDModel(BaseIDModel, BaseTimeStampedModel): + """Base id model with timestamp fields.""" + + __abstract__ = True + + +class SoftDeleteBaseIDModel(BaseIDModel, BaseSoftDeleteModel): + """Base id model with timestamp fields and support for soft-delete.""" + + __abstract__ = True diff --git a/saritasa_sqlalchemy_tools/models/types.py b/saritasa_sqlalchemy_tools/models/types.py new file mode 100644 index 0000000..2b1ee0e --- /dev/null +++ b/saritasa_sqlalchemy_tools/models/types.py @@ -0,0 +1,31 @@ +import collections.abc +import enum +import typing + +import sqlalchemy + +from . 
import core + +BaseModelT = typing.TypeVar( + "BaseModelT", + bound="core.BaseModel", +) +BaseSoftDeleteModelT = typing.TypeVar( + "BaseSoftDeleteModelT", + bound="core.BaseSoftDeleteModel", +) +FieldEnumT = typing.TypeVar( + "FieldEnumT", + bound="enum.Enum", +) +SQLAlchemyModel: typing.TypeAlias = sqlalchemy.orm.DeclarativeBase +SelectStatement: typing.TypeAlias = sqlalchemy.Select[tuple[BaseModelT]] +# For some reason mypy demands that orm.InstrumentedAttribute has two generic +# args +ModelAttribute: typing.TypeAlias = sqlalchemy.orm.InstrumentedAttribute[ # type: ignore + typing.Any +] +ModelAttributeSequence: typing.TypeAlias = collections.abc.Sequence[ + ModelAttribute +] +ModelType: typing.TypeAlias = sqlalchemy.sql.type_api.TypeEngine[typing.Any] # type: ignore diff --git a/saritasa_sqlalchemy_tools/py.typed b/saritasa_sqlalchemy_tools/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/saritasa_sqlalchemy_tools/repositories/__init__.py b/saritasa_sqlalchemy_tools/repositories/__init__.py new file mode 100644 index 0000000..ef19cbc --- /dev/null +++ b/saritasa_sqlalchemy_tools/repositories/__init__.py @@ -0,0 +1,17 @@ +from .core import BaseRepository, BaseRepositoryT, BaseSoftDeleteRepository +from .filters import ( + Filter, + SQLWhereFilter, + WhereFilter, + WhereFilters, + transform_search_filter, +) +from .ordering import OrderingClausesT, OrderingEnum, OrderingEnumMeta +from .types import ( + Annotation, + AnnotationSequence, + ComparisonOperator, + LazyLoaded, + LazyLoadedSequence, + SubQueryReturnT, +) diff --git a/saritasa_sqlalchemy_tools/repositories/core.py b/saritasa_sqlalchemy_tools/repositories/core.py new file mode 100644 index 0000000..cda15bc --- /dev/null +++ b/saritasa_sqlalchemy_tools/repositories/core.py @@ -0,0 +1,442 @@ +import abc +import collections.abc +import datetime +import typing + +import sqlalchemy +import sqlalchemy.ext.asyncio +import sqlalchemy.orm + +from .. import models +from . 
import filters, ordering, types + +BaseRepositoryT = typing.TypeVar( + "BaseRepositoryT", + bound="BaseRepository[typing.Any]", +) + + +class BaseRepository( + typing.Generic[models.BaseModelT], + metaclass=abc.ABCMeta, +): + """Abstract class for repositories. + + Repository provides the interface for interaction with DB table. + + """ + + model: type[models.BaseModelT] + # Sequence of field names that should be excluded during bulk creating + default_exclude_bulk_create_fields: collections.abc.Sequence[str] = () + # Sequence of field names that should be excluded during bulk updating + default_exclude_bulk_update_fields: collections.abc.Sequence[str] = () + + def __init__( + self, + db_session: sqlalchemy.ext.asyncio.AsyncSession, + ) -> None: + self._db_session = db_session + + def init_other( + self, + repository_class: type[BaseRepositoryT], + ) -> BaseRepositoryT: + """Init other repo from current.""" + return repository_class(db_session=self._db_session) + + async def commit(self) -> None: + """Commit transaction.""" + await self._db_session.commit() + + async def flush(self) -> None: + """Perform changes to database.""" + await self._db_session.flush() + + async def refresh( + self, + instance: models.BaseModelT, + attribute_names: collections.abc.Sequence[str] | None = None, + ) -> None: + """Refresh instance.""" + await self._db_session.refresh( + instance=instance, + attribute_names=attribute_names, + ) + + def expire(self, instance: models.BaseModelT) -> None: + """Expire instance. + + This marks instance as expired, which means all its attrs need to be
+ + """ + self._db_session.expire(instance) + + async def get(self, pk: int | str) -> models.BaseModelT | None: + """Return entry from DB by primary key.""" + return await self._db_session.get(self.model, pk) + + async def save( + self, + instance: models.BaseModelT, + refresh: bool = False, + attribute_names: collections.abc.Sequence[str] | None = None, + ) -> models.BaseModelT: + """Save model instance into db.""" + self._db_session.add(instance=instance) + await self.flush() + if refresh: + await self.refresh(instance, attribute_names) + return instance + + async def delete(self, instance: models.BaseModelT) -> None: + """Delete model instance from db.""" + await self._db_session.delete(instance=instance) + await self.flush() + + async def delete_batch( + self, + where: filters.WhereFilters = (), + **filters_by: dict[str, typing.Any], + ) -> None: + """Delete batch of objects from db.""" + await self._db_session.execute( + statement=( + sqlalchemy.sql.delete(self.model) + .where(*self.process_where_filters(*where)) + .filter_by(**filters_by) + ), + ) + + def model_as_dict( + self, + instance: models.BaseModelT, + exclude_fields: collections.abc.Sequence[str] = (), + ) -> dict[str, typing.Any]: + """Convert model to dict except fields from `exclude_fields`.""" + return { + column_name: value + for column_name, value in instance.as_dict.items() + if column_name not in exclude_fields + } + + def objects_as_dict( + self, + objects: collections.abc.Sequence[models.BaseModelT], + exclude_fields: collections.abc.Sequence[str] = (), + ) -> list[dict[str, typing.Any]]: + """Convert objects to list of dicts with field values.""" + return [ + self.model_as_dict( + instance=obj, + exclude_fields=exclude_fields, + ) + for obj in objects + ] + + async def insert_batch( + self, + objects: collections.abc.Sequence[models.BaseModelT], + exclude_fields: collections.abc.Sequence[str] = (), + ) -> list[models.BaseModelT]: + """Create batch of objects in db.""" + if not objects: +
return [] + + objects_as_dict = self.objects_as_dict( + objects=objects, + exclude_fields=( + exclude_fields or self.default_exclude_bulk_create_fields + ), + ) + created_objects = await self._db_session.scalars( + sqlalchemy.sql.insert(self.model) + .returning(self.model) + .values(objects_as_dict), + ) + await self.flush() + return list(created_objects.all()) + + async def update_batch( + self, + objects: collections.abc.Sequence[models.BaseModelT], + exclude_fields: collections.abc.Sequence[str] = (), + ) -> None: + """Update batch of objects in db.""" + if not objects: + return + + objects_as_dict = self.objects_as_dict( + objects=objects, + exclude_fields=( + exclude_fields or self.default_exclude_bulk_update_fields + ), + ) + await self._db_session.execute( + sqlalchemy.sql.update(self.model), + objects_as_dict, + ) + await self.flush() + + @property + def select_statement(self) -> models.SelectStatement[models.BaseModelT]: + """Generate empty select statement.""" + return sqlalchemy.select(self.model) + + def get_annotated_statement( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + *annotations: types.Annotation, + ) -> models.SelectStatement[models.BaseModelT]: + """Pick annotations which should be returned.""" + if statement is not None: + select_statement = statement + else: + select_statement = self.select_statement + for annotation in annotations: + if isinstance(annotation, tuple): + select_statement = select_statement.options( + sqlalchemy.orm.with_expression(*annotation), + ) + else: + select_statement = select_statement.options( + sqlalchemy.orm.undefer(annotation), + ) + return select_statement + + def get_filter_statement( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + *where_filters: filters.WhereFilter, + **filters_by: typing.Any, + ) -> models.SelectStatement[models.BaseModelT]: + """Get statement with filtering.""" + if statement is not None: + select_statement = statement + else: 
+ select_statement = self.select_statement + return select_statement.where( + *self.process_where_filters(*where_filters), + ).filter_by(**filters_by) + + @classmethod + def process_where_filters( + cls, + *where_filters: filters.WhereFilter, + ) -> collections.abc.Sequence[filters.SQLWhereFilter]: + """Process where filters.""" + processed_where_filters: list[filters.SQLWhereFilter] = [] + for where_filter in where_filters: + if isinstance(where_filter, filters.Filter): + processed_where_filters.append( + where_filter.transform_filter(cls.model), # type: ignore + ) + continue + processed_where_filters.append(where_filter) + return processed_where_filters + + def get_order_statement( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + *clauses: sqlalchemy.ColumnExpressionArgument[str] | str, + ) -> models.SelectStatement[models.BaseModelT]: + """Get statement with ordering.""" + if statement is not None: + select_statement = statement + else: + select_statement = self.select_statement + ordering_clauses = [ + ( + clause.db_clause + if isinstance(clause, ordering.OrderingEnum) + else clause + ) + for clause in clauses + ] + return select_statement.order_by(*ordering_clauses) + + def get_pagination_statement( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + offset: int | None = None, + limit: int | None = None, + ) -> models.SelectStatement[models.BaseModelT]: + """Get statement with pagination.""" + if statement is not None: + select_statement = statement + else: + select_statement = self.select_statement + if offset: + select_statement = select_statement.offset(offset) + if limit: + select_statement = select_statement.limit(limit) + return select_statement + + def get_joined_load_statement( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + *targets: types.LazyLoaded, + ) -> models.SelectStatement[models.BaseModelT]: + """Get statement which will load related models.""" + if 
statement is not None: + select_statement = statement + else: + select_statement = self.select_statement + for target in targets: + joined_load = [] + if isinstance(target, collections.abc.Sequence): + joined_load.append(sqlalchemy.orm.joinedload(*target)) + else: + joined_load.append(sqlalchemy.orm.joinedload(target)) + select_statement = select_statement.options( + *joined_load, + ) + return select_statement + + def get_select_in_load_statement( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + *targets: types.LazyLoaded, + ) -> models.SelectStatement[models.BaseModelT]: + """Get statement which will load related models separately.""" + if statement is not None: + select_statement = statement + else: + select_statement = self.select_statement + for target in targets: + select_in_load = [] + if isinstance(target, collections.abc.Sequence): + select_in_load.append(sqlalchemy.orm.selectinload(*target)) + else: + select_in_load.append(sqlalchemy.orm.selectinload(target)) + select_statement = select_statement.options( + *select_in_load, + ) + return select_statement + + def get_fetch_statement( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + offset: int | None = None, + limit: int | None = None, + joined_load: types.LazyLoadedSequence = (), + select_in_load: types.LazyLoadedSequence = (), + annotations: types.AnnotationSequence = (), + clauses: ordering.OrderingClausesT = (), + where: filters.WhereFilters = (), + **filters_by: dict[str, typing.Any], + ) -> models.SelectStatement[models.BaseModelT]: + """Prepare statement for fetching.""" + statement = self.get_joined_load_statement( + statement, + *joined_load, + ) + statement = self.get_select_in_load_statement( + statement, + *select_in_load, + ) + statement = self.get_annotated_statement( + statement, + *annotations, + ) + statement = self.get_order_statement( + statement, + *clauses, + ) + statement = self.get_filter_statement( + statement, + *where, + 
**filters_by, + ) + statement = self.get_pagination_statement( + statement, + offset=offset, + limit=limit, + ) + return statement + + async def fetch( + self, + statement: models.SelectStatement[models.BaseModelT] | None = None, + offset: int | None = None, + limit: int | None = None, + joined_load: types.LazyLoadedSequence = (), + select_in_load: types.LazyLoadedSequence = (), + annotations: types.AnnotationSequence = (), + clauses: ordering.OrderingClausesT = (), + where: filters.WhereFilters = (), + **filters_by: dict[str, typing.Any], + ) -> sqlalchemy.ScalarResult[models.BaseModelT]: + """Fetch entries.""" + return await self._db_session.scalars( + statement=self.get_fetch_statement( + statement=statement, + offset=offset, + limit=limit, + joined_load=joined_load, + select_in_load=select_in_load, + annotations=annotations, + clauses=clauses, + where=where, + **filters_by, + ), + ) + + async def count( + self, + where: filters.WhereFilters = (), + **filters_by: dict[str, typing.Any], + ) -> int: + """Get count of entries.""" + return ( + await self._db_session.scalar( + sqlalchemy.select(sqlalchemy.func.count()) + .select_from(self.model) + .where(*self.process_where_filters(*where)) + .filter_by(**filters_by), + ) + ) or 0 + + async def exists( + self, + where: filters.WhereFilters = (), + **filters_by: dict[str, typing.Any], + ) -> bool: + """Check existence of entries.""" + return ( + await self._db_session.scalar( + sqlalchemy.select( + sqlalchemy.sql.exists( + self.select_statement.where( + *self.process_where_filters(*where), + ).filter_by( + **filters_by, + ), + ), + ), + ) + ) or False + + +class BaseSoftDeleteRepository( + BaseRepository[models.BaseSoftDeleteModelT], +): + """Repository for model with soft delete feature.""" + + async def delete( + self, + instance: models.BaseSoftDeleteModelT, + ) -> None: + """Mark model as deleted model instance into db.""" + instance.deleted = datetime.datetime.now(datetime.UTC).replace( + tzinfo=None, + ) + await 
self.save(instance=instance) + + async def force_delete( + self, + instance: models.BaseSoftDeleteModelT, + ) -> None: + """Delete model from database.""" + await super().delete(instance=instance) diff --git a/saritasa_sqlalchemy_tools/repositories/filters.py b/saritasa_sqlalchemy_tools/repositories/filters.py new file mode 100644 index 0000000..63f77bc --- /dev/null +++ b/saritasa_sqlalchemy_tools/repositories/filters.py @@ -0,0 +1,147 @@ +import collections.abc +import dataclasses +import datetime +import typing + +import sqlalchemy +import sqlalchemy.dialects.postgresql +import sqlalchemy.orm + +from .. import models + +SQLWhereFilter = sqlalchemy.ColumnExpressionArgument[bool] +WhereFilter = typing.Union[SQLWhereFilter, "Filter"] +WhereFilters = collections.abc.Sequence[WhereFilter] +_FilterType: typing.TypeAlias = ( + str + | int + | bool + | list[str] + | list[int] + | list[models.FieldEnumT] + | list[None] + | sqlalchemy.dialects.postgresql.Range[typing.Any] + | datetime.date + | datetime.datetime + | collections.abc.Sequence[str] + | collections.abc.Sequence[int] + | collections.abc.Sequence[models.FieldEnumT] + | collections.abc.Sequence[None] + | collections.abc.Sequence[datetime.date] + | collections.abc.Sequence[datetime.datetime] + | None +) +FilterType: typing.TypeAlias = _FilterType[typing.Any] + + +@dataclasses.dataclass +class Filter: + """Define filter value.""" + + api_filter: str + value: FilterType + + def transform_filter( + self, + model: type[models.BaseModelT], + ) -> SQLWhereFilter: + """Transform filter valid for sqlalchemy.""" + field_name, filter_arg = self.api_filter.split("__") + if field_name in model.m2m_filters: + return self.transform_m2m_filter( + field_name=field_name, + filter_arg=filter_arg, + model=model, + value=self.value, + ) + return self.transform_simple_filter( + field_name=field_name, + filter_arg=filter_arg, + model=model, + value=self.value, + ) + + def transform_m2m_filter( + self, + field_name: str, + filter_arg:
str, + model: type[models.BaseModelT], + value: FilterType, + ) -> SQLWhereFilter: + """Transform m2m filter for sqlalchemy.""" + m2m_config = model.m2m_filters[field_name] + m2m_model = getattr(model, m2m_config.relation_field).mapper.class_ + return sqlalchemy.and_( + self.transform_simple_filter( + m2m_config.filter_field, + filter_arg, + model=m2m_model, + value=value, + ), + getattr( + m2m_model, + m2m_config.match_field, + ) + == getattr( + model, + model.pk_field, + ), + ) + + def transform_simple_filter( + self, + field_name: str, + filter_arg: str, + model: type[models.BaseModelT], + value: FilterType, + ) -> SQLWhereFilter: + """Transform simple filter for sqlalchemy.""" + filter_args_mapping = { + "exact": "is_", + "in": "in_", + "overlaps": "overlaps", + "gt": "__gt__", + "gte": "__ge__", + "lt": "__lt__", + "lte": "__le__", + } + field: sqlalchemy.orm.attributes.InstrumentedAttribute[typing.Any] = ( + getattr( + model, + field_name, + ) + ) + filter_operator = getattr(field, filter_args_mapping[filter_arg])( + value, + ) + if ( + filter_arg + in ( + "gt", + "gte", + "lt", + "lte", + ) + and field.nullable + ): + filter_operator = sqlalchemy.or_( + filter_operator, + field.is_(None), + ) + return filter_operator + + +def transform_search_filter( + model: type[models.BaseModelT], + search_fields: collections.abc.Sequence[str], + value: FilterType, +) -> SQLWhereFilter: + """Prepare search filter sql alchemy.""" + search_filters = [ + sqlalchemy.cast(getattr(model, field), sqlalchemy.String).ilike( + f"%{str(value).strip()}%", + ) + for field in search_fields + if value + ] + return sqlalchemy.or_(*search_filters) diff --git a/saritasa_sqlalchemy_tools/repositories/ordering.py b/saritasa_sqlalchemy_tools/repositories/ordering.py new file mode 100644 index 0000000..fd61cea --- /dev/null +++ b/saritasa_sqlalchemy_tools/repositories/ordering.py @@ -0,0 +1,37 @@ +import collections.abc +import enum +import typing + +import sqlalchemy + + +class 
OrderingEnumMeta(enum.EnumMeta): + """Meta class for ordering enum.""" + + def __new__( # noqa: ANN204 + metacls, # noqa: N804 + cls, # noqa: ANN001 + bases, # noqa: ANN001 + classdict, # noqa: ANN001 + **kwds, + ): + """Extend enum with descending fields.""" + for name in list(classdict._member_names): + classdict[f"{name}_desc"] = f"-{classdict[name]}" + return super().__new__(metacls, cls, bases, classdict, **kwds) + + +class OrderingEnum(enum.StrEnum, metaclass=OrderingEnumMeta): + """Representation of ordering fields.""" + + @property + def db_clause(self) -> str | sqlalchemy.ColumnExpressionArgument[str]: + """Convert ordering value to sqlalchemy ordering clause.""" + if self.startswith("-"): + return sqlalchemy.desc(self[1:]) + return self + + +OrderingClausesT: typing.TypeAlias = collections.abc.Sequence[ + str | sqlalchemy.ColumnExpressionArgument[str] | OrderingEnum +] diff --git a/saritasa_sqlalchemy_tools/repositories/types.py b/saritasa_sqlalchemy_tools/repositories/types.py new file mode 100644 index 0000000..8e630fb --- /dev/null +++ b/saritasa_sqlalchemy_tools/repositories/types.py @@ -0,0 +1,30 @@ +import collections.abc +import typing + +import sqlalchemy.orm + +# For some reason mypy demands that orm.QueryableAttribute has two generic args +Annotation: typing.TypeAlias = ( + sqlalchemy.orm.QueryableAttribute[typing.Any] # type: ignore + | tuple[ + sqlalchemy.orm.QueryableAttribute[typing.Any], + sqlalchemy.ScalarSelect[typing.Any], + ] +) +AnnotationSequence: typing.TypeAlias = collections.abc.Sequence[Annotation] + +ComparisonOperator: typing.TypeAlias = collections.abc.Callable[ + [sqlalchemy.orm.InstrumentedAttribute[typing.Any], typing.Any], + sqlalchemy.ColumnExpressionArgument[bool], +] +LazyLoaded: typing.TypeAlias = ( + sqlalchemy.orm.InstrumentedAttribute[typing.Any] + | collections.abc.Sequence[ + sqlalchemy.orm.InstrumentedAttribute[typing.Any] + ] +) +LazyLoadedSequence: typing.TypeAlias = collections.abc.Sequence[LazyLoaded] 
+SubQueryReturnT = typing.TypeVar( + "SubQueryReturnT", + bound=typing.Any, +) diff --git a/saritasa_sqlalchemy_tools/session.py b/saritasa_sqlalchemy_tools/session.py new file mode 100644 index 0000000..3e047d8 --- /dev/null +++ b/saritasa_sqlalchemy_tools/session.py @@ -0,0 +1,162 @@ +import collections.abc +import contextlib +import typing + +import sqlalchemy +import sqlalchemy.event +import sqlalchemy.ext.asyncio +import sqlalchemy.orm + +Session: typing.TypeAlias = sqlalchemy.ext.asyncio.AsyncSession +SessionFactory: typing.TypeAlias = collections.abc.Callable[ + [], + collections.abc.AsyncIterator[Session], +] +SessionOnConnect = collections.abc.Callable[..., None] + + +def get_async_engine( + drivername: str, + username: str, + password: str, + host: str, + port: int, + database: str, + echo: bool = False, + on_connect: collections.abc.Sequence[SessionOnConnect] = (), + **query, +) -> sqlalchemy.ext.asyncio.AsyncEngine: + """Set up engine for working with database.""" + db_engine = sqlalchemy.ext.asyncio.create_async_engine( + sqlalchemy.engine.URL( + drivername=drivername, + username=username, + password=password, + host=host, + port=port, + database=database, + query=query, # type: ignore + ), + echo=echo, + ) + for on_connect_func in on_connect: + sqlalchemy.event.listens_for( + target=db_engine.sync_engine, + identifier="connect", + insert=True, + )(on_connect_func) + return db_engine + + +def get_async_session_factory( + drivername: str, + username: str, + password: str, + host: str, + port: int, + database: str, + echo: bool = False, + on_connect: collections.abc.Sequence[SessionOnConnect] = (), + autocommit: bool = False, + # The Session.commit() operation unconditionally issues Session.flush() + # before emitting COMMIT on relevant database connections. + # If no pending changes are detected, then no SQL is emitted to the + # database. This behavior is not configurable and is not affected by + # the Session.autoflush parameter. 
+ autoflush: bool = False, + expire_on_commit: bool = False, + **query, +) -> sqlalchemy.ext.asyncio.async_sessionmaker[ + sqlalchemy.ext.asyncio.AsyncSession +]: + """Set up session factory.""" + return sqlalchemy.ext.asyncio.async_sessionmaker( + bind=get_async_engine( + drivername=drivername, + username=username, + password=password, + host=host, + port=port, + database=database, + on_connect=on_connect, + echo=echo, + **query, + ), + autocommit=autocommit, + autoflush=autoflush, + expire_on_commit=expire_on_commit, + ) + + +async def get_async_db_session( + drivername: str, + username: str, + password: str, + host: str, + port: int, + database: str, + echo: bool = False, + on_connect: collections.abc.Sequence[SessionOnConnect] = (), + autocommit: bool = False, + autoflush: bool = False, + expire_on_commit: bool = False, + **query, +) -> collections.abc.AsyncIterator[Session]: + """Set up and get db session.""" + async with get_async_session_factory( + drivername=drivername, + username=username, + password=password, + host=host, + port=port, + database=database, + on_connect=on_connect, + echo=echo, + autocommit=autocommit, + autoflush=autoflush, + expire_on_commit=expire_on_commit, + **query, + )() as session: + try: + yield session + except Exception as error: + await session.rollback() + raise error + else: + await session.commit() + + +@contextlib.asynccontextmanager +async def get_async_db_session_context( + drivername: str, + username: str, + password: str, + host: str, + port: int, + database: str, + echo: bool = False, + on_connect: collections.abc.Sequence[SessionOnConnect] = (), + autocommit: bool = False, + autoflush: bool = False, + expire_on_commit: bool = False, + **query, +) -> collections.abc.AsyncIterator[Session]: + """Init db session.""" + db_iterator = get_async_db_session( + drivername=drivername, + username=username, + password=password, + host=host, + port=port, + database=database, + on_connect=on_connect, + echo=echo, + 
autocommit=autocommit, + autoflush=autoflush, + expire_on_commit=expire_on_commit, + **query, + ) + try: + yield await anext(db_iterator) # type: ignore + finally: + await anext(db_iterator, None) diff --git a/saritasa_sqlalchemy_tools/testing/__init__.py b/saritasa_sqlalchemy_tools/testing/__init__.py new file mode 100644 index 0000000..7a80ddc --- /dev/null +++ b/saritasa_sqlalchemy_tools/testing/__init__.py @@ -0,0 +1 @@ +from .factories import AsyncSQLAlchemyModelFactory, AsyncSQLAlchemyOptions diff --git a/saritasa_sqlalchemy_tools/testing/factories.py b/saritasa_sqlalchemy_tools/testing/factories.py new file mode 100644 index 0000000..020c283 --- /dev/null +++ b/saritasa_sqlalchemy_tools/testing/factories.py @@ -0,0 +1,109 @@ +import collections.abc +import importlib +import typing + +import factory + +from .. import models, repositories, session + + +class AsyncSQLAlchemyOptions(factory.alchemy.SQLAlchemyOptions): + """Meta options for AsyncSQLAlchemyModelFactory.""" + + def _build_default_options(self) -> list[factory.base.OptionDefault]: + return [ + *super()._build_default_options(), + factory.base.OptionDefault("repository", None, inherit=True), + factory.base.OptionDefault("sub_factories", {}, inherit=True), + ] + + +class AsyncSQLAlchemyModelFactory( + factory.alchemy.SQLAlchemyModelFactory, + typing.Generic[models.BaseModelT], +): + """Factory with ability to create instances asynchronously.""" + + _options_class = AsyncSQLAlchemyOptions + + @classmethod + async def create_async( + cls, + session: session.Session, + **kwargs, + ) -> models.BaseModelT: + """Create instance in database.""" + kwargs.update( + **await cls._async_run_sub_factories( + session=session, + passed_fields=list(kwargs.keys()), + ), + ) + instance: models.BaseModelT = cls.build(**kwargs) + repository_class: ( + type[repositories.BaseRepository[models.BaseModelT,]] | None + ) = getattr( + cls._meta, + "repository", + None, + ) + if not repository_class: + raise 
ValueError("Repository class is not set in Meta class") + repository = repository_class(db_session=session) + + pk_attr: str = instance.pk_field + await repository.save(instance=instance) + instance_from_db = ( + await repository.fetch( + **{ + pk_attr: getattr(instance, pk_attr), + }, + ) + ).first() + if not instance_from_db: + raise ValueError("Created instance wasn't found in database") + return instance_from_db + + @classmethod + async def create_batch_async( + cls, + session: session.Session, + size: int, + **kwargs, + ) -> list[models.BaseModelT]: + """Create several instances.""" + instances: list[models.BaseModelT] = [] + for _ in range(size): + instances.append( + await cls.create_async( + session=session, + **kwargs, + ), + ) + return instances + + @classmethod + async def _async_run_sub_factories( + cls, + session: session.Session, + passed_fields: collections.abc.Sequence[str], + ) -> dict[str, models.BaseModel]: + """Generate objects from sub factories.""" + sub_factories_map: dict[str, str] = getattr( + cls._meta, + "sub_factories", + {}, + ) + generated_instances: dict[str, models.BaseModel] = {} + for field, sub_factory_path in sub_factories_map.items(): + if field in passed_fields or f"{field}_id" in passed_fields: + continue + *factory_module, sub_factory_name = sub_factory_path.split(".") + sub_factory: typing.Self = getattr( + importlib.import_module(".".join(factory_module)), + sub_factory_name, + ) + generated_instances[field] = await sub_factory.create_async( + session=session, + ) + return generated_instances diff --git a/tasks.py b/tasks.py new file mode 100644 index 0000000..072016e --- /dev/null +++ b/tasks.py @@ -0,0 +1,33 @@ +import invoke +import saritasa_invocations + +import invocations + +ns = invoke.Collection( # type: ignore + invocations.project, + saritasa_invocations.docker, + saritasa_invocations.git, + saritasa_invocations.github_actions, + saritasa_invocations.pre_commit, + saritasa_invocations.python, +
saritasa_invocations.system, + saritasa_invocations.poetry, + saritasa_invocations.mypy, + saritasa_invocations.pytest, +) + +# Configurations for run command +ns.configure( + { + "run": { + "pty": True, + "echo": True, + }, + "saritasa_invocations": saritasa_invocations.Config( + project_name="saritasa-sqlalchemy-tools", + docker=saritasa_invocations.DockerSettings( + main_containers=("postgres",), + ), + ), + }, +)