From cd1a72ba339520c140d77bc793bd7177181edba1 Mon Sep 17 00:00:00 2001
From: Erik
Date: Thu, 5 Sep 2024 22:25:35 -0700
Subject: [PATCH 01/70] Initialize new fastapi project

---
 api-v2/.env.example   |   11 +
 api-v2/.gitignore     |    4 +
 api-v2/README.md      |   54 ++
 api-v2/main.py        |   20 +
 api-v2/poetry.lock    | 1417 +++++++++++++++++++++++++++++++++++++++++
 api-v2/pyproject.toml |   22 +
 6 files changed, 1528 insertions(+)
 create mode 100644 api-v2/.env.example
 create mode 100644 api-v2/.gitignore
 create mode 100644 api-v2/README.md
 create mode 100644 api-v2/main.py
 create mode 100644 api-v2/poetry.lock
 create mode 100644 api-v2/pyproject.toml

diff --git a/api-v2/.env.example b/api-v2/.env.example
new file mode 100644
index 00000000..3d825e08
--- /dev/null
+++ b/api-v2/.env.example
@@ -0,0 +1,11 @@
+COGNITO_CLIENT_ID=
+COGNITO_CLIENT_SECRET=
+COGNITO_REGION=
+COGNITO_REDIRECT_URI=http://localhost:4040/signin
+COGNITO_USER_POOL_ID=
+COGNITO_ACCESS_ID=
+COGNITO_ACCESS_KEY=
+SECRET_KEY=
+CONFIG_PROFILE="personal"
+ROOT_URL=http://localhost:4040
+DATABASE_URL=sqlite:///./homeuniteus.db
\ No newline at end of file
diff --git a/api-v2/.gitignore b/api-v2/.gitignore
new file mode 100644
index 00000000..e928de15
--- /dev/null
+++ b/api-v2/.gitignore
@@ -0,0 +1,4 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
\ No newline at end of file
diff --git a/api-v2/README.md b/api-v2/README.md
new file mode 100644
index 00000000..b31147c0
--- /dev/null
+++ b/api-v2/README.md
@@ -0,0 +1,54 @@
+# Home Unite Us OpenAPI Server
+
+## Overview
+
+This is the _Home Unite Us_ web API server.
+
+This server uses:
+
+- [FastAPI](https://fastapi.tiangolo.com/).
+- [SQLAlchemy](https://www.sqlalchemy.org/).
+- [Pydantic](https://docs.pydantic.dev/latest/).
+- [Poetry](https://python-poetry.org/docs/).
+
+## Requirements
+
+Python >= 3.12
+
+Run `python -V` to check the Python version.
+
+**Note**: On some systems, you might need to use the `python3` and `pip3` commands.
+
+[Poetry](https://python-poetry.org/docs/#installation) is used to manage the project dependencies. Install the CLI globally.
+
+## Usage - Development
+
+### Getting Started
+
+#### Configuration
+
+The API application configuration must be specified before running the application. Configuration variables can be specified either as environment variables or as entries in a `.env` file located in the `api-v2` directory. To get started, copy the values from `.env.example` into a `.env` file.
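For illustration only (not part of this patch): `main.py` below comments out a future `from core.config import settings` import, and a minimal sketch of such a settings module, assuming nothing beyond the variables declared in `.env.example` and the already-locked `pydantic` dependency, might look like the following. The `Settings` class, field names, and defaults are hypothetical.

```python
# Hypothetical sketch of core/config.py -- not part of this patch.
# Field names mirror the variables declared in .env.example.
import os

from pydantic import BaseModel


class Settings(BaseModel):
    cognito_client_id: str = os.getenv("COGNITO_CLIENT_ID", "")
    cognito_client_secret: str = os.getenv("COGNITO_CLIENT_SECRET", "")
    cognito_region: str = os.getenv("COGNITO_REGION", "")
    cognito_redirect_uri: str = os.getenv(
        "COGNITO_REDIRECT_URI", "http://localhost:4040/signin")
    root_url: str = os.getenv("ROOT_URL", "http://localhost:4040")
    database_url: str = os.getenv("DATABASE_URL", "sqlite:///./homeuniteus.db")


# Imported elsewhere as: from core.config import settings
settings = Settings()
```

Note that this sketch only reads real environment variables; loading the `.env` file itself would need an extra helper such as `python-dotenv` or `pydantic-settings`, neither of which appears in the portion of the lock file shown here.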
+
+#### Setup and Run
+
+Once the `.env` file has been configured using the instructions outlined above and Poetry is installed, run the following commands in the `api-v2` directory to install the required development dependencies and run the application.
+
+```shell
+poetry install # Installs all dependencies
+
+poetry shell # Activates the virtual environment
+
+poetry run fastapi dev main.py # Runs this server in developer mode
+```
+
+Your server is now running at:
+
+```
+http://127.0.0.1:8000
+```
+
+And your API docs at:
+
+```
+http://127.0.0.1:8000/docs
+```
diff --git a/api-v2/main.py b/api-v2/main.py
new file mode 100644
index 00000000..ce586810
--- /dev/null
+++ b/api-v2/main.py
@@ -0,0 +1,20 @@
+from fastapi import FastAPI
+from contextlib import asynccontextmanager
+
+
+# from api.main import api_router
+# from core.config import settings
+# from seed import init_db
+
+
+# @asynccontextmanager
+# async def lifespan(app: FastAPI):
+#     init_db()
+#     yield
+
+
+# app = FastAPI(lifespan=lifespan)
+app = FastAPI()
+
+
+# app.include_router(api_router, prefix="/api")
diff --git a/api-v2/poetry.lock b/api-v2/poetry.lock
new file mode 100644
index 00000000..1d427f96
--- /dev/null
+++ b/api-v2/poetry.lock
@@ -0,0 +1,1417 @@
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
+[[package]]
+name = "anyio"
+version = "4.4.0"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+    {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
+]
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+
+[package.extras]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+trio = ["trio (>=0.23)"]
+
+[[package]]
+name = "boto3"
+version = "1.35.13"
+description = "The AWS SDK for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "boto3-1.35.13-py3-none-any.whl", hash = "sha256:6e220eae161a4c0ed21e2561edcb0fd9603fa621692c50bc099db318ed3e3ad4"},
+    {file = "boto3-1.35.13.tar.gz", hash = "sha256:4af17bd7bada591ddaa835d774b242705210e5d45133e25bd73417daa42e53e7"},
+]
+
+[package.dependencies]
+botocore = ">=1.35.13,<1.36.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.10.0,<0.11.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.35.13"
+description = "Low-level, data-driven core of boto 3."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.35.13-py3-none-any.whl", hash = "sha256:dd8a8bb1946187c8eb902a3b856d3b24df63917e4f2c61a6bce7f3ea9f112761"}, + {file = "botocore-1.35.13.tar.gz", hash = "sha256:f7ae62eab44d731a5ad8917788378316c79c7bceb530a8307ed0f3bca7037341"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.21.2)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = 
"cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "43.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "fastapi" +version = "0.113.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.113.0-py3-none-any.whl", hash = "sha256:c8d364485b6361fb643d53920a18d58a696e189abcb901ec03b487e35774c476"}, + {file = "fastapi-0.113.0.tar.gz", hash = "sha256:b7cf9684dc154dfc93f8b718e5850577b529889096518df44defa41e73caf50f"}, +] + +[package.dependencies] +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"standard\""} +fastapi-cli = {version = ">=0.0.5", extras = ["standard"], optional = true, markers = "extra == \"standard\""} +httpx = {version = ">=0.23.0", optional = true, markers = "extra == \"standard\""} +jinja2 = {version = ">=2.11.2", optional = true, markers = "extra == \"standard\""} +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +python-multipart = {version = ">=0.0.7", optional = true, markers = "extra == \"standard\""} +starlette = ">=0.37.2,<0.39.0" +typing-extensions = ">=4.8.0" +uvicorn = {version = ">=0.12.0", extras = ["standard"], optional = true, markers = "extra == \"standard\""} + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-cli" +version = "0.0.5" +description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 
🚀" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_cli-0.0.5-py3-none-any.whl", hash = "sha256:e94d847524648c748a5350673546bbf9bcaeb086b33c24f2e82e021436866a46"}, + {file = "fastapi_cli-0.0.5.tar.gz", hash = "sha256:d30e1239c6f46fcb95e606f02cdda59a1e2fa778a54b64686b3ff27f6211ff9f"}, +] + +[package.dependencies] +typer = ">=0.12.3" +uvicorn = {version = ">=0.15.0", extras = ["standard"]} + +[package.extras] +standard = ["uvicorn[standard] (>=0.15.0)"] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = 
"greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.8" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.9.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"}, + {file = "pydantic-2.9.0.tar.gz", hash = 
"sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.23.2" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] +tzdata = {version = "*", markers = "python_version >= \"3.9\""} + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.23.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"}, + {file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"}, + {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"}, + {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"}, + {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"}, + {file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"}, + {file = "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"}, + {file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"}, + {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"}, + {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"}, + {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"}, + {file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"}, + {file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash = "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"}, + {file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"}, + {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"}, + {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"}, + {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"}, + {file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"}, + {file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = "sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"}, + {file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"}, + {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"}, + {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"}, + {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"}, + {file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"}, + {file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = "sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"}, + {file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"}, + {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"}, + {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"}, + {file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"}, + {file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"}, + {file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"}, + {file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"}, + {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"}, + {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"}, + {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"}, + {file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"}, + {file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"}, + {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"}, + {file = 
"pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"}, + {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"}, + {file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.4.0" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, + {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+ {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pyjwt"
+version = "2.9.0"
+description = "JSON Web Token implementation in Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"},
+ {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"},
+]
+
+[package.dependencies]
+cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
+
+[package.extras]
+crypto = ["cryptography (>=3.4.0)"]
+dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
+docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
+tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+ {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+ {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
+[[package]]
+name = "python-multipart"
+version = "0.0.9"
+description = "A streaming multipart parser for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"},
+ {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"},
+]
+
+[package.extras]
+dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+ {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+ {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
+]
+
+[[package]]
+name = "rich"
+version = "13.8.0"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"},
+ {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
+[[package]]
+name = "s3transfer"
+version = "0.10.2"
+description = "An Amazon S3 Transfer Manager"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"},
+ {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"},
+]
+
+[package.dependencies]
+botocore = ">=1.33.2,<2.0a.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
+
+[[package]]
+name = "shellingham"
+version = "1.5.4"
+description = "Tool to Detect Surrounding Shell"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
+ {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "2.0.34"
+description = "Database Abstraction Library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"},
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"},
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"},
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"},
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"},
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"},
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"},
+ {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"},
+ {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"},
+ {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"},
+ {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"},
+ {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"},
+ {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"},
+ {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"},
+ {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"},
+ {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"},
+ {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"},
+ {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"},
+ {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"},
+ {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"},
+ {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"},
+]
+
+[package.dependencies]
+greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+typing-extensions = ">=4.6.0"
+
+[package.extras]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
+aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
+aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
+asyncio = ["greenlet (!=0.4.17)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
+mssql = ["pyodbc"]
+mssql-pymssql = ["pymssql"]
+mssql-pyodbc = ["pyodbc"]
+mypy = ["mypy (>=0.910)"]
+mysql = ["mysqlclient (>=1.4.0)"]
+mysql-connector = ["mysql-connector-python"]
+oracle = ["cx_oracle (>=8)"]
+oracle-oracledb = ["oracledb (>=1.0.1)"]
+postgresql = ["psycopg2 (>=2.7)"]
+postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
+postgresql-psycopg = ["psycopg (>=3.0.7)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
+pymysql = ["pymysql"]
+sqlcipher = ["sqlcipher3_binary"]
+
+[[package]]
+name = "starlette"
+version = "0.38.4"
+description = "The little ASGI library that shines."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "starlette-0.38.4-py3-none-any.whl", hash = "sha256:526f53a77f0e43b85f583438aee1a940fd84f8fd610353e8b0c1a77ad8a87e76"},
+ {file = "starlette-0.38.4.tar.gz", hash = "sha256:53a7439060304a208fea17ed407e998f46da5e5d9b1addfea3040094512a6379"},
+]
+
+[package.dependencies]
+anyio = ">=3.4.0,<5"
+
+[package.extras]
+full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
+
+[[package]]
+name = "typer"
+version = "0.12.5"
+description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"},
+ {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+rich = ">=10.11.0"
+shellingham = ">=1.3.0"
+typing-extensions = ">=3.7.4.3"
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2024.1"
+description = "Provider of IANA time zone data"
+optional = false
+python-versions = ">=2"
+files = [
+ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
+ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.2"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+ {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "uvicorn"
+version = "0.30.6"
+description = "The lightning-fast ASGI server."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"},
+ {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"},
+]
+
+[package.dependencies]
+click = ">=7.0"
+colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""}
+h11 = ">=0.8"
+httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
+python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
+uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
+watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
+
+[package.extras]
+standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
+
+[[package]]
+name = "uvloop"
+version = "0.20.0"
+description = "Fast implementation of asyncio event loop on top of libuv"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"},
+ {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"},
+ {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"},
+ {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"},
+ {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"},
+ {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"},
+ {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"},
+ {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"},
+ {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"},
+ {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"},
+ {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"},
+ {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"},
+ {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"},
+ {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"},
+ {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"},
+ {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"},
+ {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"},
+ {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"},
+ {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"},
+ {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"},
+ {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"},
+ {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"},
+ {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"},
+ {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"},
+ {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"},
+ {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"},
+ {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"},
+ {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"},
+ {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"},
+ {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"},
+ {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
+
+[[package]]
+name = "watchfiles"
+version = "0.24.0"
+description = "Simple, modern and high performance file watching and code reload in python."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"},
+ {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"},
+ {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"},
+ {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"},
+ {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"},
+ {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"},
+ {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"},
+ {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"},
+ {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"},
+ {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"},
+ {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"},
+ {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"},
+ {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"},
+ {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"},
+ {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"},
+ {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"},
+ {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"},
+ {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"},
+ {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"},
+ {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"},
+ {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"},
+ {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"},
+ {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"},
+ {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"},
+ {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"},
+ {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"},
+ {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"},
+ {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"},
+ {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"},
+ {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"},
+ {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"},
+ {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"},
+ {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"},
+ {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"},
+ {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"},
+ {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"},
+ {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"},
+ {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"},
+ {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"},
+ {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"},
+ {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"},
+ {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"},
+ {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"},
+ {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"},
+ {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"},
+ {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"},
+ {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"},
+ {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"},
+ {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"},
+ {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"},
+ {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"},
+ {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"},
+ {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"},
+ {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"},
+ {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"},
+ {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"},
+ {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"},
+ {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"},
+ {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"},
+ {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"},
+ {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = "sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"},
+ {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"},
+ {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"},
+ {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"},
+ {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"},
+ {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"},
+ {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"},
+ {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"},
+ {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"},
+ {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"},
+ {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"},
+ {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"},
+ {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"},
+ {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"},
+ {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"},
+ {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"},
+ {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"},
+ {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"},
+ {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"},
+ {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"},
+ {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"},
+ {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"},
+ {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"},
+]
+
+[package.dependencies]
+anyio = ">=3.0.0"
+
+[[package]]
+name = "websockets"
+version = "13.0.1"
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1841c9082a3ba4a05ea824cf6d99570a6a2d8849ef0db16e9c826acb28089e8f"},
+ {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c5870b4a11b77e4caa3937142b650fbbc0914a3e07a0cf3131f35c0587489c1c"},
+ {file = "websockets-13.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f1d3d1f2eb79fe7b0fb02e599b2bf76a7619c79300fc55f0b5e2d382881d4f7f"},
+ {file = "websockets-13.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15c7d62ee071fa94a2fc52c2b472fed4af258d43f9030479d9c4a2de885fd543"},
+ {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6724b554b70d6195ba19650fef5759ef11346f946c07dbbe390e039bcaa7cc3d"},
+ {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a952fa2ae57a42ba7951e6b2605e08a24801a4931b5644dfc68939e041bc7f"},
+ {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17118647c0ea14796364299e942c330d72acc4b248e07e639d34b75067b3cdd8"},
+ {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a11aae1de4c178fa653b07d90f2fb1a2ed31919a5ea2361a38760192e1858b"},
+ {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0617fd0b1d14309c7eab6ba5deae8a7179959861846cbc5cb528a7531c249448"},
+ {file = "websockets-13.0.1-cp310-cp310-win32.whl", hash = "sha256:11f9976ecbc530248cf162e359a92f37b7b282de88d1d194f2167b5e7ad80ce3"},
+ {file = "websockets-13.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c3c493d0e5141ec055a7d6809a28ac2b88d5b878bb22df8c621ebe79a61123d0"},
+ {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:699ba9dd6a926f82a277063603fc8d586b89f4cb128efc353b749b641fcddda7"},
+ {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf2fae6d85e5dc384bf846f8243ddaa9197f3a1a70044f59399af001fd1f51d4"},
+ {file = "websockets-13.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52aed6ef21a0f1a2a5e310fb5c42d7555e9c5855476bbd7173c3aa3d8a0302f2"},
+ {file = "websockets-13.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb2b9a318542153674c6e377eb8cb9ca0fc011c04475110d3477862f15d29f0"},
+ {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5df891c86fe68b2c38da55b7aea7095beca105933c697d719f3f45f4220a5e0e"},
+ {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2d146ff30d9dd2fcf917e5d147db037a5c573f0446c564f16f1f94cf87462"},
+ {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8ac5b46fd798bbbf2ac6620e0437c36a202b08e1f827832c4bf050da081b501"},
+ {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:46af561eba6f9b0848b2c9d2427086cabadf14e0abdd9fde9d72d447df268418"},
+ {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b5a06d7f60bc2fc378a333978470dfc4e1415ee52f5f0fce4f7853eb10c1e9df"},
+ {file = "websockets-13.0.1-cp311-cp311-win32.whl", hash = "sha256:556e70e4f69be1082e6ef26dcb70efcd08d1850f5d6c5f4f2bcb4e397e68f01f"},
+ {file = "websockets-13.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:67494e95d6565bf395476e9d040037ff69c8b3fa356a886b21d8422ad86ae075"},
+ {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f9c9e258e3d5efe199ec23903f5da0eeaad58cf6fccb3547b74fd4750e5ac47a"},
+ {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6b41a1b3b561f1cba8321fb32987552a024a8f67f0d05f06fcf29f0090a1b956"},
+ {file = 
"websockets-13.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f73e676a46b0fe9426612ce8caeca54c9073191a77c3e9d5c94697aef99296af"}, + {file = "websockets-13.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f613289f4a94142f914aafad6c6c87903de78eae1e140fa769a7385fb232fdf"}, + {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f52504023b1480d458adf496dc1c9e9811df4ba4752f0bc1f89ae92f4f07d0c"}, + {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:139add0f98206cb74109faf3611b7783ceafc928529c62b389917a037d4cfdf4"}, + {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47236c13be337ef36546004ce8c5580f4b1150d9538b27bf8a5ad8edf23ccfab"}, + {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c44ca9ade59b2e376612df34e837013e2b273e6c92d7ed6636d0556b6f4db93d"}, + {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9bbc525f4be3e51b89b2a700f5746c2a6907d2e2ef4513a8daafc98198b92237"}, + {file = "websockets-13.0.1-cp312-cp312-win32.whl", hash = "sha256:3624fd8664f2577cf8de996db3250662e259bfbc870dd8ebdcf5d7c6ac0b5185"}, + {file = "websockets-13.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0513c727fb8adffa6d9bf4a4463b2bade0186cbd8c3604ae5540fae18a90cb99"}, + {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1ee4cc030a4bdab482a37462dbf3ffb7e09334d01dd37d1063be1136a0d825fa"}, + {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dbb0b697cc0655719522406c059eae233abaa3243821cfdfab1215d02ac10231"}, + {file = "websockets-13.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:acbebec8cb3d4df6e2488fbf34702cbc37fc39ac7abf9449392cefb3305562e9"}, + {file = "websockets-13.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63848cdb6fcc0bf09d4a155464c46c64ffdb5807ede4fb251da2c2692559ce75"}, + {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:872afa52a9f4c414d6955c365b6588bc4401272c629ff8321a55f44e3f62b553"}, + {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e70fec7c54aad4d71eae8e8cab50525e899791fc389ec6f77b95312e4e9920"}, + {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e82db3756ccb66266504f5a3de05ac6b32f287faacff72462612120074103329"}, + {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e85f46ce287f5c52438bb3703d86162263afccf034a5ef13dbe4318e98d86e7"}, + {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3fea72e4e6edb983908f0db373ae0732b275628901d909c382aae3b592589f2"}, + {file = "websockets-13.0.1-cp313-cp313-win32.whl", hash = "sha256:254ecf35572fca01a9f789a1d0f543898e222f7b69ecd7d5381d8d8047627bdb"}, + {file = "websockets-13.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:ca48914cdd9f2ccd94deab5bcb5ac98025a5ddce98881e5cce762854a5de330b"}, + {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b74593e9acf18ea5469c3edaa6b27fa7ecf97b30e9dabd5a94c4c940637ab96e"}, + {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:132511bfd42e77d152c919147078460c88a795af16b50e42a0bd14f0ad71ddd2"}, + {file = 
"websockets-13.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:165bedf13556f985a2aa064309baa01462aa79bf6112fbd068ae38993a0e1f1b"}, + {file = "websockets-13.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e801ca2f448850685417d723ec70298feff3ce4ff687c6f20922c7474b4746ae"}, + {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30d3a1f041360f029765d8704eae606781e673e8918e6b2c792e0775de51352f"}, + {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67648f5e50231b5a7f6d83b32f9c525e319f0ddc841be0de64f24928cd75a603"}, + {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4f0426d51c8f0926a4879390f53c7f5a855e42d68df95fff6032c82c888b5f36"}, + {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ef48e4137e8799998a343706531e656fdec6797b80efd029117edacb74b0a10a"}, + {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:249aab278810bee585cd0d4de2f08cfd67eed4fc75bde623be163798ed4db2eb"}, + {file = "websockets-13.0.1-cp38-cp38-win32.whl", hash = "sha256:06c0a667e466fcb56a0886d924b5f29a7f0886199102f0a0e1c60a02a3751cb4"}, + {file = "websockets-13.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1f3cf6d6ec1142412d4535adabc6bd72a63f5f148c43fe559f06298bc21953c9"}, + {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1fa082ea38d5de51dd409434edc27c0dcbd5fed2b09b9be982deb6f0508d25bc"}, + {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a365bcb7be554e6e1f9f3ed64016e67e2fa03d7b027a33e436aecf194febb63"}, + {file = "websockets-13.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10a0dc7242215d794fb1918f69c6bb235f1f627aaf19e77f05336d147fce7c37"}, + {file = "websockets-13.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59197afd478545b1f73367620407b0083303569c5f2d043afe5363676f2697c9"}, + {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d20516990d8ad557b5abeb48127b8b779b0b7e6771a265fa3e91767596d7d97"}, + {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1a2e272d067030048e1fe41aa1ec8cfbbaabce733b3d634304fa2b19e5c897f"}, + {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad327ac80ba7ee61da85383ca8822ff808ab5ada0e4a030d66703cc025b021c4"}, + {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:518f90e6dd089d34eaade01101fd8a990921c3ba18ebbe9b0165b46ebff947f0"}, + {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68264802399aed6fe9652e89761031acc734fc4c653137a5911c2bfa995d6d6d"}, + {file = "websockets-13.0.1-cp39-cp39-win32.whl", hash = "sha256:a5dc0c42ded1557cc7c3f0240b24129aefbad88af4f09346164349391dea8e58"}, + {file = "websockets-13.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b448a0690ef43db5ef31b3a0d9aea79043882b4632cfc3eaab20105edecf6097"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:faef9ec6354fe4f9a2c0bbb52fb1ff852effc897e2a4501e25eb3a47cb0a4f89"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:03d3f9ba172e0a53e37fa4e636b86cc60c3ab2cfee4935e66ed1d7acaa4625ad"}, + {file = 
"websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d450f5a7a35662a9b91a64aefa852f0c0308ee256122f5218a42f1d13577d71e"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f55b36d17ac50aa8a171b771e15fbe1561217510c8768af3d546f56c7576cdc"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b9c006cac63772b31abbcd3e3abb6228233eec966bf062e89e7fa7ae0b7333"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b79915a1179a91f6c5f04ece1e592e2e8a6bd245a0e45d12fd56b2b59e559a32"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f40de079779acbcdbb6ed4c65af9f018f8b77c5ec4e17a4b737c05c2db554491"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e4ba642fc87fa532bac07e5ed7e19d56940b6af6a8c61d4429be48718a380f"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a02b0161c43cc9e0232711eff846569fad6ec836a7acab16b3cf97b2344c060"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aa74a45d4cdc028561a7d6ab3272c8b3018e23723100b12e58be9dfa5a24491"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00fd961943b6c10ee6f0b1130753e50ac5dcd906130dcd77b0003c3ab797d026"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d93572720d781331fb10d3da9ca1067817d84ad1e7c31466e9f5e59965618096"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:71e6e5a3a3728886caee9ab8752e8113670936a193284be9d6ad2176a137f376"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c4a6343e3b0714e80da0b0893543bf9a5b5fa71b846ae640e56e9abc6fbc4c83"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a678532018e435396e37422a95e3ab87f75028ac79570ad11f5bf23cd2a7d8c"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6716c087e4aa0b9260c4e579bb82e068f84faddb9bfba9906cb87726fa2e870"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e33505534f3f673270dd67f81e73550b11de5b538c56fe04435d63c02c3f26b5"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acab3539a027a85d568c2573291e864333ec9d912675107d6efceb7e2be5d980"}, + {file = "websockets-13.0.1-py3-none-any.whl", hash = "sha256:b80f0c51681c517604152eb6a572f5a9378f877763231fddb883ba2f968e8817"}, + {file = "websockets-13.0.1.tar.gz", hash = "sha256:4d6ece65099411cfd9a48d13701d7438d9c34f479046b34c50ff60bb8834e43e"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "3d3248e47a6330affa0b0f20cc19ce302477e01f15fe015963d9bf9d25d58243" diff --git a/api-v2/pyproject.toml b/api-v2/pyproject.toml new file mode 100644 index 00000000..2f75e12f --- /dev/null +++ b/api-v2/pyproject.toml @@ -0,0 +1,22 @@ +[tool.poetry] +name = "home unite us" +version = "0.1.0" +description = "" +authors = ["Your Name "] +license = "MIT" 
+readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.12" +fastapi = {extras = ["standard"], version = "^0.113.0"} +sqlalchemy = "^2.0.34" +botocore = "^1.35.13" +boto3 = "^1.35.13" +python-dotenv = "^1.0.1" +pydantic-settings = "^2.4.0" +pyjwt = {extras = ["crypto"], version = "^2.9.0"} + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" From 28cf2f35468e7f98aa068c5e35f70d2fd9093328 Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 5 Sep 2024 22:32:55 -0700 Subject: [PATCH 02/70] Add settings config and db --- api-v2/README.md | 2 +- api-v2/app/core/config.py | 25 +++++++++++++++++++++++++ api-v2/app/core/db.py | 13 +++++++++++++ api-v2/{ => app}/main.py | 2 +- 4 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 api-v2/app/core/config.py create mode 100644 api-v2/app/core/db.py rename api-v2/{ => app}/main.py (90%) diff --git a/api-v2/README.md b/api-v2/README.md index b31147c0..0666c198 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -38,7 +38,7 @@ poetry install # Installs all dependencies poetry shell # Activates the virtual environment -poetry run fastapi dev main.py # Runs this server in developer mode +poetry run fastapi dev app/main.py # Runs this server in developer mode ``` Your server is now running at: diff --git a/api-v2/app/core/config.py b/api-v2/app/core/config.py new file mode 100644 index 00000000..71b98de5 --- /dev/null +++ b/api-v2/app/core/config.py @@ -0,0 +1,25 @@ +from pydantic_settings import BaseSettings +from dotenv import load_dotenv + +load_dotenv() + + +class Settings(BaseSettings): + COGNITO_CLIENT_ID: str + COGNITO_CLIENT_SECRET: str + COGNITO_REGION: str + COGNITO_REDIRECT_URI: str + COGNITO_USER_POOL_ID: str + COGNITO_ACCESS_ID: str + COGNITO_ACCESS_KEY: str + SECRET_KEY: str + CONFIG_PROFILE: str + ROOT_URL: str + ENV: str + DATABASE_URL: str + + class Config: + env_file = ".env" + + +settings = Settings() diff --git a/api-v2/app/core/db.py b/api-v2/app/core/db.py new file mode 100644 index 00000000..a6e2f98b --- /dev/null +++ b/api-v2/app/core/db.py @@ -0,0 +1,13 @@ +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +from core.config import settings + + +engine = create_engine( + settings.DATABASE_URL, connect_args={"check_same_thread": False} +) + +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +Base = declarative_base() diff --git a/api-v2/main.py b/api-v2/app/main.py similarity index 90% rename from api-v2/main.py rename to api-v2/app/main.py index ce586810..c57a3fb8 100644 --- a/api-v2/main.py +++ b/api-v2/app/main.py @@ -3,7 +3,7 @@ # from api.main import api_router -# from core.config import settings +from core.config import settings # from seed import init_db From 6393cf76cbfb862eafb375db0dfee4565b279597 Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 5 Sep 2024 22:39:13 -0700 Subject: [PATCH 03/70] Add models, db seed operation, and api router --- api-v2/app/api/main.py | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 api-v2/app/api/main.py diff --git a/api-v2/app/api/main.py b/api-v2/app/api/main.py new file mode 100644 index 00000000..2bad46d9 --- /dev/null +++ b/api-v2/app/api/main.py @@ -0,0 +1,7 @@ +from fastapi import APIRouter + +# from api.routes import auth + + +api_router = APIRouter() +# api_router.include_router(auth.router, prefix="/auth", tags=["auth"]) From c37e7d1cc2be6aaa0353167fd2b31fbf9108709c Mon Sep 17 00:00:00 2001 From: Erik 
Date: Thu, 5 Sep 2024 22:39:40 -0700
Subject: [PATCH 04/70] Add models, db seed operation, and api router

---
 api-v2/.gitignore    |  5 ++++-
 api-v2/app/main.py   | 17 ++++++++---------
 api-v2/app/models.py | 34 ++++++++++++++++++++++++++++++++++
 api-v2/app/seed.py   | 24 ++++++++++++++++++++++++
 4 files changed, 70 insertions(+), 10 deletions(-)
 create mode 100644 api-v2/app/models.py
 create mode 100644 api-v2/app/seed.py

diff --git a/api-v2/.gitignore b/api-v2/.gitignore
index e928de15..ad1b4e75 100644
--- a/api-v2/.gitignore
+++ b/api-v2/.gitignore
@@ -1,4 +1,7 @@
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
-*$py.class
\ No newline at end of file
+*$py.class
+
+# Local database
+homeuniteus.db
\ No newline at end of file
diff --git a/api-v2/app/main.py b/api-v2/app/main.py
index c57a3fb8..aafa4a7d 100644
--- a/api-v2/app/main.py
+++ b/api-v2/app/main.py
@@ -2,19 +2,18 @@
 from contextlib import asynccontextmanager
 
 
-# from api.main import api_router
+from api.main import api_router
 from core.config import settings
-# from seed import init_db
+from seed import init_db
 
 
-# @asynccontextmanager
-# async def lifespan(app: FastAPI):
-#     init_db()
-#     yield
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    init_db()
+    yield
 
 
-# app = FastAPI(lifespan=lifespan)
-app = FastAPI()
+app = FastAPI(lifespan=lifespan)
 
 
-# app.include_router(api_router, prefix="/api")
+app.include_router(api_router, prefix="/api")
diff --git a/api-v2/app/models.py b/api-v2/app/models.py
new file mode 100644
index 00000000..4d48b243
--- /dev/null
+++ b/api-v2/app/models.py
@@ -0,0 +1,34 @@
+from sqlalchemy import Column, ForeignKey, Integer, String
+from sqlalchemy.orm import relationship
+from sqlalchemy.orm import validates as validates_sqlachemy
+
+
+from core.db import Base
+
+
+class User(Base):
+    __tablename__ = "user"
+    id = Column(Integer, primary_key=True, index=True)
+    email = Column(String, nullable=False, unique=True)
+    firstName = Column(String(255), nullable=False)
+    middleName = Column(String(255), nullable=True)
+    lastName = Column(String(255), nullable=True)
+    roleId = Column(Integer, ForeignKey("role.id"), nullable=False)
+
+    role = relationship("Role", back_populates="users")
+
+    @validates_sqlachemy("firstName")
+    def validate_first_name(self, key, value):
+        if not value or not value.strip():
+            raise ValueError(
+                f"{key} must contain at least one non-space character"
+            )
+        return value.strip()
+
+
+class Role(Base):
+    __tablename__ = "role"
+    id = Column(Integer, primary_key=True, index=True)
+    type = Column(String, nullable=False, unique=True)
+
+    users = relationship("User", back_populates="role")
diff --git a/api-v2/app/seed.py b/api-v2/app/seed.py
new file mode 100644
index 00000000..1831ebeb
--- /dev/null
+++ b/api-v2/app/seed.py
@@ -0,0 +1,24 @@
+from core.db import Base, engine
+from sqlalchemy import event
+from models import Role
+
+INITIAL_ROLES = [
+    {"type": "admin"},
+    {"type": "guest"},
+    {"type": "host"},
+    {"type": "coordinator"},
+]
+
+
+# This method receives a table, a connection and inserts data to that table.
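+# It is registered below as an SQLAlchemy "after_create" listener, so it runs
+# automatically, exactly once, right after the "role" table is first created.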
+def initialize_table(target, connection, **kw): + for role in INITIAL_ROLES: + connection.execute(target.insert(), role) + return + + +event.listen(Role.__table__, "after_create", initialize_table) + + +def init_db(): + Base.metadata.create_all(bind=engine, checkfirst=True) \ No newline at end of file From 9107b1081da7d07c889f0555bf26771270cf85c7 Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 5 Sep 2024 22:49:37 -0700 Subject: [PATCH 05/70] Add auth router, dependencies, and schemas --- api-v2/README.md | 8 +++- api-v2/app/api/deps.py | 89 +++++++++++++++++++++++++++++++++++ api-v2/app/api/main.py | 4 +- api-v2/app/api/routes/auth.py | 3 ++ api-v2/app/schemas.py | 49 +++++++++++++++++++ 5 files changed, 150 insertions(+), 3 deletions(-) create mode 100644 api-v2/app/api/deps.py create mode 100644 api-v2/app/api/routes/auth.py create mode 100644 api-v2/app/schemas.py diff --git a/api-v2/README.md b/api-v2/README.md index 0666c198..34753b51 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -38,7 +38,13 @@ poetry install # Installs all dependencies poetry shell # Activates the virtual environment -poetry run fastapi dev app/main.py # Runs this server in developer mode +poetry run fastapi dev app/main.py # Runs this server in developer mode +``` + +To exit the virtual environment, run from the shell: + +```shell +exit ``` Your server is now running at: diff --git a/api-v2/app/api/deps.py b/api-v2/app/api/deps.py new file mode 100644 index 00000000..5ca8c1da --- /dev/null +++ b/api-v2/app/api/deps.py @@ -0,0 +1,89 @@ +import boto3 +import jwt +import time + +from fastapi import Request, HTTPException +from fastapi.security import SecurityScopes + +from core.db import SessionLocal +from core.config import settings + +cognito_region = settings.COGNITO_REGION +cognito_access_id = settings.COGNITO_ACCESS_ID +cognito_access_key = settings.COGNITO_ACCESS_KEY + + +def get_db(): + db = SessionLocal() + try: + yield db + finally: + db.close() + + +def get_cognito_client(): + return boto3.client( + "cognito-idp", + region_name=cognito_region, + aws_access_key_id=cognito_access_id, + aws_secret_access_key=cognito_access_key, + ) + + +def requires_auth(request: Request): + # Check for Authorization header + auth_header = request.headers.get("Authorization") + if auth_header is None: + raise HTTPException( + status_code=401, detail="Missing Authorization header" + ) + + # Check for Bearer token + token = auth_header.split(" ")[1] + if token is None: + raise HTTPException(status_code=401, detail="Missing token") + + # Decode token + decoded_access_token = jwt.decode( + token, algorithms=["RS256"], options={"verify_signature": False} + ) + + # Check if token is expired + if decoded_access_token["exp"] < time.time(): + raise HTTPException(status_code=401, detail="Token expired") + + try: + cognito_client = get_cognito_client() + cognito_client.get_user(AccessToken=token) + except Exception: + raise HTTPException(status_code=401, detail="Invalid token") + + return True + + +role_to_cognito_group_map = { + "admin": "Admins", + "host": "Hosts", + "coordinator": "Coordinators", + "guest": "Guests", +} + + +def allow_roles(request: Request, security_scopes: SecurityScopes): + id_token = request.cookies.get("id_token") + if id_token is None: + raise HTTPException(status_code=401, detail="Missing id_token") + + decoded_id_token = jwt.decode( + id_token, algorithms=["RS256"], options={"verify_signature": False} + ) + + groups = decoded_id_token.get("cognito:groups") + contains_group = any( + 
role_to_cognito_group_map[scope] in groups + for scope in security_scopes.scopes + ) + + if not contains_group: + raise HTTPException(status_code=403, detail="Unauthorized") + return True diff --git a/api-v2/app/api/main.py b/api-v2/app/api/main.py index 2bad46d9..c96ebe12 100644 --- a/api-v2/app/api/main.py +++ b/api-v2/app/api/main.py @@ -1,7 +1,7 @@ from fastapi import APIRouter -# from api.routes import auth +from api.routes import auth api_router = APIRouter() -# api_router.include_router(auth.router, prefix="/auth", tags=["auth"]) +api_router.include_router(auth.router, prefix="/auth", tags=["auth"]) diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py new file mode 100644 index 00000000..f3240acd --- /dev/null +++ b/api-v2/app/api/routes/auth.py @@ -0,0 +1,3 @@ +from fastapi import Depends, APIRouter, HTTPException, Response, Security + +router = APIRouter() diff --git a/api-v2/app/schemas.py b/api-v2/app/schemas.py new file mode 100644 index 00000000..97e22489 --- /dev/null +++ b/api-v2/app/schemas.py @@ -0,0 +1,49 @@ +from pydantic import BaseModel +from typing import Optional + +from enum import Enum + + +class UserRoleEnum(str, Enum): + ADMIN = "admin" + GUEST = "guest" + HOST = "host" + COORDINATOR = "coordinator" + + +class RoleBase(BaseModel): + id: int + type: UserRoleEnum + + class Config: + from_attributes = True + + +class UserBase(BaseModel): + email: str + firstName: str + middleName: Optional[str] = None + lastName: Optional[str] = None + + +class UserCreate(UserBase): + password: str + role: UserRoleEnum + + +class User(UserBase): + id: int + role: RoleBase + + class Config: + from_attributes = True + + +class UserSignIn(BaseModel): + email: str + password: str + + +class UserSignInResponse(BaseModel): + user: User + token: str From 4b32985186229bb8dd49ba90dcdd0b58e0045beb Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 5 Sep 2024 23:11:07 -0700 Subject: [PATCH 06/70] Create crud operations and auth routes --- api-v2/app/api/routes/auth.py | 153 ++++++++++++++++++++++++++++++++++ api-v2/app/crud.py | 38 +++++++++ api-v2/app/utils.py | 20 +++++ 3 files changed, 211 insertions(+) create mode 100644 api-v2/app/crud.py create mode 100644 api-v2/app/utils.py diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py index f3240acd..6ab3691d 100644 --- a/api-v2/app/api/routes/auth.py +++ b/api-v2/app/api/routes/auth.py @@ -1,3 +1,156 @@ +import logging + from fastapi import Depends, APIRouter, HTTPException, Response, Security +from fastapi.responses import RedirectResponse +from sqlalchemy.orm import Session +from botocore.exceptions import ClientError + + +from schemas import UserCreate, UserSignIn, UserSignInResponse +from crud import create_user, delete_user, get_user +from api.deps import ( + get_db, + get_cognito_client, + requires_auth, + allow_roles, + role_to_cognito_group_map, +) + +from utils import calc_secret_hash +from core.config import settings router = APIRouter() + +cognito_client_id = settings.COGNITO_CLIENT_ID +root_url = settings.ROOT_URL + + +# Helper function to set session cookies +def set_session_cookie(response: Response, auth_response: dict): + refresh_token = auth_response["AuthenticationResult"]["RefreshToken"] + id_token = auth_response["AuthenticationResult"]["IdToken"] + + response.set_cookie("refresh_token", refresh_token) + response.set_cookie("id_token", id_token) + + +""" +# Sign up route + +This route is used to Sign up a new user +""" + + +@router.post("/signup") +def signup( + body: UserCreate, + 
db: Session = Depends(get_db), + cognito_client=Depends(get_cognito_client), +): + # Create user in database + user = create_user(db, body) + if user is None: + raise HTTPException(status_code=400, detail="User already exists") + + # Add user to cognito + try: + response = cognito_client.sign_up( + ClientId=cognito_client_id, + SecretHash=calc_secret_hash(body.email), + Username=user.email, + Password=body.password, + ClientMetadata={"url": root_url}, + ) + except Exception as e: + logging.error(f"Failed to create user: {e}") + delete_user(db, user.id) + raise HTTPException(status_code=400, detail="Failed to create user") + + # Add user to group + try: + cognito_client.admin_add_user_to_group( + UserPoolId=settings.COGNITO_USER_POOL_ID, + Username=user.email, + GroupName=role_to_cognito_group_map[body.role], + ) + except Exception as e: + print(e) + raise HTTPException(status_code=400, detail="Failed to confirm user") + + return response + + +""" +# Sign in route + +This route is used to sign in a user and start a new session +""" + + +@router.post( + "/signin", + response_model=UserSignInResponse, +) +def signin( + body: UserSignIn, + response: Response, + db: Session = Depends(get_db), + cognito_client=Depends(get_cognito_client), +): + try: + auth_response = cognito_client.initiate_auth( + ClientId=cognito_client_id, + AuthFlow="USER_PASSWORD_AUTH", + AuthParameters={ + "USERNAME": body.email, + "PASSWORD": body.password, + "SECRET_HASH": calc_secret_hash(body.email), + }, + ) + except ClientError as e: + raise HTTPException( + status_code=400, + detail={ + "code": e.response["Error"]["Code"], + "message": e.response["Error"]["Message"], + }, + ) + + if ( + auth_response.get("ChallengeName") + and auth_response["ChallengeName"] == "NEW_PASSWORD_REQUIRED" + ): + userId = auth_response["ChallengeParameters"]["USER_ID_FOR_SRP"] + sessionId = auth_response["Session"] + return RedirectResponse( + f"{root_url}/create-password?userId={userId}&sessionId={sessionId}" + ) + + user = get_user(db, body.email) + if user is None: + raise HTTPException(status_code=400, detail="User not found") + + set_session_cookie(response, auth_response) + + return { + "user": user, + "token": auth_response["AuthenticationResult"]["AccessToken"], + } + + +""" +# Secret route + +This route is a secret route that requires authentication and the guest role +""" + + +@router.get( + "/secret", + dependencies=[ + Depends(requires_auth), + Security(allow_roles, scopes=["guest"]), + ], +) +def secret(): + return {"message": "Welcome to the secret route"} diff --git a/api-v2/app/crud.py b/api-v2/app/crud.py new file mode 100644 index 00000000..dc099ea1 --- /dev/null +++ b/api-v2/app/crud.py @@ -0,0 +1,38 @@ +from sqlalchemy.orm import Session + +import models +import schemas + + +def get_role(db: Session, role: int): + return db.query(models.Role).filter(models.Role.type == role.value).first() + + +def get_user(db: Session, email: str): + return db.query(models.User).filter(models.User.email == email).first() + + +def create_user(db: Session, user: schemas.UserCreate): + role = get_role(db, user.role) + if role is None: + raise ValueError("Invalid role") + + db_user = models.User( + email=user.email, + firstName=user.firstName, + middleName=user.middleName, + lastName=user.lastName, + roleId=role.id, + ) + + db.add(db_user) + db.commit() + db.refresh(db_user) + return db_user + + +def delete_user(db: Session, user_id: int): + user = db.query(models.User).filter(models.User.id == user_id).first() + db.delete(user) + 
db.commit() + return user diff --git a/api-v2/app/utils.py b/api-v2/app/utils.py new file mode 100644 index 00000000..5138bb47 --- /dev/null +++ b/api-v2/app/utils.py @@ -0,0 +1,20 @@ +import hmac +import base64 + +from core.config import settings + + +# Helper function to calculate secret hash +def calc_secret_hash(email: str) -> str: + cognito_client_id = settings.COGNITO_CLIENT_ID + cognito_client_secret = settings.COGNITO_CLIENT_SECRET + + if cognito_client_id is None: + print("COGNITO_CLIENT_ID is not set") + + message = email + cognito_client_id + secret = bytearray(cognito_client_secret, "utf-8") + dig = hmac.new( + secret, msg=message.encode("utf-8"), digestmod="sha256" + ).digest() + return base64.b64encode(dig).decode() \ No newline at end of file From 0ad2bc8365df74642bf48f0eec1229e09a44b169 Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 5 Sep 2024 23:17:26 -0700 Subject: [PATCH 07/70] Update README --- api-v2/README.md | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/api-v2/README.md b/api-v2/README.md index 34753b51..9706d7e1 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -6,10 +6,10 @@ This is the _Home Unite Us_ web API server. This server uses: -- [FastAPI](https://fastapi.tiangolo.com/). -- [SQLAlchemy](https://www.sqlalchemy.org/). -- [Pydantic](https://docs.pydantic.dev/latest/). -- [Poetry](https://python-poetry.org/docs/). +- [FastAPI](https://fastapi.tiangolo.com/) as the web framework for API development. +- [SQLAlchemy](https://www.sqlalchemy.org/) as the ORM for database operations. +- [Pydantic](https://docs.pydantic.dev/latest/) for data validation and serialization. +- [Poetry](https://python-poetry.org/docs/) for dependency management. ## Requirements @@ -19,7 +19,7 @@ Run `python -V` to check the Python version. **Note**: On some systems, you might need to use the `python3` and `pip3` commands. -[Poetry](https://python-poetry.org/docs/#installation) is used to manage the project dependencies. Install the CLI globally. +[Poetry](https://python-poetry.org/docs/#installation) is used to manage the project dependencies. Follow the [installation instructions](https://python-poetry.org/docs/#installation) to run the CLI globally. ## Usage - Development @@ -27,11 +27,11 @@ Run `python -V` to check the Python version. #### Configuration -The API application configuration must be specified before running the application. Configuration variables can be specified either as environment variables, or as entries within a `.env` file located within the `api` directory. To get started, copy the values from one of these configurations into a `.env` file: +The API application configuration must be specified before running the application. Configuration variables can be specified either as environment variables, or as entries within a `.env` file located within the `api-v2` directory. To get started, copy the values from one of `.env.example` into a `.env` file: #### Setup and Run -Once the `.env` file has been configured using the instructions outlined above and Poetry is installed, run the following commands in the `api` directory to install the required development dependencies and run the application. +Once the `.env` file has been configured using the instructions outlined above and Poetry is installed, run the following commands in the `api-v2` directory to install the required development dependencies and run the application. 
```shell poetry install # Installs all dependencies @@ -41,20 +41,17 @@ poetry shell # Activates the virtual environment poetry run fastapi dev app/main.py # Runs this server in developer mode ``` -To exit the virtual environment, run from the shell: - -```shell -exit -``` - Your server is now running at: - ``` http://127.0.0.1:8000 ``` And your API docs at: - ``` http://127.0.0.1:8000/docs ``` + +To exit the virtual environment, run from the shell: +```shell +exit +``` From 0ff1d0c05e611953aae76033129d05376adb51d1 Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:29:23 -0700 Subject: [PATCH 08/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index 9706d7e1..a085d1a8 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -51,7 +51,7 @@ And your API docs at: http://127.0.0.1:8000/docs ``` -To exit the virtual environment, run from the shell: +To exit the virtual environment, within the shell run: ```shell exit ``` From 3aea3e2ffe6b0b257d226b1cc47a597476d88dfc Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:45:05 -0700 Subject: [PATCH 09/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index a085d1a8..ffd5ef31 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -27,7 +27,7 @@ Run `python -V` to check the Python version. #### Configuration -The API application configuration must be specified before running the application. Configuration variables can be specified either as environment variables, or as entries within a `.env` file located within the `api-v2` directory. To get started, copy the values from one of `.env.example` into a `.env` file: +The API application configuration must be specified before running the application. Configuration variables are specified as entries within a `.env` file located within the `api-v2` directory. To get started, create a '.env` file withing `/api-v2` and copy the values from `.env.example` into the new `.env` file. You may have to contact someone from the development team to get the necessary values. #### Setup and Run From d8319bf236d68a22378ba40e9585ff75c717cf6b Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:45:26 -0700 Subject: [PATCH 10/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index ffd5ef31..de6002f6 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -27,7 +27,7 @@ Run `python -V` to check the Python version. #### Configuration -The API application configuration must be specified before running the application. Configuration variables are specified as entries within a `.env` file located within the `api-v2` directory. To get started, create a '.env` file withing `/api-v2` and copy the values from `.env.example` into the new `.env` file. You may have to contact someone from the development team to get the necessary values. +The API application configuration must be specified before running the application. Configuration variables are specified as entries within a `.env` file located within the `api-v2` directory. To get started, create a `.env` file withing `/api-v2` and copy the values from `.env.example` into the new `.env` file. You may have to contact someone from the development team to get the necessary values. 
#### Setup and Run From cfaed9878e5771d9a9052a08e24b481cfcd7bd72 Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:46:44 -0700 Subject: [PATCH 11/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index de6002f6..56752337 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -31,7 +31,7 @@ The API application configuration must be specified before running the applicati #### Setup and Run -Once the `.env` file has been configured using the instructions outlined above and Poetry is installed, run the following commands in the `api-v2` directory to install the required development dependencies and run the application. +Once the `.env` file has been configured and Poetry is installed, run the following commands in the `api-v2` directory to install the required development dependencies and run the application. ```shell poetry install # Installs all dependencies From d84c2cd52ee2d0ac7e8b8385f09d37c158b9665d Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:47:39 -0700 Subject: [PATCH 12/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index 56752337..23cee84a 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -27,7 +27,7 @@ Run `python -V` to check the Python version. #### Configuration -The API application configuration must be specified before running the application. Configuration variables are specified as entries within a `.env` file located within the `api-v2` directory. To get started, create a `.env` file withing `/api-v2` and copy the values from `.env.example` into the new `.env` file. You may have to contact someone from the development team to get the necessary values. +The API configuration must be specified before running the application. Configuration variables are specified as entries within a `.env` file located within the `api-v2` directory. To get started, create a `.env` file within `/api-v2` and copy the values from `.env.example` into the new `.env` file. You may have to contact someone from the development team to get the necessary values. #### Setup and Run From 061b0fca9aacb384eb619a6823f547dc8d87c78b Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:52:00 -0700 Subject: [PATCH 13/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index 23cee84a..57e0a65b 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -13,7 +13,7 @@ This server uses: ## Requirements -Python >= 3.12 +You will need Python >= 3.8+ to install Poetry. Run `python -V` to check the Python version. From 946ee10c7bed916afe5cd8db05dca6fe90506e50 Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:52:57 -0700 Subject: [PATCH 14/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index 57e0a65b..3b57a90c 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -13,7 +13,7 @@ This server uses: ## Requirements -You will need Python >= 3.8+ to install Poetry. +You will need Python 3.8+ to install Poetry. Run `python -V` to check the Python version. 
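Concretely, the configuration instructions that the README patches above converge on amount to a single copy step. A minimal sketch, assuming you start at the repository root and fill in the empty values afterwards:

```shell
cd api-v2
cp .env.example .env  # then open .env and fill in the Cognito and secret values
```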
From 32ddd8d79f3a58e15c2bd2f1a56b94373121d450 Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:56:16 -0700 Subject: [PATCH 15/70] Update README.md --- api-v2/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/api-v2/README.md b/api-v2/README.md index 3b57a90c..3be64a81 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -6,10 +6,10 @@ This is the _Home Unite Us_ web API server. This server uses: -- [FastAPI](https://fastapi.tiangolo.com/) as the web framework for API development. -- [SQLAlchemy](https://www.sqlalchemy.org/) as the ORM for database operations. -- [Pydantic](https://docs.pydantic.dev/latest/) for data validation and serialization. -- [Poetry](https://python-poetry.org/docs/) for dependency management. +- [FastAPI](https://fastapi.tiangolo.com/) - Web framework for API development +- [SQLAlchemy](https://www.sqlalchemy.org/) - ORM for database operations +- [Pydantic](https://docs.pydantic.dev/latest/) - Data validation and serialization +- [Poetry](https://python-poetry.org/docs/) - Dependency management ## Requirements From feaef561828afbc81477bfb91e5bc955bf3ccedf Mon Sep 17 00:00:00 2001 From: Erik Guntner Date: Fri, 6 Sep 2024 10:57:15 -0700 Subject: [PATCH 16/70] Update README.md --- api-v2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api-v2/README.md b/api-v2/README.md index 3be64a81..f168da9f 100644 --- a/api-v2/README.md +++ b/api-v2/README.md @@ -1,4 +1,4 @@ -# Home Unite Us OpenAPI Server +# Home Unite Us FastAPI Server ## Overview From 73f5ee51106d1ed9ff496f4f11c1ce371153581c Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Fri, 6 Sep 2024 11:32:33 -0700 Subject: [PATCH 17/70] fastapi-migration: poetry for dependency mgmt only --- api-v2/pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/api-v2/pyproject.toml b/api-v2/pyproject.toml index 2f75e12f..0adb5ae1 100644 --- a/api-v2/pyproject.toml +++ b/api-v2/pyproject.toml @@ -5,6 +5,7 @@ description = "" authors = ["Your Name "] license = "MIT" readme = "README.md" +package-mode = false [tool.poetry.dependencies] python = "^3.12" From ba0fa323f8a925ab35a129e976db07885e3f88e3 Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Fri, 6 Sep 2024 12:37:04 -0700 Subject: [PATCH 18/70] fastapi-migration: Moved tests and test config This commit moved the API `tests` folder and configured poetry with a test dependency group. `tox` was added along with configuration to run `poetry run pytest`. The test dependencies are installed using poetry: `poetry install --with test`. 
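A Poetry dependency group like the one this commit describes is declared in `pyproject.toml` along these lines (a minimal sketch; the version constraints are illustrative, not taken from the diff that follows):

```toml
[tool.poetry.group.test.dependencies]
pytest = "^8.3"
tox = "^4.18"
```

With the group declared, `poetry install --with test` installs these packages alongside the runtime dependencies, and tox can then drive `poetry run pytest` in an isolated environment, as the commit message notes.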
--- api-v2/.gitignore | 70 +- api-v2/poetry.lock | 683 +++++++++++++++++- api-v2/pyproject.toml | 51 +- api-v2/tests/__init__.py | 0 api-v2/tests/conftest.py | 161 +++++ api-v2/tests/setup_utils.py | 85 +++ api-v2/tests/test_alembic_migration.py | 34 + api-v2/tests/test_authentication.py | 329 +++++++++ api-v2/tests/test_configs.py | 207 ++++++ api-v2/tests/test_forms_repo.py | 127 ++++ api-v2/tests/test_forms_schema.py | 238 ++++++ api-v2/tests/test_host_controller.py | 66 ++ api-v2/tests/test_mocking.py | 208 ++++++ api-v2/tests/test_schema.py | 147 ++++ .../tests/test_service_provider_controller.py | 210 ++++++ .../tests/test_service_provider_repository.py | 112 +++ api-v2/tests/test_user_repo.py | 52 ++ api-v2/tox.ini | 20 + 18 files changed, 2787 insertions(+), 13 deletions(-) create mode 100644 api-v2/tests/__init__.py create mode 100644 api-v2/tests/conftest.py create mode 100644 api-v2/tests/setup_utils.py create mode 100644 api-v2/tests/test_alembic_migration.py create mode 100644 api-v2/tests/test_authentication.py create mode 100644 api-v2/tests/test_configs.py create mode 100644 api-v2/tests/test_forms_repo.py create mode 100644 api-v2/tests/test_forms_schema.py create mode 100644 api-v2/tests/test_host_controller.py create mode 100644 api-v2/tests/test_mocking.py create mode 100644 api-v2/tests/test_schema.py create mode 100644 api-v2/tests/test_service_provider_controller.py create mode 100644 api-v2/tests/test_service_provider_repository.py create mode 100644 api-v2/tests/test_user_repo.py create mode 100644 api-v2/tox.ini diff --git a/api-v2/.gitignore b/api-v2/.gitignore index ad1b4e75..cf8d83c2 100644 --- a/api-v2/.gitignore +++ b/api-v2/.gitignore @@ -4,4 +4,72 @@ __pycache__/ *$py.class # Local database -homeuniteus.db \ No newline at end of file +homeuniteus.db + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints + +# Local Setup +.DS_Store +.vscode +personal.py +.env \ No newline at end of file diff --git a/api-v2/poetry.lock b/api-v2/poetry.lock index 1d427f96..9ef32e8c 100644 --- a/api-v2/poetry.lock +++ b/api-v2/poetry.lock @@ -1,5 +1,24 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +[[package]] +name = "alembic" +version = "1.13.2" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + [[package]] name = "annotated-types" version = "0.7.0" @@ -33,17 +52,17 @@ trio = ["trio (>=0.23)"] [[package]] name = "boto3" -version = "1.35.13" +version = "1.35.14" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.13-py3-none-any.whl", hash = "sha256:6e220eae161a4c0ed21e2561edcb0fd9603fa621692c50bc099db318ed3e3ad4"}, - {file = "boto3-1.35.13.tar.gz", hash = "sha256:4af17bd7bada591ddaa835d774b242705210e5d45133e25bd73417daa42e53e7"}, + {file = "boto3-1.35.14-py3-none-any.whl", hash = "sha256:c3e138e9041d59cd34cdc28a587dfdc899dba02ea26ebc3e10fb4bc88e5cf31b"}, + {file = "boto3-1.35.14.tar.gz", hash = "sha256:7bc78d7140c353b10a637927fe4bc4c4d95a464d1b8f515d5844def2ee52cbd5"}, ] [package.dependencies] -botocore = ">=1.35.13,<1.36.0" +botocore = ">=1.35.14,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -52,13 +71,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.13" +version = "1.35.14" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.13-py3-none-any.whl", hash = "sha256:dd8a8bb1946187c8eb902a3b856d3b24df63917e4f2c61a6bce7f3ea9f112761"}, - {file = "botocore-1.35.13.tar.gz", hash = "sha256:f7ae62eab44d731a5ad8917788378316c79c7bceb530a8307ed0f3bca7037341"}, + {file = "botocore-1.35.14-py3-none-any.whl", hash = "sha256:24823135232f88266b66ae8e1d0f3d40872c14cd976781f7fe52b8f0d79035a0"}, + {file = "botocore-1.35.14.tar.gz", hash = "sha256:8515a2fc7ca5bcf0b10016ba05ccf2d642b7cb77d8773026ff2fa5aa3bf38d2e"}, ] [package.dependencies] @@ -69,6 +88,17 @@ urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version > [package.extras] crt = ["awscrt (==0.21.2)"] +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + [[package]] name = "certifi" version = "2024.8.30" @@ -159,6 +189,116 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + [[package]] name = "click" version = "8.1.7" @@ -184,6 +324,90 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = 
"coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "43.0.1" @@ -233,6 +457,17 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + [[package]] name = "dnspython" version = "2.6.1" @@ -312,6 +547,22 @@ uvicorn = {version = ">=0.15.0", extras = ["standard"]} [package.extras] standard = ["uvicorn[standard] (>=0.15.0)"] +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "greenlet" version = "3.0.3" @@ -499,6 +750,17 @@ files = [ {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + [[package]] name = "jinja2" version = "3.1.4" @@ -527,6 +789,42 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "joserfc" +version = "1.0.0" +description = "The ultimate Python library for JOSE RFCs, including JWS, JWE, JWK, JWA, JWT" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joserfc-1.0.0-py3-none-any.whl", hash = "sha256:1de2c3ac203db8fceb2e84c1e78ba357030b195c21af046a1411711927654a09"}, + {file = "joserfc-1.0.0.tar.gz", hash = "sha256:298a9820c76576f8ca63375d1851cc092f3f225508305c7a36c4632cec38f7bc"}, +] + +[package.dependencies] +cryptography = "*" + +[package.extras] +drafts = ["pycryptodome"] + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -631,6 +929,175 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "moto" +version = "5.0.13" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "moto-5.0.13-py2.py3-none-any.whl", hash = "sha256:984377a9c4536543fc09f49a1d5210c61c4a4f55c79719f7d9f8dcdd9bf55ea5"}, + {file = "moto-5.0.13.tar.gz", hash = "sha256:ddf8864f0d61af88fd07a4e5eac428c6bebf4fcd10023f8e756e65e9e7b7e4a5"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.14.0" +cryptography = ">=3.3.1" +Jinja2 = ">=2.10.1" +joserfc = {version = ">=0.9.0", optional = true, markers = "extra == \"cognitoidp\""} +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.15.0" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] +events = ["jsonpath-ng"] +glue = ["pyparsing (>=3.0.7)"] +iotdata = ["jsondiff (>=1.1.2)"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.5)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.5)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] 
+stepfunctions = ["antlr4-python3-runtime", "jsonpath-ng"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "psycopg2-binary" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, +] + [[package]] name = "pycparser" version = "2.22" @@ -820,6 +1287,92 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pyproject-api" +version = "1.7.1" +description = "API to interact with the python pyproject.toml based projects" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, + {file = "pyproject_api-1.7.1.tar.gz", hash = 
"sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, +] + +[package.dependencies] +packaging = ">=24.1" + +[package.extras] +docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] + +[[package]] +name = "pytest" +version = "8.3.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-alembic" +version = "0.11.1" +description = "A pytest plugin for verifying alembic migrations." +optional = false +python-versions = "<4,>=3.6" +files = [ + {file = "pytest_alembic-0.11.1-py3-none-any.whl", hash = "sha256:f83e8c1534d50ced053aa4b1dbf6e261f4674aa626cb852fc1dcb565049ae152"}, + {file = "pytest_alembic-0.11.1.tar.gz", hash = "sha256:a920d8770b5be77326c5c1b2bd8d4d4a0dd8fc2c2d57abbcd1fec28a21131b85"}, +] + +[package.dependencies] +alembic = "*" +pytest = ">=6.0" +sqlalchemy = "*" + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-randomly" +version = "3.15.0" +description = "Pytest plugin to randomly order tests and control random.seed." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_randomly-3.15.0-py3-none-any.whl", hash = "sha256:0516f4344b29f4e9cdae8bce31c4aeebf59d0b9ef05927c33354ff3859eeeca6"}, + {file = "pytest_randomly-3.15.0.tar.gz", hash = "sha256:b908529648667ba5e54723088edd6f82252f540cc340d748d1fa985539687047"}, +] + +[package.dependencies] +pytest = "*" + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -924,6 +1477,46 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "responses" +version = "0.25.3" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] + [[package]] name = "rich" version = "13.8.0" @@ -1096,6 +1689,32 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "tox" +version = "4.18.0" +description = "tox is a generic virtualenv management and test command line tool" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tox-4.18.0-py3-none-any.whl", hash = "sha256:0a457400cf70615dc0627eb70d293e80cd95d8ce174bb40ac011011f0c03a249"}, + {file = "tox-4.18.0.tar.gz", hash = "sha256:5dfa1cab9f146becd6e351333a82f9e0ade374451630ba65ee54584624c27b58"}, +] + +[package.dependencies] +cachetools = ">=5.4" +chardet = ">=5.2" +colorama = ">=0.4.6" +filelock = ">=3.15.4" +packaging = ">=24.1" +platformdirs = ">=4.2.2" +pluggy = ">=1.5" +pyproject-api = ">=1.7.1" +virtualenv = ">=20.26.3" + +[package.extras] +docs = ["furo (>=2024.7.18)", "sphinx (>=7.4.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.3)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.3)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] + [[package]] name = "typer" version = "0.12.5" @@ -1221,6 +1840,26 @@ files = [ docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = 
"virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "watchfiles" version = "0.24.0" @@ -1411,7 +2050,35 @@ files = [ {file = "websockets-13.0.1.tar.gz", hash = "sha256:4d6ece65099411cfd9a48d13701d7438d9c34f479046b34c50ff60bb8834e43e"}, ] +[[package]] +name = "werkzeug" +version = "3.0.4" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "3d3248e47a6330affa0b0f20cc19ce302477e01f15fe015963d9bf9d25d58243" +content-hash = "3ed6c59ca289ab7d6ee202844938b6eba99cc023f4b9c24ca3e7cbfcff5c32bd" diff --git a/api-v2/pyproject.toml b/api-v2/pyproject.toml index 0adb5ae1..17ee8c61 100644 --- a/api-v2/pyproject.toml +++ b/api-v2/pyproject.toml @@ -1,8 +1,10 @@ [tool.poetry] name = "home unite us" version = "0.1.0" -description = "" -authors = ["Your Name "] +description = "Web API for Home Unite Us" +authors = [ + "Home Unite Us ", +] license = "MIT" readme = "README.md" package-mode = false @@ -10,14 +12,55 @@ package-mode = false [tool.poetry.dependencies] python = "^3.12" fastapi = {extras = ["standard"], version = "^0.113.0"} + +# SQLAlchemy is a Python SQL toolkit and Object Relational Mapper that is +# mainly used for its object relational mapper and database transactional +# features. sqlalchemy = "^2.0.34" -botocore = "^1.35.13" + +# alembic is a database migration tool provided by the SQLAlchemy project. +# alembic does not use SemVer. Changes in the middle number represents a +# "Significant Minor Release" that might be non-backward compatible. +# https://alembic.sqlalchemy.org/en/latest/front.html#versioning-scheme +alembic = "^1.13" + +# psycopg2 allows SQLAlchemy to communicate with PostgreSQL. +# PostgreSQL is this project's target production database. 
+psycopg2-binary = "^2.9" + +# boto3 is used for connecting to AWS resources boto3 = "^1.35.13" + +# python-dotenv allows the API to pull in external configuration from a .env file python-dotenv = "^1.0.1" + +# pydantic-settings is a Pydantic feature that is used to load settings/configurations +# from environment variables or secret files. pydantic-settings = "^2.4.0" + +# pyjwt is used to decode information contained in a JSON Web Token pyjwt = {extras = ["crypto"], version = "^2.9.0"} +[tool.poetry.group.test.dependencies] +# tox builds and runs tests in an isolated environment. +# It has its own configuration file named `tox.ini`. +tox = "^4.18" + +# pytest runs the tests implemented in this project. +pytest = "^8.3" + +# pytest-cov will report the amount of test coverage implemented. +pytest-cov = "^5.0" + +# pytest-randomly will cause tests to be run in random order. +pytest-randomly = "^3.15" + +# moto mocks out AWS Services +moto = {extras = ["cognitoidp"], version = "^5.0"} + +# pytest-alembic is a pytest plugin that verifies alembic migrations +pytest-alembic = "^0.11.0" [build-system] requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/api-v2/tests/__init__.py b/api-v2/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/tests/conftest.py b/api-v2/tests/conftest.py new file mode 100644 index 00000000..8b811068 --- /dev/null +++ b/api-v2/tests/conftest.py @@ -0,0 +1,161 @@ +import os + +import pytest +import secrets +from collections.abc import Generator +from pytest import MonkeyPatch +import sqlalchemy +from sqlalchemy.orm import Session + +from openapi_server.configs.staging import StagingHUUConfig +from openapi_server.configs.development import DevelopmentHUUConfig +from openapi_server.app import create_app +from openapi_server.configs.mock_aws import AWSMockService, AWSTemporaryUserpool +from openapi_server.models.database import DataAccessLayer + +def pytest_addoption(parser: pytest.Parser) -> None: + ''' + pytest hook used to register argparse-style options and ini-style config values, + called once at the beginning of a test run. + ''' + parser.addoption( + "--mode", + action="store", + default="debug", + help="run tests in debug or release mode", + ) + +def pytest_configure(config: pytest.Config) -> None: + ''' + pytest hook used to perform initial test application configuration, + called at the beginning of a test run, within conftest.py file. + ''' + mode = config.getoption("mode", default='debug').lower() + if mode == 'debug': + # All application configurations are defined explicitly in code. The + # system environment is not used. All resources that can be safely + # mocked, will be mocked (e.g. mock AWS cognito API calls) + with MonkeyPatch().context() as m: + for env_var in os.environ.keys(): + m.delenv(env_var) + app_config = DevelopmentHUUConfig( + TESTING=True, + FLASK_DEBUG=True, + DATABASE_URL = 'sqlite:///:memory:' + ) + config.mock_aws = True + elif mode == 'release': + # Load configuration from the environment, to allow the use of + # secrets, and disable the mocking of any resources + from dotenv import load_dotenv, find_dotenv + dot_env = find_dotenv() + if dot_env: + load_dotenv(dot_env) + with MonkeyPatch().context() as m: + # The real userpool should never be used while testing + # Our test infrastructure will create temporary user + # pools for each test. 
+ m.setenv("COGNITO_CLIENT_ID", "Totally fake client id") + m.setenv("COGNITO_CLIENT_SECRET", "Yet another fake secret12") + m.setenv("COGNITO_REDIRECT_URI", "Redirect your way back to writing more test cases") + m.setenv("COGNITO_USER_POOL_ID", "Water's warm. IDs are fake") + m.setenv("SECRET_KEY", secrets.token_urlsafe(32)) + m.setenv("DATABASE_URL", "sqlite:///:memory:") + app_config = StagingHUUConfig( + TESTING=True, + FLASK_DEBUG=True + ) + config.mock_aws = False + else: + raise KeyError(f"pytest application configuration mode {mode} not" + "recognized. Only debug and release modes supported.") + + config.app_config = app_config + +@pytest.fixture(scope="session") +def app_config(request): + return request.config.app_config + +@pytest.fixture(scope="session") +def is_mocking(pytestconfig): + return pytestconfig.mock_aws + +@pytest.fixture() +def app(pytestconfig, empty_db_session): + flask_app = create_app(pytestconfig.app_config).app + + # Tests will never operate on real user data, so provide a + # temporary userpool even if mocking is disabled + app_environment_cls = AWSMockService if pytestconfig.mock_aws else AWSTemporaryUserpool + + with app_environment_cls(flask_app): + yield flask_app + +@pytest.fixture +def alembic_engine(): + ''' + Override the pytest-alembic default engine to use an in-memory + database at the base revision. + ''' + return sqlalchemy.create_engine("sqlite:///:memory:") + +@pytest.fixture() +def empty_db_session(alembic_runner, alembic_engine) -> Generator[Session, None, None]: + ''' + SetUp and TearDown an empty in-memory database for + database repository tests. + + This fixture does not initialize the full application. + ''' + # Upgrade the database to the current head revision + # This applies all of our alembic migration scripts + # to the empty database + alembic_runner.migrate_up_to("heads") + DataAccessLayer._engine = alembic_engine + + yield DataAccessLayer.session() + + test_engine, DataAccessLayer._engine = DataAccessLayer._engine, None + test_engine.dispose() + +@pytest.fixture() +def empty_db_session_provider(empty_db_session): + class _provider: + def session(): return empty_db_session + + return _provider + +@pytest.fixture() +def client(app): + return app.test_client() + +@pytest.fixture +def empty_environment(monkeypatch: MonkeyPatch) -> MonkeyPatch: + ''' + Create an isolated environment for testing purposes. + The environment variables are cleared to ensure the + configuration object is not dependent on the machine configuration. + ''' + for env_var in os.environ.keys(): + monkeypatch.delenv(env_var) + return monkeypatch + +@pytest.fixture +def fake_prod_env(empty_environment: MonkeyPatch) -> MonkeyPatch: + ''' + Define a fake production environment by setting each of the required + production configuration variables with fake values. + ''' + empty_environment.setenv("ENV", "production") + empty_environment.setenv("FLASK_DEBUG", "False") + empty_environment.setenv("TESTING", "False") + empty_environment.setenv("SECRET_KEY", "A completely made up fake secret !@#$12234") + empty_environment.setenv("DATABASE_URL", "sqlite:///:memory:") + empty_environment.setenv("COGNITO_CLIENT_ID", "Totally fake client id") + empty_environment.setenv("COGNITO_CLIENT_SECRET", "Yet another fake secret12") + empty_environment.setenv("COGNITO_REGION", "Not even the region actually exists") + empty_environment.setenv("COGNITO_REDIRECT_URI", "Redirect your way back to writing more test cases") + empty_environment.setenv("COGNITO_USER_POOL_ID", "Water's warm. 
IDs are fake") + empty_environment.setenv("COGNITO_ACCESS_ID", "If you need fake access, use this ID") + empty_environment.setenv("COGNITO_ACCESS_KEY", "WARNING: This is a real-ly fake key 12345a6sdf") + return empty_environment \ No newline at end of file diff --git a/api-v2/tests/setup_utils.py b/api-v2/tests/setup_utils.py new file mode 100644 index 00000000..b780b07b --- /dev/null +++ b/api-v2/tests/setup_utils.py @@ -0,0 +1,85 @@ +from typing import List + +from openapi_server.repositories.service_provider_repository import HousingProviderRepository + +def populate_test_database(num_entries) -> List[int]: + ''' + Add num_entries rows to the test database and return the + created Ids. Fail test if any of the creation requests + fails. + + Note: Providers are created using SQLAlchemy commands, + not API requests. + ''' + ids = [] + db_helper = HousingProviderRepository() + for i in range(num_entries): + provider = db_helper.create_service_provider(f"Provider No {i}") + assert provider is not None, ( + f"Test setup failure. Failed to create Provider No {i}." + "Cannot perform endpoint test!") + ids.append(provider.id) + return ids + +def signup_user(app, email: str, password: str, firstName: str = None, + middleName: str = None, lastName: str = None) -> None: + if not firstName: firstName = "firstName" + if not lastName: lastName = "lastName" + if not middleName: middleName = "" + + signup_response = app.test_client().post( + '/api/auth/signup/host', + json = { + 'email': email, + 'password': password, + 'firstName': firstName, + 'middleName': middleName, + 'lastName': lastName + } + ) + # Currently the signup returns different response structures for auth + # errors and "Bad Request" errors. Ideally the structure of the response + # would always be the same where there is an error. + assert signup_response.status_code != 400, f"User factory failed to signup user: {signup_response.status}, {signup_response.text}" + assert signup_response.status_code == 200, f"User factory failed to signup user: {signup_response.json['message']}" + +def confirm_user(app, email: str) -> None: + confirm_response = app.boto_client.admin_confirm_sign_up( + UserPoolId=app.config["COGNITO_USER_POOL_ID"], + Username=email + ) + assert confirm_response['ResponseMetadata']['HTTPStatusCode'] == 200, f"User factory failed to confirm user" + +def create_user(test_client, email: str, password: str, firstName: str = None, + middleName: str = None, lastName: str = None) -> None: + ''' + Signup and confirm a new user. Fail the test if the + signup or confirm operation fails. + ''' + app = test_client.application + signup_user(app, email, password, firstName, middleName, lastName) + confirm_user(app, email) + +def signin_user(test_client, email: str, password: str) -> str: + ''' + Signin a user and return the JWT. Fail the test if the + signin operation fails. + ''' + response = test_client.post( + '/api/auth/signin', + json = { + 'email': email, + 'password': password + } + ) + assert response.status_code == 200, "Signin failed" + assert "token" in response.json, "Signin succeeded but no token provided" + return response.json['token'] + +def create_and_signin_user(test_client, email: str, password: str) -> (str, str): + ''' + Signup, confirm, and signin a new user. Return the JWT. + Fail the test if the signup, confirm, or signin operation fails. 
+    '''
+    create_user(test_client, email, password)
+    return signin_user(test_client, email, password)
\ No newline at end of file
diff --git a/api-v2/tests/test_alembic_migration.py b/api-v2/tests/test_alembic_migration.py
new file mode 100644
index 00000000..500fa552
--- /dev/null
+++ b/api-v2/tests/test_alembic_migration.py
@@ -0,0 +1,34 @@
+from openapi_server.models.database import DataAccessLayer
+from openapi_server.models.user_roles import UserRole
+from openapi_server.repositories.user_repo import UserRepository
+
+# Importing these tests will register them within our test project
+# These tests do an excellent job of detecting errors in the alembic
+# downgrade and upgrade scripts.
+from pytest_alembic.tests import test_single_head_revision
+from pytest_alembic.tests import test_upgrade
+from pytest_alembic.tests import test_model_definitions_match_ddl
+from pytest_alembic.tests import test_up_down_consistency
+
+def test_db_session_version(empty_db_session):
+    '''
+    Test that the pytest in-memory database is at the most
+    up-to-date alembic migration version. This ensures all
+    the required database objects and pre-populated fields
+    are available.
+    '''
+    # Adding a new database revision will break this test case
+
+    # Before updating to the new revision please add additional
+    # test cases below that check the integrity of your new migration
+    assert DataAccessLayer.revision_id() == 'cfc4e41b69d3'
+
+def test_user_roles_available(empty_db_session):
+    '''
+    Test that all of the UserRole types are pre-populated within
+    the Role table after migrating the database to the HEAD revision.
+    '''
+    user_repo = UserRepository(empty_db_session)
+    for role in UserRole:
+        db_role = user_repo._get_role(role)
+        assert db_role.name == role.value
\ No newline at end of file
diff --git a/api-v2/tests/test_authentication.py b/api-v2/tests/test_authentication.py
new file mode 100644
index 00000000..b59a9dea
--- /dev/null
+++ b/api-v2/tests/test_authentication.py
@@ -0,0 +1,329 @@
+import string
+import re
+import pytest
+from werkzeug.http import parse_cookie
+from openapi_server.models.database import DataAccessLayer, User
+
+from tests.setup_utils import create_user, create_and_signin_user
+
+
+def strip_punctuation(text):
+    return text.translate(str.maketrans("", "", string.punctuation))
+
+def test_signin_with_fake_credentials(client):
+    response = client.post(
+        '/api/auth/signin',
+        json = {
+            'email': 'inbox928@placeholder.org',
+            'password': '_pp#FXo;h$i~'
+        }
+    )
+    assert response.status_code == 401
+
+def test_signin_without_email_format(client):
+    '''
+    Attempting to log in with an email field that does not
+    follow the email format returns a 400 error instead of 401.
+    '''
+    response = client.post(
+        '/api/auth/signin',
+        json = {
+            'email': 'notta_email',
+            'password': '_pp#FXo;h$i~'
+        }
+    )
+
+    assert response.status_code == 400
+    assert "is not a email" in strip_punctuation(response.json["detail"].lower())
+
+@pytest.mark.parametrize('endpoint', ['/api/auth/signup/host','/api/auth/signup/coordinator'])
+def test_signup_with_missing_fields(client, endpoint):
+    '''
+    Attempting to sign up without all required fields returns
+    a bad request error.
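+
+    Each request below omits at least one required field.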
+    '''
+    BAD_SIGNUP_REQUESTS = [
+        {
+            'email': 'inbox928@placeholder.org',
+            'password': 'Fakepass%^&7!asdf'
+        },
+        {
+            'email': 'inbox928@placeholder.org',
+            'password': 'Fakepass%^&7!asdf',
+            'lastName': 'Josh'
+        },
+        {
+            'email': 'inbox928@placeholder.org',
+            'firstName': 'Josh',
+            'lastName': 'Douglas'
+        },
+        {
+            'password': 'Fakepass%^&7!asdf',
+            'firstName': 'Josh',
+            'lastName': 'Douglas'
+        },
+        {
+        }
+    ]
+
+    for req in BAD_SIGNUP_REQUESTS:
+        response = client.post(endpoint, json = req)
+        assert response.status_code == 400, req
+        assert 'detail' in response.json and 'required property' in response.json['detail'], req
+
+def test_refresh_without_cookie(client):
+    '''
+    Attempting to use the refresh endpoint without a session
+    cookie attached should return a 'cookie missing'
+    error instead of an authentication failure.
+    '''
+    response = client.get(
+        'api/auth/refresh'
+    )
+    assert response.status_code == 400
+    assert "missing cookie" in response.json['detail'].lower()
+
+def test_session_without_cookie(client):
+    '''
+    Attempting to use the session endpoint without a session
+    cookie attached should return a 'cookie missing'
+    error instead of an authentication failure.
+    '''
+    response = client.get(
+        'api/auth/session',
+        headers={"Authorization": "Bearer fake_jwt_token_here"}
+    )
+    assert response.status_code == 400
+    assert re.search(r"missing cookie.*session", response.json['detail'], flags=re.IGNORECASE)
+
+def test_incorrect_JWT_fail_auth(client):
+    '''
+    Using an incorrect JWT with the user endpoint returns
+    an authentication error.
+    '''
+    response = client.get(
+        'api/auth/user',
+        headers={"Authorization": "Bearer fake_jwt_token_here"}
+    )
+    assert response.status_code == 401
+    assert re.search(r"invalid.*token", response.json['message'], flags=re.IGNORECASE)
+
+def _signup_unconfirmed(signup_endpoint, client, is_mocking):
+    email = 'inbox928@placeholder.org'
+    password = 'Fakepass%^&7!asdf'
+    signup_response = client.post(
+        signup_endpoint,
+        json = {
+            'email': email,
+            'password': password,
+            "firstName": "valid name",
+            "lastName": "valid name"
+        }
+    )
+
+    assert signup_response.status_code == 200, "Signup attempt failed"
+    expect_user_confirmed = is_mocking
+    assert signup_response.json["UserConfirmed"] == expect_user_confirmed, (
+        "When using the real AWS service newly signed up users should not be confirmed. "
+        "Mocked users, however, should be auto-confirmed for convenience.")
+
+    signin_response = client.post(
+        '/api/auth/signin',
+        json = {
+            'email': email,
+            'password': password
+        }
+    )
+
+    if expect_user_confirmed:
+        assert signin_response.status_code == 200, "Mocked users should be able to signin without confirmation."
+        assert "token" in signin_response.json, "Signin succeeded but no token provided"
+    else:
+        assert signin_response.status_code == 401, (
+            "When using the real AWS service signin should fail since the user is unconfirmed.")
+        assert signin_response.json["code"] == "UserNotConfirmedException"
+
+def test_signup_unconfirmed_host(client, is_mocking):
+    '''
+    Use the host signup endpoint to
+    test that unconfirmed accounts cannot be used to log in to the API.
+    Mocked users are automatically confirmed.
+    '''
+    _signup_unconfirmed('/api/auth/signup/host', client, is_mocking)
+
+def test_signup_unconfirmed_coordinator(client, is_mocking):
+    '''
+    Use the coordinator signup endpoint to
+    test that unconfirmed accounts cannot be used to log in to the API.
+    Mocked users are automatically confirmed.
+    '''
+    _signup_unconfirmed('/api/auth/signup/coordinator', client, is_mocking)
+
+def test_signup_confirmed(client):
+    '''
+    Test that confirmed accounts can be used to log in to the API.
+    '''
+    EMAIL = 'inbox928@placeholder.org'
+    PASSWORD = 'Fakepass%^&7!asdf'
+    create_user(client, EMAIL, PASSWORD)
+
+    signin_response = client.post(
+        '/api/auth/signin',
+        json = {
+            'email': EMAIL,
+            'password': PASSWORD
+        }
+    )
+    assert signin_response.status_code == 200, "Signin attempt failed"
+    assert "token" in signin_response.json, "Signin succeeded but no token provided"
+    assert len(signin_response.json["token"]) > 0
+
+def test_weak_passwords_rejected(client):
+    '''
+    Test that attempting to sign up a new user with a password
+    that does not meet AWS Cognito password complexity requirements
+    returns a valid error.
+    '''
+    email = 'inbox928@placeholder.org'
+    password = 'weakpa55'
+    signup_response = client.post(
+        '/api/auth/signup/host',
+        json = {
+            'email': email,
+            'password': password,
+            'firstName': 'unique',
+            'lastName': 'name'
+        }
+    )
+
+    assert signup_response.status_code == 400, "The weak password worked for signup!"
+    assert "password did not conform with policy" in signup_response.json["message"].lower()
+
+# TODO: This test is currently disabled because the token returned from moto is different from the token returned from the real AWS service.
+@pytest.mark.skip(reason="The token returned from moto differs from the token returned by the real AWS service.")
+def test_basic_auth_flow(client):
+    '''
+    Create a new user, confirm it, sign in using the
+    /signin endpoint, and use the returned JWT to access
+    a protected endpoint.
+    '''
+    EMAIL = 'inbox928@placeholder.org'
+    PASSWORD = 'Fake4!@#$2589FFF'
+    FIRST_NAME = "PNAU"
+    LAST_NAME = "Hyperbolic"
+    create_user(client, EMAIL, PASSWORD, firstName=FIRST_NAME, lastName=LAST_NAME)
+
+    response = client.post(
+        '/api/auth/signin',
+        json = {
+            'email': EMAIL,
+            'password': PASSWORD
+        }
+    )
+
+    assert response.status_code == 200, "Signin failed"
+    assert 'token' in response.json, 'Signin succeeded but token field missing from response'
+    jwt = response.json['token']
+    assert jwt is not None, 'Signin succeeded but returned empty jwt'
+    assert len(jwt) > 0
+
+    response = client.get(
+        'api/auth/user',
+        headers={"Authorization": f"Bearer {jwt}"}
+    )
+
+    assert response.status_code == 200, '/user authentication failed'
+    assert 'user' in response.json
+    assert 'email' in response.json['user']
+    assert response.json['user']['email'] == EMAIL
+    assert response.json['user']['firstName'] == FIRST_NAME
+    assert response.json['user']['middleName'] == ''
+    assert response.json['user']['lastName'] == LAST_NAME
+
+def test_signin_returns_session_cookie(client):
+    '''
+    Test that the /signin endpoint returns a session cookie.
+    The session cookie stores the refresh token.
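+
+    Sketch of the check performed below: parse every Set-Cookie header with
+    werkzeug's parse_cookie and require exactly one 'session' entry.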
+ ''' + EMAIL = 'inbox928@placeholder.org' + PASSWORD = 'Fake4!@#$2589FFF' + create_user(client, EMAIL, PASSWORD) + response = client.post( + '/api/auth/signin', + json = { + 'email': EMAIL, + 'password': PASSWORD + } + ) + + assert response.status_code == 200, "Signin failed" + all_cookies = map(parse_cookie, response.headers.getlist("Set-Cookie")) + session_cookie_filter = filter(lambda cookie: "session" in cookie, all_cookies) + session_cookie = next(session_cookie_filter) + assert len(session_cookie["session"]) > 0, "Session cookie is empty" + with pytest.raises(StopIteration): + # Only one session cookie should be available + next(session_cookie_filter) + +def test_refresh_endpoint(client): + ''' + Test refreshing a JWT using the /refresh endpoint. + ''' + EMAIL = 'inbox928@placeholder.org' + PASSWORD = 'Fake4!@#$2589FFF' + create_and_signin_user(client, EMAIL, PASSWORD) + + # The test_client automatically attaches the session cookie to the request + # The session cookie stores the refresh token. + response = client.get( + 'api/auth/refresh', + ) + + assert response.status_code == 200, f"refresh failed: {response.json}" + assert 'token' in response.json, 'refresh succeeded but token field missing from response' + +def test_session_endpoint(client): + ''' + Test refreshing a JWT using the /session endpoint. + ''' + EMAIL = 'inbox928@placeholder.org' + PASSWORD = 'Fake4!@#$2589FFF' + jwt = create_and_signin_user(client, EMAIL, PASSWORD) + + # The test_client automatically attaches the session cookie to the request + # The session cookie stores the refresh token. + response = client.get( + 'api/auth/session', + headers={"Authorization": f"Bearer {jwt}"} + ) + + assert response.status_code == 200, f"session failed: {response.json}" + assert 'token' in response.json, 'session succeeded but token field missing from response' + +def test_user_signup_rollback(app): + """ Verify that a failed signup with cognito + reverts the local DB entry of the user's email.""" + + + rollback_email = 'test_user_signup_rollback@fake.com' + signup_response = app.test_client().post( + '/api/auth/signup/host', + json = { + 'email': rollback_email, + 'password': 'lol', + 'firstName': 'firstname', + 'lastName': 'lastname' + } + ) + assert signup_response.status_code == 400 + with pytest.raises(app.boto_client.exceptions.UserNotFoundException): + app.boto_client.admin_delete_user( + UserPoolId=app.config['COGNITO_USER_POOL_ID'], + Username=rollback_email + ) + with DataAccessLayer.session() as sess: + rolledback_user = sess.query(User).filter_by(email=rollback_email).first() + # This assertion will fail on `main` because no rollback is happening + assert rolledback_user is None \ No newline at end of file diff --git a/api-v2/tests/test_configs.py b/api-v2/tests/test_configs.py new file mode 100644 index 00000000..f2ad286c --- /dev/null +++ b/api-v2/tests/test_configs.py @@ -0,0 +1,207 @@ +import pytest +from pytest import MonkeyPatch +from sqlalchemy.engine import make_url + +from openapi_server.app import create_app, HUUFlaskApp, HUUConnexionApp +from openapi_server.configs.production import ProductionHUUConfig +from openapi_server.configs.development import DevelopmentHUUConfig +from openapi_server.models.database import DataAccessLayer + +def create_dev_app() -> HUUConnexionApp: + ''' + Create our app without reading the .env file. The DevelopmentHUUConfig + will read values from the environment, so monkey patching can be used + to set the values. 
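+
+    Usage sketch (values are illustrative):
+
+        monkeypatch.setenv("PORT", "9000")
+        app = create_dev_app()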
+ ''' + return create_app(DevelopmentHUUConfig()) + +def create_prod_app() -> HUUConnexionApp: + ''' + Create the production app without reading the .env file. + Fake production secrets must be set using monkey patching, otherwise + the production configuration will raise errors during its + internal validation. + ''' + return create_app(ProductionHUUConfig()) + +def test_create_app_default_dev(empty_db_session, empty_environment: MonkeyPatch): + ''' + Test that create_app with development config creates a Flask app with + a default development configuration, available as app.config. + ''' + connexion_app = create_app(DevelopmentHUUConfig()) + config = connexion_app.app.config + + assert "DATABASE_URL" in config + assert "PORT" in config + assert "HOST" in config + assert "TESTING" in config + assert "SECRET_KEY" in config + assert "ROOT_URL" in config + + for key in config: + assert "cognito" not in key.lower() + + assert make_url(config["DATABASE_URL"]) is not None + assert isinstance(config["PORT"], int) + assert config["PORT"] > 0 and config["PORT"] <= 65535 + assert config["ROOT_URL"] + +def test_flask_app_override(empty_db_session, empty_environment: MonkeyPatch): + ''' + Test that the create_app properly overrides the connexion app constructor + to return our custom application type that contains global configuration. + ''' + connexion_app = create_app(DevelopmentHUUConfig()) + assert isinstance(connexion_app, HUUConnexionApp) + assert isinstance(connexion_app.app, HUUFlaskApp) + +def test_missing_secret_throws_err(fake_prod_env: MonkeyPatch): + ''' + Test that failing to set a configuration field that is marked as a + secret field throws an error. + ''' + fake_prod_env.delenv("SECRET_KEY") + with pytest.raises(ValueError): + create_app(ProductionHUUConfig()) + +def test_hardcoding_secret_throws_err(fake_prod_env: MonkeyPatch): + def check_with_hardcoded_secret(**kwargs): + with pytest.raises(ValueError): + ProductionHUUConfig(**kwargs) + + check_with_hardcoded_secret(SECRET_KEY="My Hard Coded Fake Secret") + check_with_hardcoded_secret(COGNITO_CLIENT_ID="My Hard Coded Fake Secret") + check_with_hardcoded_secret(COGNITO_CLIENT_SECRET="My Hard Coded Fake Secret") + check_with_hardcoded_secret(COGNITO_REGION="My Hard Coded Fake Secret") + check_with_hardcoded_secret(COGNITO_REDIRECT_URI="My Hard Coded Fake Secret") + check_with_hardcoded_secret(COGNITO_USER_POOL_ID="My Hard Coded Fake Secret") + check_with_hardcoded_secret(COGNITO_ACCESS_ID="My Hard Coded Fake Secret") + check_with_hardcoded_secret(COGNITO_ACCESS_KEY="My Hard Coded Fake Secret") + +def test_config_reads_from_env(empty_db_session, empty_environment: MonkeyPatch): + ''' + Test that hard-coded values are overwritten using values from the system + environment variables. 
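+
+    Sketch: set FLASK_DEBUG, PORT, and SECRET_KEY in the environment, pass
+    different hard-coded values to DevelopmentHUUConfig, and expect the
+    environment values to win.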
+ ''' + env_port = 9000 + hardcoded_port = 7777 + env_DEBUG = False + hardcoded_DEBUG = True + env_secret = "Extremely Cryptographically Insecure Key" + hardcoded_secret = "Equally Insecure Key" + + empty_environment.setenv("FLASK_DEBUG", str(env_DEBUG)) + empty_environment.setenv("PORT", str(env_port)) + empty_environment.setenv("SECRET_KEY", env_secret) + + config = DevelopmentHUUConfig( + FLASK_DEBUG=hardcoded_DEBUG, + PORT=hardcoded_port, + SECRET_KEY=hardcoded_secret + ) + + assert config.FLASK_DEBUG == env_DEBUG + assert config.PORT == env_port + assert config.SECRET_KEY == env_secret + + app = create_app(config).app + app_config = app.config + + assert app_config["DEBUG"] == env_DEBUG + assert app_config["PORT"] == env_port + assert app_config["SECRET_KEY"] == env_secret + assert app.is_debug_app == env_DEBUG + +def test_invalid_port_throws(empty_environment: MonkeyPatch): + empty_environment.setenv("PORT", "-1") + with pytest.raises(ValueError): + create_dev_app() + empty_environment.setenv("PORT", "66000") + with pytest.raises(ValueError): + create_dev_app() + +def test_env_var_bool_parsing(empty_db_session, empty_environment: MonkeyPatch): + def check_bool_parsing(actual: str, expected: bool, msg: str): + empty_environment.setenv("FLASK_DEBUG", actual) + assert create_dev_app().app.config["FLASK_DEBUG"] == expected, msg + + check_bool_parsing("True", True, "match case") + check_bool_parsing("true", True, "lower case") + check_bool_parsing("1", True, "one") + check_bool_parsing("tRuE", True, "mixed case") + check_bool_parsing(" True ", True, "extra padding") + + check_bool_parsing("False", False, "match case") + check_bool_parsing("false", False, "lower case") + check_bool_parsing("0", False, "zero") + check_bool_parsing("fAlSe", False, "mixed case") + check_bool_parsing(" False ", False, "extra padding") + + empty_environment.setenv("FLASK_DEBUG", "") + with pytest.raises(ValueError): + create_dev_app() + +def test_database_url_config(empty_db_session, empty_environment: MonkeyPatch): + ''' + Test that setting the DATABASE_URL initializes the database + using the specified URL. 
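+
+    Sketch: DATABASE_URL=sqlite:///:memory: should leave
+    DataAccessLayer._engine pointing at an in-memory SQLite database.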
+ ''' + empty_environment.setenv("DATABASE_URL", "sqlite:///:memory:") + create_dev_app() + db_engine = DataAccessLayer._engine + assert db_engine is not None + assert db_engine.url.database == ":memory:" + +def test_root_url_required(empty_environment: MonkeyPatch): + with pytest.raises(ValueError, match="ROOT_URL"): + create_app(DevelopmentHUUConfig( + ROOT_URL="" + )) + + with pytest.raises(ValueError, match="ROOT_URL"): + create_app(DevelopmentHUUConfig( + ROOT_URL=None + )) + + empty_environment.setenv("ROOT_URL", "") + with pytest.raises(ValueError, match="ROOT_URL"): + create_app(DevelopmentHUUConfig()) + +def test_prod_app_disables_development(empty_db_session, fake_prod_env: MonkeyPatch): + def check_development_disabled(enable_testing: bool, enable_debug: bool): + fake_prod_env.setenv("FLASK_DEBUG", str(enable_debug)) + fake_prod_env.setenv("TESTING", str(enable_testing)) + if enable_debug or enable_testing: + with pytest.raises(ValueError): + create_prod_app() + else: + create_prod_app() + + check_development_disabled(True, True) + check_development_disabled(True, False) + check_development_disabled(False, True) + check_development_disabled(False, False) + +def test_prod_secret_key_requirements(empty_db_session, fake_prod_env: MonkeyPatch): + def check_insecure_secret(secret: str): + fake_prod_env.setenv("SECRET_KEY", secret) + with pytest.raises(ValueError): + create_prod_app() + def check_secure_secret(secret: str): + fake_prod_env.setenv("SECRET_KEY", secret) + create_prod_app() + + check_insecure_secret("hi") + check_insecure_secret("") + check_insecure_secret("aaaaaaaaaaaaaaaaaaaaaaaaaa") + check_insecure_secret("asdfasdfasdfasdfasdfasdfa") + check_insecure_secret("12312132132132132132132132") + check_insecure_secret("123456789asdfqwe") + check_insecure_secret("123456789ASDFQWERTG") + + check_secure_secret("3-nTeYX6Zi2T6XlvN2m93cNdDHSB6NC0") + check_secure_secret("QiWYHC1St0pPOEXY1ChiwKrYLJQr9yWH") + check_secure_secret("wd-4FBhuf2TYP4T6FrAxaCvRLItXlIK5") + check_secure_secret("omMTDTPUXTcizyka2AtOg570XqWFlFfP") + check_secure_secret("iEIGSrC6jSh6QdLNib0io8sz_60lZ_BE") \ No newline at end of file diff --git a/api-v2/tests/test_forms_repo.py b/api-v2/tests/test_forms_repo.py new file mode 100644 index 00000000..14f665a1 --- /dev/null +++ b/api-v2/tests/test_forms_repo.py @@ -0,0 +1,127 @@ +from types import MappingProxyType + +from openapi_server.repositories.forms import FormsRepository +from openapi_server.repositories.user_repo import UserRepository, UserRole + +TEST_FORM_READ_ONLY = MappingProxyType({ + "title": "Employee Onboarding", + "description": "Collect necessary employee data.", + "field_groups": [ + { + "title": "Personal Details", + "description": "Please enter your personal details.", + "fields": [ + { + "ref": "position", + "properties": { + "description": "Position in the company", + "field_type": "dropdown", + "choices": ['Manager', 'Developer', 'Designer'], + }, + "validations": { + "required": True, + "max_length": 12 + } + }, + { + "ref": "service_length", + "properties": { + "description": "Years in the company", + "field_type": "number", + "choices": None, + }, + "validations": { + "required": False, + "max_length": None + } + } + ] + }, + { + "title": "Second Group", + "description": "A second field group.", + "fields": [ + { + "ref": "start date", + "properties": { + "description": "Start date", + "field_type": "date", + "choices": "11-22-2005", + }, + "validations": { + "required": True, + "max_length": 12 + } + } + ] + } + ] +}) + +def 
assert_form_equal(actual_form: dict, expected_form: dict): + ''' + Do a deep equality check of a form, excluding dynamically + assigned values like timestamps and primary key ids. + ''' + actual_copy = actual_form.copy() + del actual_copy['created_at'] + for group in actual_copy['field_groups']: + del group['form'] + for field in group['fields']: + del field['field_id'] + del field['group'] + + assert actual_copy == expected_form + +def test_add_form_valid_json(empty_db_session_provider): + form_json = dict(TEST_FORM_READ_ONLY) + + form_repo = FormsRepository(empty_db_session_provider.session()) + created_form_id = form_repo.add_form(form_json) + retrieved_form = form_repo.get_form_json(created_form_id) + + assert_form_equal(retrieved_form, form_json) + +def test_add_get_responses(empty_db_session_provider): + with empty_db_session_provider.session() as session: + user_repo = UserRepository(session) + form_repo = FormsRepository(session) + + user_repo.add_user('fake@email.com', UserRole.COORDINATOR, 'firstname') + user_id = user_repo.get_user_id('fake@email.com') + created_form_id = form_repo.add_form(TEST_FORM_READ_ONLY) + retrieved_form = form_repo.get_form_json(created_form_id) + + def _get_field_id(lcl_form, ref): + for group in lcl_form['field_groups']: + for field in group['fields']: + if field['ref'] == ref: + return int(field['field_id']) + raise ValueError(f'ref {ref} not found in test form') + + expected_responses = [ + { + "user_id": user_id, + "field_id": _get_field_id(retrieved_form, 'position'), + "answer_text": "Designer" + }, + { + "user_id": user_id, + "field_id": _get_field_id(retrieved_form, 'service_length'), + "answer_text": "5" + }, + { + "user_id": user_id, + "field_id": _get_field_id(retrieved_form, 'start date'), + "answer_text": '2024-05-19' + } + ] + form_repo.add_user_responses(user_id, expected_responses) + + retrieved_answers = form_repo.get_user_responses(user_id, created_form_id) + + assert len(retrieved_answers) == 3 + for expected, actual in zip(expected_responses, retrieved_answers): + assert expected['answer_text'] == actual['answer_text'] + assert expected['user_id'] == actual['user']['id'] + assert expected['field_id'] == actual['field']['field_id'] \ No newline at end of file diff --git a/api-v2/tests/test_forms_schema.py b/api-v2/tests/test_forms_schema.py new file mode 100644 index 00000000..66aef7b3 --- /dev/null +++ b/api-v2/tests/test_forms_schema.py @@ -0,0 +1,238 @@ +from types import MappingProxyType +import pytest +from marshmallow import ValidationError + +from openapi_server.models.schema import ( + form_schema, + FieldSchema, + FieldValidationsSchema, + FieldPropertiesSchema, + FieldGroupSchema +) + +VALID_FORM_JSON = MappingProxyType({ + "title": "Employee Onboarding", + "description": "Collect necessary employee data.", + "field_groups": [ + { + "title": "Personal Details", + "description": "Please enter your personal details.", + "fields": [ + { + "ref": "position", + "properties": { + "description": "Position in the company", + "field_type": "dropdown", + "choices": ['Manager', 'Developer', 'Designer'], + }, + "validations": { + "required": True, + "max_length": 12 + } + }, + { + "ref": "service_length", + "properties": { + "description": "Years in the company", + "field_type": "number", + "choices": None, + }, + "validations": { + "required": False, + "max_length": None + } + } + ] + }, + { + "title": "Second Group", + "description": "A second field group.", + "fields": [ + { + "ref": "start date", + "properties": { + "description": 
"Start date", + "field_type": "date", + "choices": "11-22-2005", + }, + "validations": { + "required": True, + "max_length": 12 + } + } + ] + } + ] + } + ) + + +def test_serialize_form_no_questions(empty_db_session): + form_json = {"title": "mytitle", "description": "mydesc", "field_groups": []} + form = form_schema.load(form_json, session=empty_db_session) + + assert "mytitle" == form.title + assert "mydesc" == form.description + assert list() == form.field_groups + +def test_deserialize_field_validations(empty_db_session): + validation_json = { + "required": True, + "max_length": None + } + validation = FieldValidationsSchema().load(validation_json, session=empty_db_session) + assert validation.required + assert validation.max_length is None + +def test_deserialize_field_property(empty_db_session): + property_json = { + "description": "sample desc", + "field_type": "long_text", + "choices": ['one', 'two','three'] + } + property = FieldPropertiesSchema().load(property_json, session=empty_db_session) + assert property_json["field_type"] == property.field_type + assert property_json["description"] == property.description + +def test_deserialize_field(empty_db_session): + single_field_json = { + "ref": "position", + "properties": { + "description": "Position in the company", + "field_type": "dropdown", + "choices": ['Manager', 'Developer', 'Designer'], + }, + "validations": { + "required": True, + "max_length": 12 + } + } + field = FieldSchema().load(single_field_json, session=empty_db_session) + assert single_field_json["ref"] == field.ref + assert single_field_json["properties"]["description"] == field.properties.description + assert single_field_json["properties"]["choices"] == field.properties.choices + assert single_field_json["validations"]["max_length"] == field.validations.max_length + assert field.validations.required + +def test_deserialize_fields(empty_db_session): + multiple_fields = [ + { + "ref": "position", + "properties": { + "description": "Position in the company", + "field_type": "dropdown", + "choices": ['Manager', 'Developer', 'Designer'], + }, + "validations": { + "required": True, + "max_length": 12 + } + }, + { + "ref": "service_length", + "properties": { + "description": "Years in the company", + "field_type": "number", + "choices": None, + }, + "validations": { + "required": False, + "max_length": None + } + } + ] + fields = FieldSchema(many=True).load(multiple_fields, session=empty_db_session) + assert 2 == len(fields) + for expected, actual in zip(multiple_fields, fields): + assert expected['properties']['description'] == actual.properties.description + assert expected['properties']['field_type'] == actual.properties.field_type + +def test_deserialize_field_group(empty_db_session): + group_json = [ + { + "title": "Personal Details", + "description": "Please enter your personal details.", + "fields": [ + { + "ref": "position", + "properties": { + "description": "Position in the company", + "field_type": "dropdown", + "choices": ['Manager', 'Developer', 'Designer'], + }, + "validations": { + "required": True, + "max_length": 12 + } + }, + { + "ref": "service_length", + "properties": { + "description": "Years in the company", + "field_type": "number", + "choices": None, + }, + "validations": { + "required": False, + "max_length": None + } + } + ] + }, + { + "title": "Second Group", + "description": "A second field group.", + "fields": [ + { + "ref": "start date", + "properties": { + "description": "Start date", + "field_type": "date", + "choices": "11-22-2005", + }, 
+ "validations": { + "required": True, + "max_length": 12 + } + } + ] + } + ] + groups = FieldGroupSchema(many=True).load(group_json, session=empty_db_session) + assert len(group_json) == len(groups) + for expected_group, actual_group in zip(group_json, groups): + assert expected_group['title'] == actual_group.title + assert expected_group['description'] == actual_group.description + for expected_fields, actual_fields in zip(expected_group['fields'], actual_group.fields): + assert expected_fields['ref'] == actual_fields.ref + assert expected_fields['validations']['required'] == actual_fields.validations.required + assert expected_fields['validations']['max_length'] == actual_fields.validations.max_length + assert expected_fields['properties']['description'] == actual_fields.properties.description + assert expected_fields['properties']['field_type'] == actual_fields.properties.field_type + assert expected_fields['properties']['choices'] == actual_fields.properties.choices + +def test_deserialize_form_happypath(empty_db_session): + form_json = dict(VALID_FORM_JSON) + form = form_schema.load(form_json, session=empty_db_session) + assert form_json["title"] == form.title + assert form_json["description"] == form.description + assert 2 == len(form.field_groups) + for expected, actual in zip(form_json["field_groups"], form.field_groups): + assert expected["title"] == actual.title + assert expected["description"] == actual.description + assert len(expected["fields"]) == len(actual.fields) + + +def test_deserialize_form_extra_key(empty_db_session): + invalid_form_json = dict(VALID_FORM_JSON) + invalid_form_json['extra_key'] = 'extra_value' + + with pytest.raises(ValidationError, match=r"Unknown field"): + form_schema.load(invalid_form_json, session=empty_db_session) + +def test_deserialize_form_missing_key(empty_db_session): + invalid_form_json = dict(VALID_FORM_JSON) + del invalid_form_json['title'] + + with pytest.raises(ValidationError, match=r"Missing data for required field"): + form_schema.load(invalid_form_json, session=empty_db_session) \ No newline at end of file diff --git a/api-v2/tests/test_host_controller.py b/api-v2/tests/test_host_controller.py new file mode 100644 index 00000000..bca493f7 --- /dev/null +++ b/api-v2/tests/test_host_controller.py @@ -0,0 +1,66 @@ +from openapi_server.models.database import User, DataAccessLayer +from openapi_server.repositories.user_repo import UserRepository +from openapi_server.models.user_roles import UserRole + +def test_signup_host(client): + """ + Test creating a new host using a simulated post request. Verify that the + response is correct, and that the app database was properly updated. 
+ """ + + NEW_HOST = { + "email" : "test@email.com", + "password": "Test!@123", + "firstName": "Josh", + "middleName": "Ray", + "lastName": "Douglas" + } + response = client.post( + '/api/auth/signup/host', + json=NEW_HOST) + + assert response.status_code == 200, f'Response body is: {response.json}' + + # Make sure the database was updated to persist the values + with DataAccessLayer.session() as session: + user_repo = UserRepository(session) + test_host = user_repo.get_user(NEW_HOST['email']) + assert test_host is not None + assert test_host.email == NEW_HOST['email'] + assert test_host.firstName == NEW_HOST['firstName'] + assert test_host.middleName == NEW_HOST['middleName'] + assert test_host.lastName == NEW_HOST['lastName'] + assert test_host.role.name == UserRole.HOST.value + +def test_get_hosts(client): + """ + Test that get_hosts returns all hosts available in the database. The endpoint + should properly filter out all other user roles. + """ + # Arrange + with DataAccessLayer.session() as session: + user_repo = UserRepository(session) + user_repo.add_user(email="host0@email.com", role=UserRole.HOST, firstName="host0", middleName = None, lastName="host_last0") + user_repo.add_user(email="host1@email.com", role=UserRole.HOST, firstName="host1", middleName = None, lastName="host_last1") + user_repo.add_user(email="host2@email.com", role=UserRole.HOST, firstName="host2", middleName = None, lastName="host_last2") + user_repo.add_user(email="guest1@email.com", role=UserRole.GUEST, firstName="guest0", middleName = None, lastName="guest_last0") + user_repo.add_user(email="Admin2@email.com", role=UserRole.ADMIN, firstName="Admin0", middleName = None, lastName="cdmin_last0") + user_repo.add_user(email="Coordinator3@email.com", role=UserRole.COORDINATOR, firstName="coodinator0", middleName = None, lastName="coordinator_last0") + + # Act + response = client.get('/api/host') + + # Assert + assert response.status_code == 200, f'Response body is: {response.json}' + assert isinstance(response.json, list) + assert len(response.json) == 3 + host_emails_set = set() + for host in response.json: + assert 'host' in host["email"] + assert 'host' in host["firstName"] + assert 'host_last' in host["lastName"] + assert host["role"]["name"] == UserRole.HOST.value + assert host["middleName"] == None + host_emails_set.add(host["email"]) + + assert len(host_emails_set) == 3, "Duplicate hosts were returned!" \ No newline at end of file diff --git a/api-v2/tests/test_mocking.py b/api-v2/tests/test_mocking.py new file mode 100644 index 00000000..ce7a5a70 --- /dev/null +++ b/api-v2/tests/test_mocking.py @@ -0,0 +1,208 @@ +import pytest +import json +import requests +from pathlib import Path + +from openapi_server.configs.mock_aws import AWSTemporaryUserpool, AWSMockService +from tests.setup_utils import signup_user, signin_user + +def get_user_pools(boto_client): + """Helper function to count the number of user pools.""" + MAXRESULT = 60 + response = boto_client.list_user_pools(MaxResults=60) + result = response['UserPools'] + assert len(result) < MAXRESULT, ("Number of userpools exceeds 60. " + + "To get an accurate count delete user pools or implement pagination.") + return result + +def delete_temporary_userpools(app): + ''' + Delete all of the AWS Cognito temporary userpools, except + for the one in use by the current app. + + Please proceed with caution before using or modifying + this method because production userpools can be deleted + if the name is modified (unless delete protection is in place). 
+    '''
+    cur_app_poolid = app.config["COGNITO_USER_POOL_ID"]
+    for pool in get_user_pools(app.boto_client):
+        if (AWSTemporaryUserpool.is_temp_pool(pool["Name"])
+                and pool["Id"] != cur_app_poolid):
+            app.boto_client.delete_user_pool(
+                UserPoolId=pool["Id"]
+            )
+
+def count_user_pools(boto_client):
+    return len(get_user_pools(boto_client))
+
+def tmp_userpool_count(boto_client):
+    user_pools = get_user_pools(boto_client)
+    return sum(AWSTemporaryUserpool.is_temp_pool(pool["Name"]) for pool in user_pools)
+
+def count_users_in_userpool(app):
+    user_count = 0
+    pagination_token = None
+    userpool_id = app.config["COGNITO_USER_POOL_ID"]
+    while True:
+        if pagination_token:
+            response = app.boto_client.list_users(UserPoolId=userpool_id, PaginationToken=pagination_token)
+        else:
+            response = app.boto_client.list_users(UserPoolId=userpool_id)
+
+        user_count += len(response['Users'])
+
+        pagination_token = response.get('PaginationToken')
+        if not pagination_token:
+            break
+
+    return user_count
+
+def test_AWSTemporaryUserpool_cleanup(app):
+    '''
+    Test that the temporary userpool is deleted when the
+    AWSTemporaryUserpool context manager exits.
+    '''
+    initial_count = count_user_pools(app.boto_client)
+
+    # Using the context manager to automatically create and destroy the user pool
+    with AWSTemporaryUserpool(app):
+        assert count_user_pools(app.boto_client) == (initial_count + 1), "Userpool was not created!"
+
+    # After exiting the block, the user pool should be destroyed
+    final_count = count_user_pools(app.boto_client)
+
+    assert initial_count == final_count, "User pool was not properly deleted"
+
+def test_AWSTemporaryUserpool_is_temp_pool_strs():
+    '''
+    Test that AWSTemporaryUserpool.is_temp_pool strictly
+    matches the temporary user pool naming format. These tests
+    safeguard against accidentally removing a production user
+    pool in the event of a resource leak.
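+
+    The expected format is 'TestUserPool' immediately followed by a UUID4,
+    e.g. TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a.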
+ ''' + istmp = AWSTemporaryUserpool.is_temp_pool + assert istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "t1" + assert istmp("TestUserPoola908dc0b-afb6-4f8a-aa50-96ca9b813b05"), "t2" + assert istmp("TestUserPoolca0ddbf1-53fe-4bdc-bbf7-262e97d32399"), "t3" + assert istmp("TestUserPool6eaa346c-3b55-456d-86b5-2f48ffee0b9a"), "t4" + assert istmp("TestUserPoolced3909b-36b9-4479-8584-087cfe8d7479"), "t5" + assert istmp("TestUserPool46eec7e1-10fb-46fe-8303-46310c63406c"), "t6" + + assert not istmp(""), "f1" + assert not istmp("Home Unite Us"), "f2" + assert not istmp("testUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "f3" + assert not istmp("estUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "f4" + assert not istmp("TestuserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "f5" + assert not istmp("TestUserPool a809bcbf-800a-4da0-870f-a1205e8bf40a"), "f6" + assert not istmp("TestUserPool_a809bcbf-800a-4da0-870f-a1205e8bf40a"), "f7" + assert not istmp("TestUserPoola809bcbf_800a-4da0-870f-a1205e8bf40a"), "f8" + assert not istmp("TestUserPoola809bcbf-800ab-4da0-870f-a1205e8bf40a"), "f9" + assert not istmp("TestUserPoola809bcbf-800a-4da01-870f-a1205e8bf40a"), "f9" + assert not istmp("TestUserPoola809bcbf-800a-4da0a-870f-a1205e8bf40a"), "f10" + assert not istmp("TestUserPoola809bcbf-800a-4da0-870f1-a1205e8bf40a"), "f11" + assert not istmp("TestUserPoola809bcbf-800-4da0-870f-a1205e8bf40a"), "f12" + assert not istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a1"), "f13" + assert not istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40aa"), "f14" + assert not istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40"), "f15" + + with pytest.raises(TypeError): + istmp(None) + +def test_AWSTemporaryUserpool_is_temp_pool_real(app): + ''' + Test that is_temp_pool properly identifies newly + created temporary userpools. + ''' + def _poolset(userpools: dict) -> set: + return set((pool["Name"] for pool in userpools)) + + existing_pools = _poolset(get_user_pools(app.boto_client)) + with AWSTemporaryUserpool(app): + new_pool = _poolset(get_user_pools(app.boto_client)).difference(existing_pools) + assert len(new_pool) == 1, "More than one temp user pool was created!" + assert AWSTemporaryUserpool.is_temp_pool(new_pool.pop()) + +def test_AWSTemporaryUserpool_count(app): + ''' + Test that the application and/or test suites do not + have a resource leak that is polluting the AWS Cognito + user pools with temporary user pools. We'll set the limit + to 30 userpools. If this number is exceeded then the developer + needs to delete the userpools. + ''' + # This function can be used to cleanup leaked + # temporary tools. Leave it commented out before + # pushing, however, to make sure we can detect + # resource leaks when they occur. + #delete_temporary_userpools(app) + assert tmp_userpool_count(app.boto_client) <= 30, ("AWS Cognito has a large number of temporary " + "userpools. We may have a userpool resource leak. " + "Delete the unused pools and search for a resource leak.") + +def test_AWSMockService(app, is_mocking): + ''' + Test that starting the AWSMocking service properly + activates and deactivates the moto mocking service. + + Ensure that calls to AWSCognito are properly + intercepted. 
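+
+    Sketch: when mocking is active, fetching a cognito-idp jwks.json URL
+    should return moto's bundled fake public key rather than reaching AWS.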
+ ''' + # Moto uses regex patterns to intercept all cognito public key requests + moto_fake_key_url = "https://cognito-idp.us-west-2.amazonaws.com/somekey/.well-known/jwks.json" + if is_mocking: + import moto + moto_dir = Path(moto.__file__).parent + jwks_file_path = moto_dir / "cognitoidp" / "resources" / "jwks-public.json" + assert jwks_file_path.is_file(), "Moto public key not found. Can't proceed with test" + with open(jwks_file_path, 'r') as file: + moto_jwks = json.load(file) + + actual_jwks = get_json_from_url(moto_fake_key_url) + assert actual_jwks is not None + assert actual_jwks == moto_jwks, ("The mocking service does not appear to have been started correctly " + "moto should intercept calls to AWS cognito jwks.json and return the " + "fake public key stored in the moto resources folder.") + else: + # If mocking is not enabled then our fake url request will fail + with pytest.raises(requests.exceptions.HTTPError): + get_json_from_url(moto_fake_key_url) + +def get_json_from_url(url): + response = requests.get(url) + # Raises an HTTPError if the response was an unsuccessful status code + response.raise_for_status() + return response.json() + +def test_signup_confirmation(client, is_mocking): + ''' + Test that the signup confirmation works with any confirmation + code when authentication mocking is enabled. + + When mocking is disabled a real confirmation code will be + required, so the confirmation should fail. + ''' + email = 'nottaemail@gmail.com' + signup_user(client.application, email, 'Passw0rd!') + + response = client.post( + '/api/auth/confirm', + json = { + 'email': email, + 'code': 'fakeCode' + } + ) + + if is_mocking: + assert response.status_code == 200 + else: + assert response.status_code == 401 + assert "invalid code" in response.json["message"].lower() + +def test_mock_config_includes_test_users(client, is_mocking): + ''' + Test that the mock configuration includes test users. 
+    '''
+    if not is_mocking:
+        pytest.skip("Test only applies to mock configurations")
+    for user in AWSMockService.TEST_USERS:
+        signin_user(client, user["email"], user["password"])
\ No newline at end of file
diff --git a/api-v2/tests/test_schema.py b/api-v2/tests/test_schema.py
new file mode 100644
index 00000000..3fb2d01f
--- /dev/null
+++ b/api-v2/tests/test_schema.py
@@ -0,0 +1,147 @@
+import json
+import pytest
+from marshmallow.exceptions import ValidationError
+
+from openapi_server.models.schema import user_schema, users_schema, HousingProgramServiceProviderSchema
+from openapi_server.models.database import User, Role
+from openapi_server.models.user_roles import UserRole
+from openapi_server.repositories.user_repo import UserRepository
+
+def test_housing_program_service_provider():
+    test_housing_program_service_provide_string = "{\"id\": 5, \"provider_name\": \"test\"}"
+
+    housing_program_service_provider = HousingProgramServiceProviderSchema(many=True)
+
+    housing_program_service_provider.fields["id"] = 5
+    housing_program_service_provider.fields["provider_name"] = "test"
+
+    assert housing_program_service_provider is not None, "HousingProgramServiceProviderSchema is null"
+
+    assert housing_program_service_provider.fields["id"] == 5, "HousingProgramServiceProviderSchema id field did not match what was input"
+
+    assert housing_program_service_provider.fields["provider_name"] == "test", "HousingProgramServiceProviderSchema provider_name field did not match what was input"
+
+    jsonresult = json.dumps(housing_program_service_provider.fields)
+
+    assert jsonresult == test_housing_program_service_provide_string, "HousingProgramServiceProvider json did not match test string"
+
+def test_deserialize_host(empty_db_session):
+    '''
+    Verify that the host schema can be deserialized from json.
+    '''
+    json_from_request = '{"role": {"name": "Host"}, "email": "realemail@fakedomain.com", "firstName": "first", "middleName": "middle", "lastName": "last"}'
+    host = user_schema.load(json.loads(json_from_request), session=empty_db_session)
+
+    assert host is not None, "Host is null"
+    assert isinstance(host, User), "host is not of type User"
+    assert host.firstName == 'first'
+    assert host.middleName == 'middle'
+    assert host.lastName == 'last'
+    assert host.email == 'realemail@fakedomain.com'
+    assert isinstance(host.role, Role)
+    assert host.role.name == 'Host'
+
+def test_serialize_host(empty_db_session):
+    '''
+    Verify that the host schema can be serialized to json.
+    '''
+    user_repo = UserRepository(empty_db_session)
+    new_host = user_repo.add_user("realemail@fakedomain.com", UserRole.HOST, "first", "middle", "last")
+    json_from_host = user_schema.dump(new_host)
+    assert json_from_host is not None, "Json from host is null"
+    assert 'name' in json_from_host["role"]
+    assert json_from_host['role']['name'] == 'Host'
+    assert json_from_host['firstName'] == 'first'
+    assert json_from_host['middleName'] == 'middle'
+    assert json_from_host['lastName'] == 'last'
+    assert json_from_host['email'] == "realemail@fakedomain.com"
+    assert 'id' not in json_from_host, "The user Id should be excluded from serialization"
+    assert 'role_id' not in json_from_host, "The role Id should be excluded from serialization"
+
+def test_deserialize_multiplehost(empty_db_session):
+    '''
+    Verify that the user schema can deserialize multiple users.
+ ''' + json_from_request = '[{"role": {"name": "Host"}, "email": "realemail@fakedomain.com0", "firstName": "first0", "middleName": "middle0", "lastName": "last0"}, ' + \ + ' {"role": {"name": "Guest"}, "email": "realemail@fakedomain.com1", "firstName": "first1", "middleName": "middle1", "lastName": "last1"}, ' + \ + ' {"role": {"name": "Admin"}, "email": "realemail@fakedomain.com2", "firstName": "first2", "middleName": "middle2", "lastName": "last2"}, ' + \ + ' {"role": {"name": "Coordinator"}, "email": "realemail@fakedomain.com3", "firstName": "first3", "middleName": "middle3", "lastName": "last3"}, ' + \ + ' {"role": {"name": "Guest"}, "email": "realemail@fakedomain.com4", "firstName": "first4", "middleName": "middle4", "lastName": "last4"} ]' + users = users_schema.load(json.loads(json_from_request), session=empty_db_session) + + expected_role = ("Host", "Guest", "Admin", "Coordinator", "Guest") + assert len(users) == len(expected_role) + for idx, (actual_user, expected_role) in enumerate(zip(users, expected_role)): + assert actual_user is not None + assert actual_user.role.name == expected_role + assert actual_user.email == f"realemail@fakedomain.com{idx}" + assert actual_user.firstName == f"first{idx}" + assert actual_user.middleName == f"middle{idx}" + assert actual_user.lastName == f"last{idx}" + +def test_serialize_multiplehost(empty_db_session): + ''' + Verify that the host schema can be serialized to multiple hosts. + ''' + user_repo = UserRepository(empty_db_session) + hosts_to_respond_with = [ + user_repo.add_user("realemail@fakedomain.com0", UserRole.HOST, "first0", "middle0", "last0"), + user_repo.add_user("realemail@fakedomain.com1", UserRole.GUEST, "first1", "middle1", "last1"), + user_repo.add_user("realemail@fakedomain.com2", UserRole.ADMIN, "first2", "middle2", "last2"), + user_repo.add_user("realemail@fakedomain.com3", UserRole.COORDINATOR, "first3", "middle3", "last3"), + user_repo.add_user("realemail@fakedomain.com4", UserRole.GUEST, "first4", "middle4", "last4") + ] + users = users_schema.dump(hosts_to_respond_with) + + expected_role = ("Host", "Guest", "Admin", "Coordinator", "Guest") + assert len(users) == len(expected_role) + for idx, (actual_user, expected_role) in enumerate(zip(users, expected_role)): + assert actual_user is not None + assert actual_user["role"]["name"] == expected_role + assert actual_user["email"] == f"realemail@fakedomain.com{idx}" + assert actual_user["firstName"] == f"first{idx}" + assert actual_user["middleName"] == f"middle{idx}" + assert actual_user["lastName"] == f"last{idx}" + +def test_deserializejson_extrafield_noerror(empty_db_session): + ''' + Verify that json with extra fields will not raise a validation error. + This allows us to map request json directly into model objects. + ''' + json_from_request = '{"extra_field": "extra", "role": {"name": "Host"}, "email": "realemail@fakedomain.com", "firstName": "first", "lastName": "last"}' + data_from_request = json.loads(json_from_request) + user = user_schema.load(data_from_request, session=empty_db_session) + assert user.role.name == UserRole.HOST.value + assert user.email == "realemail@fakedomain.com" + assert user.firstName == "first" + assert user.middleName == None + assert user.lastName == "last" + +def test_deserializeuser_missingfield_error(empty_db_session): + ''' + Verify that json with a missing field will raise a validation error. 
+    '''
+    # Missing First name
+    json_from_request = '{"role": {"name": "Host"}, "email": "realemail@fakedomain.com", "middleName": "middle", "lastName": "last"}'
+    data_from_request = json.loads(json_from_request)
+    with pytest.raises(ValidationError):
+        user_schema.load(data_from_request, session=empty_db_session)
+
+def test_deserializeuser_missingrelationship_error(empty_db_session):
+    '''
+    Verify that json with a missing role relationship will raise a
+    validation error.
+    '''
+    # Missing role
+    json_from_request = '{"email": "realemail@fakedomain.com", "firstName": "first", "middleName": "middle", "lastName": "last"}'
+    data_from_request = json.loads(json_from_request)
+    with pytest.raises(ValidationError):
+        user_schema.load(data_from_request, session=empty_db_session)
+
+def test_deserialize_nonexistantrole_err(empty_db_session):
+    '''
+    Verify that json referencing a role that does not exist will
+    raise a validation error.
+    '''
+    json_from_request = '{"role": {"name": "FakeRole"}, "email": "realemail@fakedomain.com", "firstName": "first", "middleName": "middle", "lastName": "last"}'
+    data_from_request = json.loads(json_from_request)
+    with pytest.raises(ValidationError, match="Role FakeRole does not exist"):
+        user_schema.load(data_from_request, session=empty_db_session)
diff --git a/api-v2/tests/test_service_provider_controller.py b/api-v2/tests/test_service_provider_controller.py
new file mode 100644
index 00000000..00ace823
--- /dev/null
+++ b/api-v2/tests/test_service_provider_controller.py
@@ -0,0 +1,210 @@
+from __future__ import absolute_import
+
+from openapi_server.repositories.service_provider_repository import HousingProviderRepository
+from tests.setup_utils import populate_test_database
+
+def test_create_service_provider(client):
+    """
+    Test creating a new service provider using a
+    simulated post request. Verify that the
+    response is correct, and that the app
+    database was properly updated.
+    """
+    REQUESTED_PROVIDER = {
+        "provider_name": "-123ASCII&"
+    }
+    response = client.post(
+        '/api/serviceProviders',
+        json=REQUESTED_PROVIDER)
+
+    assert response.status_code == 201, f'Response body is: {response.json}'
+    assert 'provider_name' in response.json
+    assert 'id' in response.json
+    assert response.json['provider_name'] == REQUESTED_PROVIDER['provider_name']
+
+    db_entry = HousingProviderRepository().get_service_provider_by_id(response.json['id'])
+    assert db_entry is not None, "Request succeeded but the database was not updated!"
+    assert db_entry.provider_name == REQUESTED_PROVIDER['provider_name']
+
+def test_create_with_extra_data(client):
+    '''
+    Test that sending a create POST request with extra
+    json entries in the body does not disrupt the update.
+
+    We should safely ignore additional fields.
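+
+    Sketch: POST a valid provider_name plus junk keys (extra_int, extra_bool,
+    extra_string) and expect none of the junk keys to be echoed back.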
+    '''
+    create_request = {
+        "provider_name": "A new provider",
+        "extra_int": 1,
+        "extra_bool": True,
+        "extra_string": "I'm notta name"
+    }
+
+    response = client.post(
+        '/api/serviceProviders',
+        json=create_request)
+
+    assert response.status_code == 201, f'Response body is: {response.json}'
+    assert 'provider_name' in response.json
+    assert 'id' in response.json
+    assert response.json['provider_name'] == create_request['provider_name']
+    assert 'extra_int' not in response.json, "We should not send back request json extra fields"
+    assert 'extra_bool' not in response.json, "We should not send back request json extra fields"
+    assert 'extra_string' not in response.json, "We should not send back request json extra fields"
+
+    db_entry = HousingProviderRepository().get_service_provider_by_id(response.json['id'])
+    assert db_entry is not None, "Request succeeded but the database was not updated!"
+    assert db_entry.provider_name == create_request['provider_name']
+
+def test_create_bad_json_invalid_type(client):
+    bad_create_request = {
+        "provider_name": 1
+    }
+    response = client.post(
+        '/api/serviceProviders',
+        json=bad_create_request)
+
+    assert response.status_code == 400, f'Response body is: {response.json}'
+
+def test_create_bad_json_missing_name(client):
+    bad_create_request = {
+        "provider_namez": 1
+    }
+    response = client.post(
+        '/api/serviceProviders',
+        json=bad_create_request)
+
+    assert response.status_code == 400, f'Response body is: {response.json}'
+
+def test_delete_service_provider(client):
+    """
+    Test deleting a service provider that we know exists,
+    using a simulated delete request. Verify that the request
+    succeeds and check that the provider is no longer
+    available within the database.
+    """
+    # Test database is empty at start. Create an entry to delete
+    ids = populate_test_database(num_entries=1)
+    response = client.delete(f'/api/serviceProviders/{ids[0]}')
+    assert response.status_code == 200, f'Response body is: {response.json}'
+
+    deleted_provider = HousingProviderRepository().get_service_provider_by_id(ids[0])
+    assert deleted_provider is None, "Request succeeded, but provider is still in the database!"
+
+def test_delete_nonexistant_provider(client):
+    """
+    Test that deleting a nonexistent provider responds with the
+    correct status code and does not modify the db.
+    """
+    NUM_ROWS = 4
+    ids = populate_test_database(num_entries=NUM_ROWS)
+    assert HousingProviderRepository().provider_count() == NUM_ROWS, "Test setup failure"
+
+    response = client.delete(f'/api/serviceProviders/{999}')
+    assert response.status_code == 404, f'Response body is: {response.json}'
+
+    assert HousingProviderRepository().provider_count() == NUM_ROWS, (
+        "Request failed, but the row count changed!"
+    )
+
+def test_get_service_provider_by_id(client):
+    """Test case for get_service_provider_by_id
+
+    Get details about a housing program service provider from an ID
+    """
+    ids = populate_test_database(num_entries=8)
+    ID_TO_TEST = ids[3]
+    provider_in_db = HousingProviderRepository().get_service_provider_by_id(ID_TO_TEST)
+
+    response = client.get(f"/api/serviceProviders/{ID_TO_TEST}")
+    assert response.status_code == 200, f'Response body is: {response.json}'
+
+    assert 'provider_name' in response.json
+    assert 'id' in response.json
+    assert response.json['provider_name'] == provider_in_db.provider_name
+    assert response.json['id'] == ID_TO_TEST
+
+def test_get_nonexistent_provider(client):
+    populate_test_database(num_entries=8)
+    response = client.get(f"/api/serviceProviders/{999}")
+    assert response.status_code == 404, f'Response body is: {response.json}'
+
+    assert 'provider_name' not in response.json
+
+def test_get_service_providers(client):
+    """Test case for get_service_providers
+
+    Get a list of housing program service providers.
+    """
+    expected_provider_count = 12
+    populate_test_database(num_entries=expected_provider_count)
+
+    response = client.get('/api/serviceProviders')
+    assert response.status_code == 200, f"Response body is: {response.json}"
+    assert len(response.json) == expected_provider_count
+
+def test_get_service_provider_empty_db(client):
+    response = client.get('/api/serviceProviders')
+    assert response.status_code == 200, f"Response body is: {response.json}"
+    assert len(response.json) == 0
+
+def test_update_service_provider(client):
+    """Test case for update_service_provider
+
+    Update a housing program service provider
+    """
+    ids = populate_test_database(num_entries=1)
+    updated_provider = {
+        "provider_name" : "Rebranded Provider~~~"
+    }
+    response = client.put(
+        f"/api/serviceProviders/{ids[0]}",
+        json=updated_provider)
+    assert response.status_code == 200, f'Response body is: {response.json}'
+
+    assert 'provider_name' in response.json
+    assert 'id' in response.json
+
+    assert response.json['provider_name'] == updated_provider["provider_name"]
+    assert response.json['id'] == ids[0]
+
+def test_update_with_extra_data(client):
+    '''
+    Test that sending an update PUT request with extra
+    json entries in the body does not disrupt the update.
+
+    We should safely ignore additional fields.
+    '''
+    ids = populate_test_database(num_entries=1)
+    update_request = {
+        "provider_name": "A brand new name",
+        "extra_int": 1,
+        "extra_bool": True,
+        "extra_string": "I'm notta name"
+    }
+    response = client.put(
+        f"/api/serviceProviders/{ids[0]}",
+        json=update_request)
+
+    assert response.status_code == 200, f'Response body is: {response.json}'
+
+    assert 'provider_name' in response.json
+    assert 'id' in response.json
+    assert 'extra_int' not in response.json, "We should not send back request json extra fields"
+    assert 'extra_bool' not in response.json, "We should not send back request json extra fields"
+    assert 'extra_string' not in response.json, "We should not send back request json extra fields"
+
+    assert response.json['provider_name'] == update_request["provider_name"]
+    assert response.json['id'] == ids[0]
+
+def test_update_nonexistant_service_provider(client):
+    ids = populate_test_database(num_entries=1)
+    failed_update_request = {
+        "provider_name" : "Failed Update Name"
+    }
+    response = client.put(
+        f"/api/serviceProviders/{999}",
+        json=failed_update_request)
+    assert response.status_code == 404, f'Response body is: {response.json}'
+
+    assert 'provider_name' not in response.json
\ No newline at end of file
diff --git a/api-v2/tests/test_service_provider_repository.py b/api-v2/tests/test_service_provider_repository.py
new file mode 100644
index 00000000..61f9c1d8
--- /dev/null
+++ b/api-v2/tests/test_service_provider_repository.py
@@ -0,0 +1,112 @@
+# Third Party
+import pytest
+from collections.abc import Generator
+# Local
+from openapi_server.repositories.service_provider_repository import HousingProviderRepository
+
+@pytest.fixture
+def empty_housing_repo(empty_db_session) -> Generator[HousingProviderRepository, None, None]:
+    '''
+    SetUp and TearDown an empty housing repository for
+    testing purposes.
+    '''
+    yield HousingProviderRepository()
+
+@pytest.fixture
+def housing_repo_5_entries(empty_housing_repo: HousingProviderRepository) -> Generator[HousingProviderRepository, None, None]:
+    '''
+    SetUp and TearDown a housing repository with five service providers.
+    The providers will have ids 1-5 and names "Provider 1" ... "Provider 5".
+    '''
+    for i in range(1, 6):
+        new = empty_housing_repo.create_service_provider(f"Provider {i}")
+        assert new is not None, f"Test Setup Failure! Failed to create provider {i}"
+        assert new.id == i, "The test ids are expected to go from 1-5"
+    yield empty_housing_repo
+
+def test_empty_db_count(empty_housing_repo: HousingProviderRepository):
+    '''
+    Test our test setup, to ensure that newly created repos are in fact empty.
+    '''
+    assert empty_housing_repo.provider_count() == 0
+
+def test_create_provider(empty_housing_repo: HousingProviderRepository):
+    '''
+    Test creating a new provider within an empty database.
+    '''
+    EXPECTED_NAME = "MyFancyProvider"
+
+    new_provider = empty_housing_repo.create_service_provider(EXPECTED_NAME)
+
+    assert new_provider is not None, "Repo create method failed"
+    assert new_provider.id == 1, "Expected id 1 since this is the first created provider"
+    assert new_provider.provider_name == EXPECTED_NAME, "Created provider name did not match request"
+
+def test_delete_nonexistent_provider(empty_housing_repo: HousingProviderRepository):
+    '''
+    Attempt to delete a service provider that does
+    not exist. Verify that the deletion gracefully
+    fails.
+    '''
+    assert empty_housing_repo.delete_service_provider(42) is False
+
+def test_delete_newly_created_provider(empty_housing_repo: HousingProviderRepository):
+    '''
+    Test creating and then deleting a new service provider, without error.
+    '''
+    new = empty_housing_repo.create_service_provider("Doomed Provider")
+    assert new is not None, "Test setup failure! Initial create failed."
+    assert empty_housing_repo.delete_service_provider(new.id)
+
+def test_get_existing_provider_by_id(housing_repo_5_entries: HousingProviderRepository):
+    '''
+    Test getting a provider by id.
+    '''
+    for i in range(1, 6):
+        provider = housing_repo_5_entries.get_service_provider_by_id(i)
+        assert provider.provider_name == f"Provider {i}"
+        assert provider.id == i
+
+def test_get_all_providers(housing_repo_5_entries: HousingProviderRepository):
+    '''
+    Test getting all available service providers
+    '''
+    providers = housing_repo_5_entries.get_service_providers()
+    assert providers is not None
+    assert len(providers) == 5
+
+    for i in range(1, 6):
+        provider = providers[i-1]
+        assert provider.id == i
+        assert provider.provider_name == f"Provider {i}"
+
+def test_get_all_providers_empty_db(empty_housing_repo: HousingProviderRepository):
+    providers = empty_housing_repo.get_service_providers()
+    assert providers is not None
+    assert len(providers) == 0
+
+def test_get_nonexisting_provider_by_id(housing_repo_5_entries: HousingProviderRepository):
+    failed_get = housing_repo_5_entries.get_service_provider_by_id(42)
+    assert failed_get is None
+
+def test_update_existing_service_provider(housing_repo_5_entries: HousingProviderRepository):
+    UPDATED_NAME = "Rad New Name"
+    UPDATED_ID = 3
+    returned_provider = housing_repo_5_entries.update_service_provider(UPDATED_NAME, UPDATED_ID)
+    retrieved_provider = housing_repo_5_entries.get_service_provider_by_id(UPDATED_ID)
+
+    assert returned_provider is not None
+    assert retrieved_provider is not None
+
+    assert returned_provider.id == UPDATED_ID
+    assert returned_provider.provider_name == UPDATED_NAME
+
+    assert retrieved_provider.id == UPDATED_ID
+    assert retrieved_provider.provider_name == UPDATED_NAME
+
+def test_update_nonexistent_provider(housing_repo_5_entries: HousingProviderRepository):
+    returned_provider = housing_repo_5_entries.update_service_provider("Failed Update Name", 9999)
+    assert returned_provider is None
+
+def test_provider_count(housing_repo_5_entries: HousingProviderRepository):
+    assert housing_repo_5_entries.provider_count() == 5
\ No newline at end of file
diff --git a/api-v2/tests/test_user_repo.py b/api-v2/tests/test_user_repo.py
new file mode 100644
index 00000000..a929e808
--- /dev/null
+++ b/api-v2/tests/test_user_repo.py
@@ -0,0 +1,52 @@
+import pytest
+from sqlalchemy.exc import IntegrityError
+
+from openapi_server.models.database import User
+from openapi_server.models.user_roles import UserRole
+from openapi_server.repositories.user_repo import UserRepository
+
+def test_user_role_required(empty_db_session):
+    new_user = User(email="realemail@fakedomain.com", firstName="realemail@fakedomain.com", middleName="realemail@fakedomain.com",
+                    lastName="realemail@fakedomain.com")
+    empty_db_session.add(new_user)
+    with pytest.raises(IntegrityError, match="NOT NULL constraint failed"):
+        empty_db_session.commit()
+
+    with pytest.raises(TypeError):
+        repo = UserRepository(empty_db_session)
+        repo.add_user(email="realemail@fakedomain.com", firstName="realemail@fakedomain.com", middleName="realemail@fakedomain.com",
+                      lastName="realemail@fakedomain.com")
+
+def test_add_user_firstname_only(empty_db_session):
+    '''
+    Verify that user middle and last name are not required.
+    In some cultures, such as Indonesian and Icelandic, people may have only one name.
+    '''
+    repo = UserRepository(empty_db_session)
+    new_user = repo.add_user(email="realemail@fakedomain.com", firstName="name", role=UserRole.GUEST)
+    assert new_user.role.name == UserRole.GUEST.value
+    assert new_user.firstName == "name"
+    assert new_user.middleName is None
+    assert new_user.lastName is None
+    assert new_user.email == "realemail@fakedomain.com"
+
+def test_single_char_name(empty_db_session):
+    '''
+    Verify that user names can be just one character, per the
+    US Web Design System Guidance.
+    '''
+    repo = UserRepository(empty_db_session)
+    new_user = repo.add_user(email="realemail@fakedomain.com", firstName="n", role=UserRole.GUEST)
+    assert new_user.role.name == UserRole.GUEST.value
+    assert new_user.firstName == "n"
+    assert new_user.middleName is None
+    assert new_user.lastName is None
+    assert new_user.email == "realemail@fakedomain.com"
+
+def test_firstname_required(empty_db_session):
+    '''
+    Test that the firstName field must contain at least one non-space character.
+    '''
+    repo = UserRepository(empty_db_session)
+    with pytest.raises(ValueError, match="firstName must contain at least one non-space character"):
+        repo.add_user(email="realemail@fakedomain.com", firstName=" ", role=UserRole.GUEST)
\ No newline at end of file
diff --git a/api-v2/tox.ini b/api-v2/tox.ini
new file mode 100644
index 00000000..b4aee3ae
--- /dev/null
+++ b/api-v2/tox.ini
@@ -0,0 +1,20 @@
+[tox]
+env_list =
+    py312
+minversion = 4.6.4
+
+[testenv]
+description = run tests with mocking using pytest
+
+skip_install = true
+allowlist_externals = poetry
+commands_pre =
+    poetry install
+commands =
+    poetry run pytest {tty:--color=yes} {posargs:tests} --cov=app --mode=debug
+
+[testenv:releasetest]
+description = run tests without mocking using pytest
+passenv = COGNITO_REGION,COGNITO_ACCESS_ID,COGNITO_ACCESS_KEY
+commands =
+    poetry run pytest {tty:--color=yes} {posargs} --cov=openapi_server --mode=release
\ No newline at end of file
From 2c83714d901304ddc07aac537ddfecba3720ce59 Mon Sep 17 00:00:00 2001
From: Erik
Date: Fri, 6 Sep 2024 15:09:29 -0700
Subject: [PATCH 19/70] Fix package name

---
 api-v2/pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api-v2/pyproject.toml b/api-v2/pyproject.toml
index 17ee8c61..1810b339 100644
--- a/api-v2/pyproject.toml
+++ b/api-v2/pyproject.toml
@@ -1,5 +1,5 @@
 [tool.poetry]
-name = "home unite us"
+name = "homeuniteus-api"
 version = "0.1.0"
 description = "Web API for Home Unite Us"
 authors = [
From 10c9048afa014a3047550068fe9db89df6e5fcf9 Mon Sep 17 00:00:00 2001
From: Erik
Date: Fri, 6 Sep 2024 16:23:39 -0700
Subject: [PATCH 20/70] Add session routes and schemas

---
 api-v2/app/api/routes/auth.py | 84 +++++++++++++++++++++++++++++++++--
 api-v2/app/schemas.py         |  6 ++-
 2 files changed, 86 insertions(+), 4 deletions(-)

diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py
index 6ab3691d..eb4837eb 100644
--- a/api-v2/app/api/routes/auth.py
+++ b/api-v2/app/api/routes/auth.py
@@ -1,12 +1,14 @@
 import logging
+import jwt
 
-from fastapi import Depends, APIRouter, HTTPException, Response, Security
+from fastapi import Depends, APIRouter, HTTPException, Response, Security, Request
 from fastapi.responses import RedirectResponse
 from sqlalchemy.orm import Session
 from botocore.exceptions import ClientError
+from typing import Annotated
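+
+# Note: calc_secret_hash, used by the session routes below, is not shown in
+# this patch (it is presumably provided by api.deps). It is assumed to follow
+# the standard Cognito SECRET_HASH recipe, roughly this sketch, where
+# "settings" is illustrative:
+#
+#     import base64, hashlib, hmac
+#
+#     def calc_secret_hash(username: str) -> str:
+#         digest = hmac.new(
+#             settings.COGNITO_CLIENT_SECRET.encode(),
+#             (username + settings.COGNITO_CLIENT_ID).encode(),
+#             hashlib.sha256,
+#         ).digest()
+#         return base64.b64encode(digest).decode()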
-from schemas import UserCreate, UserSignIn, UserSignInResponse +from schemas import UserCreate, UserSignInRequest, UserSignInResponse, RefreshTokenResponse from crud import create_user, delete_user, get_user from api.deps import ( get_db, @@ -92,7 +94,7 @@ def signup( response_model=UserSignInResponse, ) def signin( - body: UserSignIn, + body: UserSignInRequest, response: Response, db: Session = Depends(get_db), cognito_client=Depends(get_cognito_client), @@ -154,3 +156,79 @@ def signin( ) def secret(): return {"message": "Welcome to the secret route"} + + +''' +# Current session route + +This route is used to get the current session and user info upon page refresh + +''' +@router.get("/session", response_model=UserSignInResponse) +def current_session(request: Request, cognito_client=Depends(get_cognito_client), db: Session = Depends(get_db)): + id_token = request.cookies.get('id_token') + refresh_token = request.cookies.get('refresh_token') + if None in (id_token, refresh_token): + raise HTTPException(status_code=401, detail="Missing session cookies") + + decoded_id_token = jwt.decode( + id_token, algorithms=["RS256"], options={"verify_signature": False} + ) + + user = get_user(db, decoded_id_token['email']) + + try: + auth_response = cognito_client.initiate_auth( + ClientId=cognito_client_id, + AuthFlow='REFRESH_TOKEN', + AuthParameters={ + 'REFRESH_TOKEN': refresh_token, + 'SECRET_HASH': calc_secret_hash(decoded_id_token["cognito:username"]) + } + ) + except ClientError as e: + code = e.response['Error']['Code'] + message = e.response['Error']['Message'] + raise HTTPException(status_code=400, detail={"code": code, "message": message}) + + return { + "user": user, + "token": auth_response['AuthenticationResult']['AccessToken'], + } + + +''' +# Refresh route + +This route is used to refresh the current access token during session +''' +@router.get("/refresh", response_model=RefreshTokenResponse) +def refresh(request: Request, cognito_client=Depends(get_cognito_client)): + refresh_token = request.cookies.get('refresh_token') + id_token = request.cookies.get('id_token') + + if None in (refresh_token, id_token): + raise HTTPException(status_code=401, detail="Missing refresh token or id token") + + decoded = jwt.decode(id_token, algorithms=["RS256"], options={"verify_signature": False}) + + try: + response = cognito_client.initiate_auth( + ClientId=cognito_client_id, + AuthFlow='REFRESH_TOKEN', + AuthParameters={ + 'REFRESH_TOKEN': refresh_token, + 'SECRET_HASH': calc_secret_hash(decoded["cognito:username"]) + } + ) + except ClientError as e: + code = e.response['Error']['Code'] + message = e.response['Error']['Message'] + raise HTTPException(status_code=400, detail={"code": code, "message": message}) + + access_token = response['AuthenticationResult']['AccessToken'] + + # Return access token + return { + "token": access_token + } \ No newline at end of file diff --git a/api-v2/app/schemas.py b/api-v2/app/schemas.py index 97e22489..d7b42b0a 100644 --- a/api-v2/app/schemas.py +++ b/api-v2/app/schemas.py @@ -39,7 +39,7 @@ class Config: from_attributes = True -class UserSignIn(BaseModel): +class UserSignInRequest(BaseModel): email: str password: str @@ -47,3 +47,7 @@ class UserSignIn(BaseModel): class UserSignInResponse(BaseModel): user: User token: str + + +class RefreshTokenResponse(BaseModel): + token: str \ No newline at end of file From a0a6afd87c48e8d15873f0d6f50f6793a4d9e048 Mon Sep 17 00:00:00 2001 From: Erik Date: Fri, 6 Sep 2024 16:24:35 -0700 Subject: [PATCH 21/70] Removed unused 
import

---
 api-v2/app/api/routes/auth.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py
index eb4837eb..9b9c3f85 100644
--- a/api-v2/app/api/routes/auth.py
+++ b/api-v2/app/api/routes/auth.py
@@ -5,7 +5,6 @@
 from fastapi.responses import RedirectResponse
 from sqlalchemy.orm import Session
 from botocore.exceptions import ClientError
-from typing import Annotated
 
 from schemas import UserCreate, UserSignInRequest, UserSignInResponse, RefreshTokenResponse
 
From 0682d0f07b55c4dc04c2653e82d881345d8709ee Mon Sep 17 00:00:00 2001
From: Erik
Date: Fri, 6 Sep 2024 16:26:19 -0700
Subject: [PATCH 22/70] Formatting

---
 api-v2/app/api/routes/auth.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py
index 9b9c3f85..83a20384 100644
--- a/api-v2/app/api/routes/auth.py
+++ b/api-v2/app/api/routes/auth.py
@@ -161,8 +161,9 @@ def secret():
 # Current session route
 
 This route is used to get the current session and user info upon page refresh
-'''
+'''
+
+
 @router.get("/session", response_model=UserSignInResponse)
 def current_session(request: Request, cognito_client=Depends(get_cognito_client), db: Session = Depends(get_db)):
     id_token = request.cookies.get('id_token')
@@ -201,6 +202,8 @@ def current_session(request: Request, cognito_client=Depends(get_cognito_client)
 This route is used to refresh the current access token during session
 '''
+
+
 @router.get("/refresh", response_model=RefreshTokenResponse)
 def refresh(request: Request, cognito_client=Depends(get_cognito_client)):
     refresh_token = request.cookies.get('refresh_token')
From aeede1164fcedd5827cc9d0ce32b831431df909b Mon Sep 17 00:00:00 2001
From: Erik
Date: Fri, 6 Sep 2024 16:37:46 -0700
Subject: [PATCH 23/70] Add /user route

---
 api-v2/app/api/routes/auth.py | 25 +++++++++++++++++++++++--
 1 file changed, 23 insertions(+), 2 deletions(-)

diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py
index 83a20384..53146f0f 100644
--- a/api-v2/app/api/routes/auth.py
+++ b/api-v2/app/api/routes/auth.py
@@ -7,7 +7,7 @@
 from botocore.exceptions import ClientError
 
-from schemas import UserCreate, UserSignInRequest, UserSignInResponse, RefreshTokenResponse
+from schemas import UserCreate, UserSignInRequest, UserSignInResponse, RefreshTokenResponse, User
 from crud import create_user, delete_user, get_user
 from api.deps import (
     get_db,
@@ -233,4 +233,25 @@ def refresh(request: Request, cognito_client=Depends(get_cognito_client)):
     # Return access token
     return {
         "token": access_token
-    }
\ No newline at end of file
+    }
+
+'''
+# Get user route
+
+This route is used to get the current user info
+'''
+
+@router.get("/user", response_model=User)
+def get_user_info(request: Request, db: Session = Depends(get_db)):
+    id_token = request.cookies.get('id_token')
+    if id_token is None:
+        raise HTTPException(status_code=401, detail="Missing id token")
+
+    decoded = jwt.decode(id_token, algorithms=["RS256"], options={"verify_signature": False})
+    email = decoded.get('email')
+    if email is None:
+        raise HTTPException(status_code=401, detail="Email not found in token")
+
+    user = get_user(db, email)
+
+    return user
\ No newline at end of file
From b57c0981c99a45f16e6338b0a2096849fd91b29c Mon Sep 17 00:00:00 2001
From: Erik
Date: Fri, 6 Sep 2024 16:46:27 -0700
Subject: [PATCH 24/70] Create user router

---
 api-v2/app/api/main.py        |  3 ++-
 api-v2/app/api/routes/auth.py | 20 --------------------
 api-v2/app/api/routes/user.py | 34 ++++++++++++++++++++++++++++++++++
 3 files changed, 36 insertions(+), 21 deletions(-)
 create mode 100644 api-v2/app/api/routes/user.py

diff --git a/api-v2/app/api/main.py b/api-v2/app/api/main.py
index c96ebe12..d243a99e 100644
--- a/api-v2/app/api/main.py
+++ b/api-v2/app/api/main.py
@@ -1,7 +1,8 @@
 from fastapi import APIRouter
 
-from api.routes import auth
+from api.routes import auth, user
 
 api_router = APIRouter()
 
 api_router.include_router(auth.router, prefix="/auth", tags=["auth"])
+api_router.include_router(user.router, prefix="/user", tags=["user"])
diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py
index 53146f0f..21ff8393 100644
--- a/api-v2/app/api/routes/auth.py
+++ b/api-v2/app/api/routes/auth.py
@@ -235,23 +235,3 @@ def refresh(request: Request, cognito_client=Depends(get_cognito_client)):
         "token": access_token
     }
 
-'''
-# Get user route
-
-This route is used to get the current user info
-'''
-
-@router.get("/user", response_model=User)
-def get_user_info(request: Request, db: Session = Depends(get_db)):
-    id_token = request.cookies.get('id_token')
-    if id_token is None:
-        raise HTTPException(status_code=401, detail="Missing id token")
-
-    decoded = jwt.decode(id_token, algorithms=["RS256"], options={"verify_signature": False})
-    email = decoded.get('email')
-    if email is None:
-        raise HTTPException(status_code=401, detail="Email not found in token")
-
-    user = get_user(db, email)
-
-    return user
\ No newline at end of file
diff --git a/api-v2/app/api/routes/user.py b/api-v2/app/api/routes/user.py
new file mode 100644
index 00000000..e0c65c67
--- /dev/null
+++ b/api-v2/app/api/routes/user.py
@@ -0,0 +1,34 @@
+import jwt
+
+from fastapi import APIRouter, Request, Depends, HTTPException
+from sqlalchemy.orm import Session
+
+
+from schemas import User
+from crud import get_user
+from api.deps import get_db
+
+
+router = APIRouter()
+
+
+'''
+# Get user route
+
+This route is used to get the current user info
+'''
+
+@router.get("/", response_model=User)
+def get_user_info(request: Request, db: Session = Depends(get_db)):
+    id_token = request.cookies.get('id_token')
+    if id_token is None:
+        raise HTTPException(status_code=401, detail="Missing id token")
+
+    decoded = jwt.decode(id_token, algorithms=["RS256"], options={"verify_signature": False})
+    email = decoded.get('email')
+    if email is None:
+        raise HTTPException(status_code=401, detail="Email not found in token")
+
+    user = get_user(db, email)
+
+    return user
\ No newline at end of file
From 860553a3c128d16ccc3b0d2634507e00129b0709 Mon Sep 17 00:00:00 2001
From: Erik
Date: Fri, 6 Sep 2024 16:59:17 -0700
Subject: [PATCH 25/70] Refactor env config and remove python-dotenv package

---
 api-v2/app/core/config.py | 12 +++++-------
 api-v2/poetry.lock        |  2 +-
 api-v2/pyproject.toml     |  3 ---
 3 files changed, 6 insertions(+), 11 deletions(-)

diff --git a/api-v2/app/core/config.py b/api-v2/app/core/config.py
index 71b98de5..b5452375 100644
--- a/api-v2/app/core/config.py
+++ b/api-v2/app/core/config.py
@@ -1,10 +1,11 @@
-from pydantic_settings import BaseSettings
-from dotenv import load_dotenv
-
-load_dotenv()
+from pydantic_settings import BaseSettings, SettingsConfigDict
 
 
 class Settings(BaseSettings):
+    model_config = SettingsConfigDict(
+        env_file=".env"
+    )
+
     COGNITO_CLIENT_ID: str
     COGNITO_CLIENT_SECRET: str
     COGNITO_REGION: str
@@ -18,8 +19,5 @@ class Settings(BaseSettings):
     ENV: str
     DATABASE_URL: str
 
-    class Config:
-        env_file = ".env"
-
 
 settings = Settings()
diff --git a/api-v2/poetry.lock b/api-v2/poetry.lock
index 9ef32e8c..95b24ec4 100644
--- a/api-v2/poetry.lock
+++ b/api-v2/poetry.lock
@@ -2081,4 +2081,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.12"
-content-hash = "3ed6c59ca289ab7d6ee202844938b6eba99cc023f4b9c24ca3e7cbfcff5c32bd"
+content-hash = "8d1850b557390ef1a13451ff6b99189c5f60d9d3cb588d60ce5535e9cc19c0d4"
diff --git a/api-v2/pyproject.toml b/api-v2/pyproject.toml
index 1810b339..a771a5d9 100644
--- a/api-v2/pyproject.toml
+++ b/api-v2/pyproject.toml
@@ -31,9 +31,6 @@ psycopg2-binary = "^2.9"
 # boto3 is used for connecting to AWS resources
 boto3 = "^1.35.13"
 
-# python-dotenv allows the API to pull in external configuration from a .env file
-python-dotenv = "^1.0.1"
-
 # pydantic-settings is a Pydantic feature that is used to load settings/configurations
 # from environment variables or secret files.
 pydantic-settings = "^2.4.0"
From 5253cb65a853d062c6713bf1080dc15b72b1a754 Mon Sep 17 00:00:00 2001
From: "Mr. Paul"
Date: Sat, 7 Sep 2024 11:26:55 -0700
Subject: [PATCH 26/70] fastapi-migration: Organize by workflow sections

This commit is part of the migration from connexion to FastAPI. The
commit introduces a directory structure organized by workflow
responsibilities. Tests still need to be updated, along with further
code refactoring. Files have been moved to their respective areas of
responsibility, but have not yet been refactored to work within the
new structure.
---
 api-v2/alembic.ini                            | 110 ++++
 api-v2/alembic/env.py                         |  91 +++++
 api-v2/alembic/script.py.mako                 |  24 ++
 api-v2/alembic/versions/3ceec084158f_.py      | 367 ++++++++++++++++++
 .../versions/cfc4e41b69d3_initial_form_api.py |  85 ++++
 .../versions/e4c8bb426528_add_user_types.py   |  72 ++++
 .../ec8b1c17739a_drop_unused_tables.py        | 299 ++++++++++++++
 api-v2/app/access/user_repo.py                |  87 +++++
 api-v2/app/access/user_roles.py               |  13 +
 api-v2/app/api/main.py                        |   1 +
 api-v2/app/core/db.py                         |   2 +-
 api-v2/app/intake_profile/__init__.py         |   0
 api-v2/app/intake_profile/controller.py       |  34 ++
 api-v2/app/intake_profile/forms/forms.py      |  79 ++++
 api-v2/app/intake_profile/model.py            |  21 +
 api-v2/app/intake_profile/repository.py       |  30 ++
 api-v2/app/intake_profile/schemas.py          | 207 ++++++++++
 api-v2/app/matching/__init__.py               |   0
 api-v2/app/matching/controller.py             |   0
 api-v2/app/matching/model.py                  |   0
 api-v2/app/matching/schemas.py                |   0
 api-v2/app/models.py                          |  61 ++-
 api-v2/app/onboarding/__init__.py             |   0
 api-v2/app/onboarding/controller.py           |   0
 api-v2/app/onboarding/model.py                |   0
 api-v2/app/onboarding/schemas.py              |   0
 api-v2/app/schemas.py                         | 158 ++++++++
 api-v2/app/stays/__init__.py                  |   0
 api-v2/app/stays/controller.py                |   0
 api-v2/app/stays/model.py                     |   0
 api-v2/app/stays/schemas.py                   |   0
 .../app/tenant_housing_provider/__init__.py   |   0
 .../app/tenant_housing_provider/controller.py |   0
 api-v2/app/tenant_housing_provider/model.py   |  20 +
 api-v2/app/tenant_housing_provider/schemas.py |   0
 .../service_provider_repository.py            |  98 +++++
 api-v2/poetry.lock                            |  36 +-
 api-v2/tests/access/__init_.py                |   0
 .../tests/{ => access}/test_authentication.py |   0
 .../{ => access}/test_host_controller.py      |   0
 api-v2/tests/{ => access}/test_mocking.py     |   0
 api-v2/tests/{ => access}/test_user_repo.py   |   0
 api-v2/tests/conftest.py                      | 161 --------
 api-v2/tests/intake_profile/__init__.py       |   0
 api-v2/tests/intake_profile/test_forms.py     | 134 +++++++
 .../{ => intake_profile}/test_forms_schema.py |   0
 api-v2/tests/matching/__init__.py             |   0
 api-v2/tests/onboarding/__init__.py           |   0
 api-v2/tests/stays/__init__.py                |   0
 .../tests/tenant_housing_provider/__init__.py |   0
 .../test_service_provider_controller.py       |   0
.../test_service_provider_repository.py | 0 api-v2/tests/test_alembic_migration.py | 26 +- api-v2/tests/test_forms_repo.py | 127 ------ api-v2/tox.ini | 2 +- api/openapi_server/models/database.py | 12 +- api/tests/test_forms_repo.py | 2 +- 57 files changed, 2028 insertions(+), 331 deletions(-) create mode 100644 api-v2/alembic.ini create mode 100644 api-v2/alembic/env.py create mode 100644 api-v2/alembic/script.py.mako create mode 100644 api-v2/alembic/versions/3ceec084158f_.py create mode 100644 api-v2/alembic/versions/cfc4e41b69d3_initial_form_api.py create mode 100644 api-v2/alembic/versions/e4c8bb426528_add_user_types.py create mode 100644 api-v2/alembic/versions/ec8b1c17739a_drop_unused_tables.py create mode 100644 api-v2/app/access/user_repo.py create mode 100644 api-v2/app/access/user_roles.py create mode 100644 api-v2/app/intake_profile/__init__.py create mode 100644 api-v2/app/intake_profile/controller.py create mode 100644 api-v2/app/intake_profile/forms/forms.py create mode 100644 api-v2/app/intake_profile/model.py create mode 100644 api-v2/app/intake_profile/repository.py create mode 100644 api-v2/app/intake_profile/schemas.py create mode 100644 api-v2/app/matching/__init__.py create mode 100644 api-v2/app/matching/controller.py create mode 100644 api-v2/app/matching/model.py create mode 100644 api-v2/app/matching/schemas.py create mode 100644 api-v2/app/onboarding/__init__.py create mode 100644 api-v2/app/onboarding/controller.py create mode 100644 api-v2/app/onboarding/model.py create mode 100644 api-v2/app/onboarding/schemas.py create mode 100644 api-v2/app/stays/__init__.py create mode 100644 api-v2/app/stays/controller.py create mode 100644 api-v2/app/stays/model.py create mode 100644 api-v2/app/stays/schemas.py create mode 100644 api-v2/app/tenant_housing_provider/__init__.py create mode 100644 api-v2/app/tenant_housing_provider/controller.py create mode 100644 api-v2/app/tenant_housing_provider/model.py create mode 100644 api-v2/app/tenant_housing_provider/schemas.py create mode 100644 api-v2/app/tenant_housing_provider/service_provider_repository.py create mode 100644 api-v2/tests/access/__init_.py rename api-v2/tests/{ => access}/test_authentication.py (100%) rename api-v2/tests/{ => access}/test_host_controller.py (100%) rename api-v2/tests/{ => access}/test_mocking.py (100%) rename api-v2/tests/{ => access}/test_user_repo.py (100%) delete mode 100644 api-v2/tests/conftest.py create mode 100644 api-v2/tests/intake_profile/__init__.py create mode 100644 api-v2/tests/intake_profile/test_forms.py rename api-v2/tests/{ => intake_profile}/test_forms_schema.py (100%) create mode 100644 api-v2/tests/matching/__init__.py create mode 100644 api-v2/tests/onboarding/__init__.py create mode 100644 api-v2/tests/stays/__init__.py create mode 100644 api-v2/tests/tenant_housing_provider/__init__.py rename api-v2/tests/{ => tenant_housing_provider}/test_service_provider_controller.py (100%) rename api-v2/tests/{ => tenant_housing_provider}/test_service_provider_repository.py (100%) delete mode 100644 api-v2/tests/test_forms_repo.py diff --git a/api-v2/alembic.ini b/api-v2/alembic.ini new file mode 100644 index 00000000..ef70e5f2 --- /dev/null +++ b/api-v2/alembic.ini @@ -0,0 +1,110 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = sqlite:///./homeuniteus.db + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/api-v2/alembic/env.py b/api-v2/alembic/env.py new file mode 100644 index 00000000..8901a3f3 --- /dev/null +++ b/api-v2/alembic/env.py @@ -0,0 +1,91 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +import sys +import os +print(os.getcwd()) +sys.path.append(os.getcwd()) + +from app import models as db + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = db.Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + print("ONLINE") + # Check for an existing connection before creating a new engine. + # pytest-alembic will hook into alembic by creating a connection + # with the test engine configuration. 
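+    # For reference, a caller can hand Alembic an existing connection like
+    # this (a sketch following Alembic's documented connection-sharing
+    # pattern; "engine" here is illustrative):
+    #
+    #     from alembic import command
+    #     from alembic.config import Config
+    #
+    #     cfg = Config("alembic.ini")
+    #     with engine.connect() as connection:
+    #         cfg.attributes["connection"] = connection
+    #         command.upgrade(cfg, "head")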
+ connectable = context.config.attributes.get("connection", None) + if connectable is None: + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/api-v2/alembic/script.py.mako b/api-v2/alembic/script.py.mako new file mode 100644 index 00000000..55df2863 --- /dev/null +++ b/api-v2/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/api-v2/alembic/versions/3ceec084158f_.py b/api-v2/alembic/versions/3ceec084158f_.py new file mode 100644 index 00000000..b5518d1b --- /dev/null +++ b/api-v2/alembic/versions/3ceec084158f_.py @@ -0,0 +1,367 @@ +"""empty message + +Revision ID: 3ceec084158f +Revises: +Create Date: 2023-03-13 16:58:30.782837 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision = '3ceec084158f' +down_revision = None +branch_labels = None +depends_on = None + +def create_missing_table(name: str, *create_args) -> bool: + "Create the table if it is not already present in the database." + conn = op.get_bind() + inspector = Inspector.from_engine(conn) + if name not in inspector.get_table_names(): + op.create_table(name, *create_args) + return True + return False + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + if create_missing_table('applicant_type', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant_type_description', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id')): + op.create_index(op.f('ix_applicant_type_id'), 'applicant_type', ['id'], unique=False) + + if create_missing_table('case_status', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('status_description', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_case_status_id'), 'case_status', ['id'], unique=False) + + if create_missing_table('guest_group', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('group_name', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_guest_group_id'), 'guest_group', ['id'], unique=False) + + if create_missing_table('host', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_host_id'), 'host', ['id'], unique=False) + + if create_missing_table('host_household', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('household_name', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_host_household_id'), 'host_household', ['id'], unique=False) + + if create_missing_table('housing_program_service_provider', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('provider_name', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_housing_program_service_provider_id'), 'housing_program_service_provider', ['id'], unique=False) + + if create_missing_table('image_tag_type', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('tag_text', sa.String(), nullable=False), + sa.Column('tag_description', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_image_tag_type_id'), 'image_tag_type', ['id'], unique=False) + + if create_missing_table('intake_question_type', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('type_description', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_intake_question_type_id'), 'intake_question_type', ['id'], unique=False) + + if create_missing_table('match_status', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('status_description', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_match_status_id'), 'match_status', ['id'], unique=False) + + if create_missing_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('email', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email') + ): + op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False) + + if create_missing_table('applicant_status', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant_type', sa.Integer(), nullable=False), + sa.Column('status_description', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_applicant_status_id'), 'applicant_status', ['id'], unique=False) + + if create_missing_table('group_match_result', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('guest_group', sa.Integer(), nullable=False), + sa.Column('host_household', sa.Integer(), nullable=False), + sa.Column('match_status', sa.Integer(), 
nullable=False), + sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), + sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), + sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_group_match_result_id'), 'group_match_result', ['id'], unique=False) + + if create_missing_table('housing_program', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('program_name', sa.String(), nullable=False), + sa.Column('service_provider', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['service_provider'], ['housing_program_service_provider.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_housing_program_id'), 'housing_program', ['id'], unique=False) + + if create_missing_table('applicant', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant_type', sa.Integer(), nullable=False), + sa.Column('applicant_status', sa.Integer(), nullable=False), + sa.Column('user', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['applicant_status'], ['applicant_status.id'], ), + sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), + sa.ForeignKeyConstraint(['user'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_applicant_id'), 'applicant', ['id'], unique=False) + + if create_missing_table('intake_question_set', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('question_set_name', sa.String(), nullable=False), + sa.Column('housing_program', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_intake_question_set_id'), 'intake_question_set', ['id'], unique=False) + + if create_missing_table('program_coordinator', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user', sa.Integer(), nullable=False), + sa.Column('housing_program', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), + sa.ForeignKeyConstraint(['user'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_program_coordinator_id'), 'program_coordinator', ['id'], unique=False) + + if create_missing_table('applicant_status_log', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('log_description', sa.String(), nullable=False), + sa.Column('logtime', sa.DateTime(), nullable=False), + sa.Column('applicant', sa.Integer(), nullable=False), + sa.Column('src_status', sa.Integer(), nullable=False), + sa.Column('dest_status', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['dest_status'], ['applicant_status.id'], ), + sa.ForeignKeyConstraint(['src_status'], ['applicant_status.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_applicant_status_log_id'), 'applicant_status_log', ['id'], unique=False) + + if create_missing_table('applicant_uploaded_image', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant', sa.Integer(), nullable=False), + sa.Column('image_data', sa.LargeBinary(length=5242880), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_applicant_uploaded_image_id'), 'applicant_uploaded_image', ['id'], unique=False) + + if create_missing_table('guest_group_member', + sa.Column('id', sa.Integer(), nullable=False), + 
sa.Column('guest_group', sa.Integer(), nullable=False), + sa.Column('applicant', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_guest_group_member_id'), 'guest_group_member', ['id'], unique=False) + + if create_missing_table('host_household_member', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('host_household', sa.Integer(), nullable=False), + sa.Column('applicant', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_host_household_member_id'), 'host_household_member', ['id'], unique=False) + + if create_missing_table('housing_program_pariticipant', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant', sa.Integer(), nullable=False), + sa.Column('housing_program', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_housing_program_pariticipant_id'), 'housing_program_pariticipant', ['id'], unique=False) + + if create_missing_table('intake_question', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant_type', sa.Integer(), nullable=False), + sa.Column('intake_question_type', sa.Integer(), nullable=False), + sa.Column('intake_question_set', sa.Integer(), nullable=False), + sa.Column('question_text', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), + sa.ForeignKeyConstraint(['intake_question_set'], ['intake_question_set.id'], ), + sa.ForeignKeyConstraint(['intake_question_type'], ['intake_question_type.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_intake_question_id'), 'intake_question', ['id'], unique=False) + + if create_missing_table('match_result', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant_a', sa.Integer(), nullable=False), + sa.Column('applicant_b', sa.Integer(), nullable=False), + sa.Column('match_status', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['applicant_a'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['applicant_b'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_match_result_id'), 'match_result', ['id'], unique=False) + + if create_missing_table('program_case', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('coordinator', sa.Integer(), nullable=False), + sa.Column('case_status', sa.Integer(), nullable=False), + sa.Column('host_household', sa.Integer(), nullable=False), + sa.Column('guest_group', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['case_status'], ['case_status.id'], ), + sa.ForeignKeyConstraint(['coordinator'], ['program_coordinator.id'], ), + sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), + sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_program_case_id'), 'program_case', ['id'], unique=False) + + if create_missing_table('image_tag', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('applicant', sa.Integer(), nullable=False), + 
sa.Column('image_tag_type', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant_uploaded_image.id'], ), + sa.ForeignKeyConstraint(['image_tag_type'], ['image_tag_type.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_image_tag_id'), 'image_tag', ['id'], unique=False) + + if create_missing_table('intake_response_value', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('intake_question', sa.Integer(), nullable=False), + sa.Column('response_text', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['intake_question'], ['intake_question.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_intake_response_value_id'), 'intake_response_value', ['id'], unique=False) + + if create_missing_table('program_case_log', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('log_description', sa.String(), nullable=False), + sa.Column('logtime', sa.DateTime(), nullable=False), + sa.Column('program_case', sa.Integer(), nullable=False), + sa.Column('src_status', sa.Integer(), nullable=False), + sa.Column('dest_status', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['dest_status'], ['case_status.id'], ), + sa.ForeignKeyConstraint(['program_case'], ['program_case.id'], ), + sa.ForeignKeyConstraint(['src_status'], ['case_status.id'], ), + sa.PrimaryKeyConstraint('id') + ): + op.create_index(op.f('ix_program_case_log_id'), 'program_case_log', ['id'], unique=False) + + if create_missing_table('match_fail_condition', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('response_value_a', sa.Integer(), nullable=False), + sa.Column('response_value_b', sa.Integer(), nullable=False), + sa.Column('reason_text', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['response_value_a'], ['intake_response_value.id'], ), + sa.ForeignKeyConstraint(['response_value_b'], ['intake_response_value.id'], ), + sa.PrimaryKeyConstraint('id')): + op.create_index(op.f('ix_match_fail_condition_id'), 'match_fail_condition', ['id'], unique=False) + + if create_missing_table('match_failure', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('match_result', sa.Integer(), nullable=False), + sa.Column('failed_condition', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['failed_condition'], ['match_fail_condition.id'], ), + sa.ForeignKeyConstraint(['match_result'], ['match_result.id'], ), + sa.PrimaryKeyConstraint('id')): + op.create_index(op.f('ix_match_failure_id'), 'match_failure', ['id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_match_failure_id'), table_name='match_failure') + op.drop_table('match_failure') + op.drop_index(op.f('ix_match_fail_condition_id'), table_name='match_fail_condition') + op.drop_table('match_fail_condition') + op.drop_index(op.f('ix_program_case_log_id'), table_name='program_case_log') + op.drop_table('program_case_log') + op.drop_index(op.f('ix_intake_response_value_id'), table_name='intake_response_value') + op.drop_table('intake_response_value') + op.drop_index(op.f('ix_image_tag_id'), table_name='image_tag') + op.drop_table('image_tag') + op.drop_index(op.f('ix_program_case_id'), table_name='program_case') + op.drop_table('program_case') + op.drop_index(op.f('ix_match_result_id'), table_name='match_result') + op.drop_table('match_result') + op.drop_index(op.f('ix_intake_question_id'), table_name='intake_question') + op.drop_table('intake_question') + op.drop_index(op.f('ix_housing_program_pariticipant_id'), table_name='housing_program_pariticipant') + op.drop_table('housing_program_pariticipant') + op.drop_index(op.f('ix_host_household_member_id'), table_name='host_household_member') + op.drop_table('host_household_member') + op.drop_index(op.f('ix_guest_group_member_id'), table_name='guest_group_member') + op.drop_table('guest_group_member') + op.drop_index(op.f('ix_applicant_uploaded_image_id'), table_name='applicant_uploaded_image') + op.drop_table('applicant_uploaded_image') + op.drop_index(op.f('ix_applicant_status_log_id'), table_name='applicant_status_log') + op.drop_table('applicant_status_log') + op.drop_index(op.f('ix_program_coordinator_id'), table_name='program_coordinator') + op.drop_table('program_coordinator') + op.drop_index(op.f('ix_intake_question_set_id'), table_name='intake_question_set') + op.drop_table('intake_question_set') + op.drop_index(op.f('ix_applicant_id'), table_name='applicant') + op.drop_table('applicant') + op.drop_index(op.f('ix_housing_program_id'), table_name='housing_program') + op.drop_table('housing_program') + op.drop_index(op.f('ix_group_match_result_id'), table_name='group_match_result') + op.drop_table('group_match_result') + op.drop_index(op.f('ix_applicant_status_id'), table_name='applicant_status') + op.drop_table('applicant_status') + op.drop_index(op.f('ix_user_id'), table_name='user') + op.drop_table('user') + op.drop_index(op.f('ix_match_status_id'), table_name='match_status') + op.drop_table('match_status') + op.drop_index(op.f('ix_intake_question_type_id'), table_name='intake_question_type') + op.drop_table('intake_question_type') + op.drop_index(op.f('ix_image_tag_type_id'), table_name='image_tag_type') + op.drop_table('image_tag_type') + op.drop_index(op.f('ix_housing_program_service_provider_id'), table_name='housing_program_service_provider') + op.drop_table('housing_program_service_provider') + op.drop_index(op.f('ix_host_household_id'), table_name='host_household') + op.drop_table('host_household') + op.drop_index(op.f('ix_host_id'), table_name='host') + op.drop_table('host') + op.drop_index(op.f('ix_guest_group_id'), table_name='guest_group') + op.drop_table('guest_group') + op.drop_index(op.f('ix_case_status_id'), table_name='case_status') + op.drop_table('case_status') + op.drop_index(op.f('ix_applicant_type_id'), table_name='applicant_type') + op.drop_table('applicant_type') + # ### end Alembic commands ### diff --git a/api-v2/alembic/versions/cfc4e41b69d3_initial_form_api.py b/api-v2/alembic/versions/cfc4e41b69d3_initial_form_api.py new file mode 100644 index 00000000..8379e0d9 --- /dev/null 
diff --git a/api-v2/alembic/versions/cfc4e41b69d3_initial_form_api.py b/api-v2/alembic/versions/cfc4e41b69d3_initial_form_api.py
new file mode 100644
index 00000000..8379e0d9
--- /dev/null
+++ b/api-v2/alembic/versions/cfc4e41b69d3_initial_form_api.py
@@ -0,0 +1,85 @@
+"""initial_form_api
+
+Revision ID: cfc4e41b69d3
+Revises: e4c8bb426528
+Create Date: 2024-05-05 17:14:51.771328
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'cfc4e41b69d3'
+down_revision = 'e4c8bb426528'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.create_table('field_properties',
+        sa.Column('properties_id', sa.Integer(), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('field_type', sa.String(length=50), nullable=False),
+        sa.Column('choices', sa.JSON(), nullable=True),
+        sa.CheckConstraint("field_type IN ('date', 'dropdown', 'multiple_choice', 'email', 'file_upload', 'group', 'long_text', 'number', 'short_text', 'yes_no')", name='chk_field_type'),
+        sa.PrimaryKeyConstraint('properties_id')
+    )
+    op.create_table('field_validations',
+        sa.Column('validations_id', sa.Integer(), nullable=False),
+        sa.Column('required', sa.Boolean(), nullable=False),
+        sa.Column('max_length', sa.Integer(), nullable=True),
+        sa.PrimaryKeyConstraint('validations_id')
+    )
+    op.create_table('forms',
+        sa.Column('form_id', sa.Integer(), nullable=False),
+        sa.Column('title', sa.String(length=255), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('created_at', sa.DateTime(), nullable=True),
+        sa.PrimaryKeyConstraint('form_id')
+    )
+    op.create_table('field_groups',
+        sa.Column('group_id', sa.Integer(), nullable=False),
+        sa.Column('form_id', sa.Integer(), nullable=False),
+        sa.Column('title', sa.String(length=255), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.ForeignKeyConstraint(['form_id'], ['forms.form_id'], ),
+        sa.PrimaryKeyConstraint('group_id')
+    )
+    op.create_table('fields',
+        sa.Column('field_id', sa.Integer(), nullable=False),
+        sa.Column('ref', sa.String(length=255), nullable=False),
+        sa.Column('properties_id', sa.Integer(), nullable=False),
+        sa.Column('validations_id', sa.Integer(), nullable=False),
+        sa.Column('group_id', sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(['group_id'], ['field_groups.group_id'], ),
+        sa.ForeignKeyConstraint(['properties_id'], ['field_properties.properties_id'], ),
+        sa.ForeignKeyConstraint(['validations_id'], ['field_validations.validations_id'], ),
+        sa.PrimaryKeyConstraint('field_id')
+    )
+    op.create_table('responses',
+        sa.Column('answer_id', sa.Integer(), nullable=False),
+        sa.Column('user_id', sa.Integer(), nullable=False),
+        sa.Column('field_id', sa.Integer(), nullable=False),
+        sa.Column('answer_text', sa.Text(), nullable=True),
+        sa.ForeignKeyConstraint(['field_id'], ['fields.field_id'], ),
+        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+        sa.PrimaryKeyConstraint('answer_id')
+    )
+    with op.batch_alter_table('role', schema=None) as batch_op:
+        batch_op.create_unique_constraint('role', ['name'])
+    with op.batch_alter_table('user', schema=None) as batch_op:
+        batch_op.alter_column('lastName',
+               existing_type=sa.VARCHAR(length=255),
+               nullable=True,
+               existing_server_default=sa.text("'Unknown'"))
+
+def downgrade() -> None:
+    with op.batch_alter_table('role', schema=None) as batch_op:
+        batch_op.drop_constraint('role', type_='unique')
+    op.drop_table('responses')
+    op.drop_table('fields')
+    op.drop_table('field_groups')
+    op.drop_table('forms')
+    op.drop_table('field_validations')
+    op.drop_table('field_properties')
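Taken together, the six tables above model a form as a tree: `forms` contain `field_groups`, groups contain `fields`, each field points at shared `field_properties` and `field_validations` rows, and `responses` stores one user's answer per field. (Note the `responses.field_id` column is typed `Integer` to match the `fields.field_id` key it references.) A hedged sketch of seeding a one-field form with SQLAlchemy Core by reflecting the tables this migration creates; titles and values are illustrative:

```python
import sqlalchemy as sa


def seed_minimal_form(engine: sa.engine.Engine) -> None:
    """Illustrative only: insert a one-field form using the tables above."""
    metadata = sa.MetaData()
    metadata.reflect(engine, only=[
        "forms", "field_groups", "field_properties",
        "field_validations", "fields",
    ])
    forms = metadata.tables["forms"]
    groups = metadata.tables["field_groups"]
    props = metadata.tables["field_properties"]
    vals = metadata.tables["field_validations"]
    fields = metadata.tables["fields"]

    with engine.begin() as conn:
        form_id = conn.execute(sa.insert(forms).values(
            title="Guest Intake", description="Example form")
        ).inserted_primary_key[0]
        group_id = conn.execute(sa.insert(groups).values(
            form_id=form_id, title="Personal Details")
        ).inserted_primary_key[0]
        props_id = conn.execute(sa.insert(props).values(
            description="Position", field_type="dropdown",
            choices=["Manager", "Developer"])
        ).inserted_primary_key[0]
        vals_id = conn.execute(sa.insert(vals).values(
            required=True, max_length=12)
        ).inserted_primary_key[0]
        # A field ties a group to its shared properties and validations.
        conn.execute(sa.insert(fields).values(
            ref="position", properties_id=props_id,
            validations_id=vals_id, group_id=group_id))
```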
diff --git a/api-v2/alembic/versions/e4c8bb426528_add_user_types.py b/api-v2/alembic/versions/e4c8bb426528_add_user_types.py
new file mode 100644
index 00000000..42cce722
--- /dev/null
+++ b/api-v2/alembic/versions/e4c8bb426528_add_user_types.py
@@ -0,0 +1,73 @@
+"""Add user types
+
+Revision ID: e4c8bb426528
+Revises: ec8b1c17739a
+Create Date: 2024-03-10 21:47:13.942845
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import text
+from app.user_roles import UserRole
+
+# revision identifiers, used by Alembic.
+revision = 'e4c8bb426528'
+down_revision = 'ec8b1c17739a'
+branch_labels = None
+depends_on = None
+
+def upgrade() -> None:
+    '''
+    1. Add one table:
+        1. role - Store available application user roles
+    2. Prepopulate the role table with four role types: Admin, Host, Guest, Coordinator
+    3. Update the user table to add the first, middle, last name, and role_id columns.
+        * All existing users will be given the first and last name "Unknown".
+        * Assign all existing users to the Guest role.
+    4. Drop the host table.
+        * There is no way to map host users back to the user table. We would need a user id foreign
+          key, or at least an email address.
+    '''
+    role_table = op.create_table('role',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('name')
+    )
+    op.bulk_insert(role_table,
+        [{'name': UserRole.ADMIN.value},
+         {'name': UserRole.HOST.value},
+         {'name': UserRole.GUEST.value},
+         {'name': UserRole.COORDINATOR.value}])
+    op.create_index(op.f('ix_role_id'), 'role', ['id'])
+
+    conn = op.get_bind()
+    guest_role_id = conn.execute(text("SELECT id FROM role WHERE name = 'Guest'")).fetchone()[0]
+
+    with op.batch_alter_table('user', schema=None) as batch_op:
+        # Each existing user will get the first and last names "Unknown" by default
+        # and they will be assigned to the "Guest" user role.
+        batch_op.add_column(sa.Column('firstName', sa.String(length=255), nullable=False, server_default='Unknown'))
+        batch_op.add_column(sa.Column('middleName', sa.String(length=255), nullable=True))
+        batch_op.add_column(sa.Column('lastName', sa.String(length=255), nullable=True))
+        batch_op.add_column(sa.Column('role_id', sa.Integer, nullable=False, server_default=str(guest_role_id)))
+        batch_op.create_foreign_key('fk_user_role_id', 'role', ['role_id'], ['id'])
+
+    op.drop_table('host')
+
+def downgrade() -> None:
+    with op.batch_alter_table('user', schema=None) as batch_op:
+        batch_op.drop_constraint('fk_user_role_id', type_='foreignkey')
+        batch_op.drop_column('role_id')
+        batch_op.drop_column('lastName')
+        batch_op.drop_column('middleName')
+        batch_op.drop_column('firstName')
+
+    op.drop_index(op.f('ix_role_id'), table_name='role')
+    op.drop_table('role')
+    op.create_table('host',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_host_id'), 'host', ['id'])
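One caveat about the revision above: it imports `UserRole` from application code, so a later change to `app/user_roles.py` would silently alter what this frozen migration inserts. A common defensive pattern is to inline the values in the migration itself; a hedged sketch of the equivalent upgrade step under that approach:

```python
from alembic import op
import sqlalchemy as sa

# Role names inlined so this historical revision stays stable even if the
# application's UserRole enum is renamed or extended later.
ROLE_NAMES = ("Admin", "Host", "Guest", "Coordinator")


def upgrade() -> None:
    role_table = op.create_table(
        "role",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.bulk_insert(role_table, [{"name": name} for name in ROLE_NAMES])
```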
diff --git a/api-v2/alembic/versions/ec8b1c17739a_drop_unused_tables.py b/api-v2/alembic/versions/ec8b1c17739a_drop_unused_tables.py
new file mode 100644
index 00000000..a6713646
--- /dev/null
+++ b/api-v2/alembic/versions/ec8b1c17739a_drop_unused_tables.py
@@ -0,0 +1,299 @@
+"""Drop unused tables
+
+Revision ID: ec8b1c17739a
+Revises: 3ceec084158f
+Create Date: 2024-03-10 15:54:55.578328
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'ec8b1c17739a'
+down_revision = '3ceec084158f'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index('ix_housing_program_pariticipant_id', table_name='housing_program_pariticipant')
+    op.drop_table('housing_program_pariticipant')
+    op.drop_index('ix_applicant_status_id', table_name='applicant_status')
+    op.drop_table('applicant_status')
+    op.drop_index('ix_applicant_type_id', table_name='applicant_type')
+    op.drop_table('applicant_type')
+    op.drop_index('ix_match_failure_id', table_name='match_failure')
+    op.drop_table('match_failure')
+    op.drop_index('ix_program_case_log_id', table_name='program_case_log')
+    op.drop_table('program_case_log')
+    op.drop_index('ix_intake_response_value_id', table_name='intake_response_value')
+    op.drop_table('intake_response_value')
+    op.drop_index('ix_case_status_id', table_name='case_status')
+    op.drop_table('case_status')
+    op.drop_index('ix_image_tag_type_id', table_name='image_tag_type')
+    op.drop_table('image_tag_type')
+    op.drop_index('ix_applicant_uploaded_image_id', table_name='applicant_uploaded_image')
+    op.drop_table('applicant_uploaded_image')
+    op.drop_index('ix_match_fail_condition_id', table_name='match_fail_condition')
+    op.drop_table('match_fail_condition')
+    op.drop_index('ix_host_household_member_id', table_name='host_household_member')
+    op.drop_table('host_household_member')
+    op.drop_index('ix_applicant_id', table_name='applicant')
+    op.drop_table('applicant')
+    op.drop_index('ix_intake_question_id', table_name='intake_question')
+    op.drop_table('intake_question')
+    op.drop_index('ix_image_tag_id', table_name='image_tag')
+    op.drop_table('image_tag')
+    op.drop_index('ix_match_status_id', table_name='match_status')
+    op.drop_table('match_status')
+    op.drop_index('ix_guest_group_id', table_name='guest_group')
+    op.drop_table('guest_group')
+    op.drop_index('ix_applicant_status_log_id', table_name='applicant_status_log')
+    op.drop_table('applicant_status_log')
+    op.drop_index('ix_host_household_id', table_name='host_household')
+    op.drop_table('host_household')
+    op.drop_index('ix_match_result_id', table_name='match_result')
+    op.drop_table('match_result')
+    op.drop_index('ix_intake_question_set_id', table_name='intake_question_set')
+    op.drop_table('intake_question_set')
+    op.drop_index('ix_program_case_id', table_name='program_case')
+    op.drop_table('program_case')
+    op.drop_index('ix_intake_question_type_id', table_name='intake_question_type')
+    op.drop_table('intake_question_type')
+    op.drop_index('ix_guest_group_member_id', table_name='guest_group_member')
+    op.drop_table('guest_group_member')
+    op.drop_index('ix_group_match_result_id', table_name='group_match_result')
+    op.drop_table('group_match_result')
+    op.drop_index('ix_program_coordinator_id', table_name='program_coordinator')
+    op.drop_table('program_coordinator')
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust!
### + op.create_table('program_coordinator', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('user', sa.INTEGER(), nullable=False), + sa.Column('housing_program', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), + sa.ForeignKeyConstraint(['user'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_program_coordinator_id', 'program_coordinator', ['id'], unique=False) + op.create_table('group_match_result', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('guest_group', sa.INTEGER(), nullable=False), + sa.Column('host_household', sa.INTEGER(), nullable=False), + sa.Column('match_status', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), + sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), + sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_group_match_result_id', 'group_match_result', ['id'], unique=False) + op.create_table('guest_group_member', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('guest_group', sa.INTEGER(), nullable=False), + sa.Column('applicant', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_guest_group_member_id', 'guest_group_member', ['id'], unique=False) + op.create_table('intake_question_type', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('type_description', sa.VARCHAR(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_intake_question_type_id', 'intake_question_type', ['id'], unique=False) + op.create_table('program_case', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('coordinator', sa.INTEGER(), nullable=False), + sa.Column('case_status', sa.INTEGER(), nullable=False), + sa.Column('host_household', sa.INTEGER(), nullable=False), + sa.Column('guest_group', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['case_status'], ['case_status.id'], ), + sa.ForeignKeyConstraint(['coordinator'], ['program_coordinator.id'], ), + sa.ForeignKeyConstraint(['guest_group'], ['guest_group.id'], ), + sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_program_case_id', 'program_case', ['id'], unique=False) + op.create_table('intake_question_set', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('question_set_name', sa.VARCHAR(), nullable=False), + sa.Column('housing_program', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_intake_question_set_id', 'intake_question_set', ['id'], unique=False) + op.create_table('match_result', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant_a', sa.INTEGER(), nullable=False), + sa.Column('applicant_b', sa.INTEGER(), nullable=False), + sa.Column('match_status', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['applicant_a'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['applicant_b'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['match_status'], ['match_status.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_match_result_id', 'match_result', ['id'], unique=False) + op.create_table('host_household', + sa.Column('id', 
sa.INTEGER(), nullable=False), + sa.Column('household_name', sa.VARCHAR(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_host_household_id', 'host_household', ['id'], unique=False) + op.create_table('applicant_status_log', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('log_description', sa.VARCHAR(), nullable=False), + sa.Column('logtime', sa.DATETIME(), nullable=False), + sa.Column('applicant', sa.INTEGER(), nullable=False), + sa.Column('src_status', sa.INTEGER(), nullable=False), + sa.Column('dest_status', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['dest_status'], ['applicant_status.id'], ), + sa.ForeignKeyConstraint(['src_status'], ['applicant_status.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_applicant_status_log_id', 'applicant_status_log', ['id'], unique=False) + op.create_table('guest_group', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('group_name', sa.VARCHAR(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_guest_group_id', 'guest_group', ['id'], unique=False) + op.create_table('match_status', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('status_description', sa.VARCHAR(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_match_status_id', 'match_status', ['id'], unique=False) + op.create_table('image_tag', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant', sa.INTEGER(), nullable=False), + sa.Column('image_tag_type', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant_uploaded_image.id'], ), + sa.ForeignKeyConstraint(['image_tag_type'], ['image_tag_type.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_image_tag_id', 'image_tag', ['id'], unique=False) + op.create_table('intake_question', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant_type', sa.INTEGER(), nullable=False), + sa.Column('intake_question_type', sa.INTEGER(), nullable=False), + sa.Column('intake_question_set', sa.INTEGER(), nullable=False), + sa.Column('question_text', sa.VARCHAR(), nullable=False), + sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), + sa.ForeignKeyConstraint(['intake_question_set'], ['intake_question_set.id'], ), + sa.ForeignKeyConstraint(['intake_question_type'], ['intake_question_type.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_intake_question_id', 'intake_question', ['id'], unique=False) + op.create_table('applicant', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant_type', sa.INTEGER(), nullable=False), + sa.Column('applicant_status', sa.INTEGER(), nullable=False), + sa.Column('user', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['applicant_status'], ['applicant_status.id'], ), + sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), + sa.ForeignKeyConstraint(['user'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_applicant_id', 'applicant', ['id'], unique=False) + op.create_table('host_household_member', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('host_household', sa.INTEGER(), nullable=False), + sa.Column('applicant', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['host_household'], ['host_household.id'], ), + sa.PrimaryKeyConstraint('id') + ) + 
op.create_index('ix_host_household_member_id', 'host_household_member', ['id'], unique=False) + op.create_table('match_fail_condition', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('response_value_a', sa.INTEGER(), nullable=False), + sa.Column('response_value_b', sa.INTEGER(), nullable=False), + sa.Column('reason_text', sa.VARCHAR(), nullable=False), + sa.ForeignKeyConstraint(['response_value_a'], ['intake_response_value.id'], ), + sa.ForeignKeyConstraint(['response_value_b'], ['intake_response_value.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_match_fail_condition_id', 'match_fail_condition', ['id'], unique=False) + op.create_table('applicant_uploaded_image', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant', sa.INTEGER(), nullable=False), + sa.Column('image_data', sa.BLOB(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_applicant_uploaded_image_id', 'applicant_uploaded_image', ['id'], unique=False) + op.create_table('image_tag_type', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('tag_text', sa.VARCHAR(), nullable=False), + sa.Column('tag_description', sa.VARCHAR(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_image_tag_type_id', 'image_tag_type', ['id'], unique=False) + op.create_table('case_status', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('status_description', sa.VARCHAR(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_case_status_id', 'case_status', ['id'], unique=False) + op.create_table('intake_response_value', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('intake_question', sa.INTEGER(), nullable=False), + sa.Column('response_text', sa.VARCHAR(), nullable=False), + sa.ForeignKeyConstraint(['intake_question'], ['intake_question.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_intake_response_value_id', 'intake_response_value', ['id'], unique=False) + op.create_table('program_case_log', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('log_description', sa.VARCHAR(), nullable=False), + sa.Column('logtime', sa.DATETIME(), nullable=False), + sa.Column('program_case', sa.INTEGER(), nullable=False), + sa.Column('src_status', sa.INTEGER(), nullable=False), + sa.Column('dest_status', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['dest_status'], ['case_status.id'], ), + sa.ForeignKeyConstraint(['program_case'], ['program_case.id'], ), + sa.ForeignKeyConstraint(['src_status'], ['case_status.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_program_case_log_id', 'program_case_log', ['id'], unique=False) + op.create_table('match_failure', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('match_result', sa.INTEGER(), nullable=False), + sa.Column('failed_condition', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['failed_condition'], ['match_fail_condition.id'], ), + sa.ForeignKeyConstraint(['match_result'], ['match_result.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_match_failure_id', 'match_failure', ['id'], unique=False) + op.create_table('applicant_type', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant_type_description', sa.VARCHAR(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_applicant_type_id', 'applicant_type', ['id'], unique=False) + op.create_table('applicant_status', + 
sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant_type', sa.INTEGER(), nullable=False), + sa.Column('status_description', sa.VARCHAR(), nullable=False), + sa.ForeignKeyConstraint(['applicant_type'], ['applicant_type.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_applicant_status_id', 'applicant_status', ['id'], unique=False) + op.create_table('housing_program_pariticipant', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('applicant', sa.INTEGER(), nullable=False), + sa.Column('housing_program', sa.INTEGER(), nullable=False), + sa.ForeignKeyConstraint(['applicant'], ['applicant.id'], ), + sa.ForeignKeyConstraint(['housing_program'], ['housing_program.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_housing_program_pariticipant_id', 'housing_program_pariticipant', ['id'], unique=False) + # ### end Alembic commands ### diff --git a/api-v2/app/access/user_repo.py b/api-v2/app/access/user_repo.py new file mode 100644 index 00000000..0fb65721 --- /dev/null +++ b/api-v2/app/access/user_repo.py @@ -0,0 +1,87 @@ +from typing import List + +from ..models import UnmatchedGuestCase, UnmatchedGuestCaseStatus, User, Role +from ..user_roles import UmatchedCaseStatus, UserRole + + +class UnmatchedCaseRepository: + + def __init__(self, session): + self.session = session + + def add_case(self, guest_id: int, + coordinator_id: int) -> UnmatchedGuestCase: + status_id = self.session.query(UnmatchedGuestCaseStatus).filter_by( + status_text=UmatchedCaseStatus.IN_PROGRESS).first().id + new_guest_case = UnmatchedGuestCase(guest_id=guest_id, + coordinator_id=coordinator_id, + status_id=status_id) + self.session.add(new_guest_case) + self.session.commit() + + return new_guest_case + + def delete_case_for_guest(self, guest_id: int) -> bool: + guest_case = self.session.query(UnmatchedGuestCaseStatus).filter_by( + guest_id=guest_id).first() + if guest_case: + self.session.delete(guest_case) + self.session.commit() + return True + return False + + def get_case_for_guest(self, guest_id: int) -> UnmatchedGuestCase: + return self.session.query(UnmatchedGuestCase).filter_by( + guest_id=guest_id).first() + + +class UserRepository: + + def __init__(self, session): + self.session = session + + def _get_role(self, role: UserRole) -> Role: + db_role = self.session.query(Role).filter_by(name=role.value).first() + if not db_role: + raise ValueError(f"{role.value} is not a valid user role type") + return db_role + + def add_user(self, + email: str, + role: UserRole, + firstName: str, + middleName: str = None, + lastName: str = None) -> User: + new_role = self._get_role(role) + new_user = User(email=email, + firstName=firstName, + middleName=middleName, + lastName=lastName, + role_id=new_role.id) + self.session.add(new_user) + self.session.commit() + + return new_user + + def delete_user(self, user_id: int) -> bool: + user = self.session.query(User).filter_by(id=user_id).first() + if user: + self.session.delete(user) + self.session.commit() + return True + return False + + def get_user_by_id(self, id: int) -> User: + return self.session.query(User).filter_by(id=id).first() + + def get_user(self, email: str) -> User: + return self.session.query(User).filter_by(email=email).first() + + def get_all_users(self) -> List[User]: + return self.session.query(User).all() + + def get_user_id(self, email: str) -> int: + return self.session.query(User).filter_by(email=email).first().id + + def get_users_with_role(self, role: UserRole) -> List[User]: + return 
self.session.query(User).filter_by(role=self._get_role(role)).all()
diff --git a/api-v2/app/access/user_roles.py b/api-v2/app/access/user_roles.py
new file mode 100644
index 00000000..b6dc43af
--- /dev/null
+++ b/api-v2/app/access/user_roles.py
@@ -0,0 +1,13 @@
+from enum import Enum
+
+
+class UserRole(Enum):
+    ADMIN = "Admin"
+    GUEST = "Guest"
+    HOST = "Host"
+    COORDINATOR = "Coordinator"
+
+
+class UmatchedCaseStatus(Enum):
+    IN_PROGRESS = "In Progress"
+    COMPLETE = "Complete"
diff --git a/api-v2/app/api/main.py b/api-v2/app/api/main.py
index c96ebe12..1de32e8b 100644
--- a/api-v2/app/api/main.py
+++ b/api-v2/app/api/main.py
@@ -5,3 +5,4 @@
 api_router = APIRouter()
 
 api_router.include_router(auth.router, prefix="/auth", tags=["auth"])
+api_router.include_router(auth.router, prefix="/intake-profile", tags=["intake-profile"])
diff --git a/api-v2/app/core/db.py b/api-v2/app/core/db.py
index a6e2f98b..98a9ce2d 100644
--- a/api-v2/app/core/db.py
+++ b/api-v2/app/core/db.py
@@ -1,7 +1,7 @@
 from sqlalchemy import create_engine
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import sessionmaker
-from core.config import settings
+from app.core.config import settings
 
 
 engine = create_engine(
diff --git a/api-v2/app/intake_profile/__init__.py b/api-v2/app/intake_profile/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/intake_profile/controller.py b/api-v2/app/intake_profile/controller.py
new file mode 100644
index 00000000..dd2db127
--- /dev/null
+++ b/api-v2/app/intake_profile/controller.py
@@ -0,0 +1,37 @@
+import logging
+
+from fastapi import Depends, APIRouter, HTTPException, Response, Security
+from fastapi.responses import RedirectResponse
+from app.api.deps import (
+    get_db,
+    get_cognito_client,
+    requires_auth,
+    allow_roles,
+    role_to_cognito_group_map,
+)
+from app.models import DataAccessLayer
+from app.repositories.forms import FormsRepository
+
+router = APIRouter()
+
+
+@router.post("/guest/")
+def post_guest_intake_profile(body: dict, guest=Depends(requires_auth)):
+    forms_repo = FormsRepository(DataAccessLayer.session())
+
+    form_id = forms_repo.add_form(body)
+    form = forms_repo.get_form_json(form_id)
+    if form:
+        return form
+    raise HTTPException(status_code=404, detail="Form not found")
+
+
+@router.get("/guest/{form_id}")
+def get_guest_intake_profile(form_id: int, guest=Depends(requires_auth)):
+    forms_repo = FormsRepository(DataAccessLayer.session())
+
+    form = forms_repo.get_form_json(form_id)
+    if form:
+        return form
+    raise HTTPException(status_code=404,
+                        detail=f"Form with id {form_id} does not exist.")
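Assuming the include in `app/api/main.py` is updated to point at this controller's router rather than `auth.router` (likely a copy-paste slip), the aggregate router is mounted at `/api`, and `requires_auth` stands in for the undefined `aim_guest` dependency, the routes above can be exercised roughly as follows. The payload shape is whatever `FormsRepository.add_form` accepts, so the fields shown are illustrative:

```python
from fastapi.testclient import TestClient

from app.main import app  # assumed application entry point

client = TestClient(app)

# Create a guest intake form, then fetch it back by id.
payload = {"title": "Guest Intake", "description": "Example form"}
created = client.post("/api/intake-profile/guest/", json=payload)
assert created.status_code == 200

form_id = created.json().get("form_id")  # illustrative response field
fetched = client.get(f"/api/intake-profile/guest/{form_id}")
assert fetched.status_code == 200
```

In a test suite the authentication dependency would typically be stubbed with `app.dependency_overrides[requires_auth] = lambda: None` so the routes can be called without a Cognito token.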
diff --git a/api-v2/app/intake_profile/forms/forms.py b/api-v2/app/intake_profile/forms/forms.py
new file mode 100644
index 00000000..66bd8bc9
--- /dev/null
+++ b/api-v2/app/intake_profile/forms/forms.py
@@ -0,0 +1,82 @@
+from typing import List
+
+from sqlalchemy import Column, Integer, String, ForeignKey, Text, Boolean, DateTime, JSON, CheckConstraint
+from sqlalchemy.orm import relationship
+from sqlalchemy.sql import func
+from app.core.db import Base
+
+
+class Form(Base):
+    __tablename__ = 'forms'
+    form_id = Column(Integer, primary_key=True)
+    title = Column(String(255), nullable=False)
+    description = Column(Text)
+    created_at = Column(DateTime, default=func.current_timestamp())
+
+    def get_field_ids(self) -> List[int]:
+        return [
+            field.field_id for group in self.field_groups
+            for field in group.fields
+        ]
+
+
+class FieldProperties(Base):
+    __tablename__ = 'field_properties'
+    properties_id = Column(Integer, primary_key=True)
+    description = Column(Text)
+    field_type = Column(String(50), nullable=False)
+    choices = Column(JSON)
+
+    __table_args__ = (CheckConstraint(
+        "field_type IN ('date', 'dropdown', 'multiple_choice', 'email', 'file_upload', 'group', 'long_text', 'number', 'short_text', 'yes_no')",
+        name='chk_field_type'), )
+
+
+class FieldValidations(Base):
+    __tablename__ = 'field_validations'
+    validations_id = Column(Integer, primary_key=True)
+    required = Column(Boolean, nullable=False, default=False)
+    max_length = Column(Integer)  # NULL if not applicable
+
+
+class FieldGroup(Base):
+    __tablename__ = 'field_groups'
+    group_id = Column(Integer, primary_key=True)
+    form_id = Column(Integer, ForeignKey('forms.form_id'), nullable=False)
+    title = Column(String(255), nullable=False)
+    description = Column(Text)
+    form = relationship("Form", back_populates="field_groups")
+
+
+class Field(Base):
+    __tablename__ = 'fields'
+    field_id = Column(Integer, primary_key=True)
+    ref = Column(String(255), nullable=False)
+    properties_id = Column(Integer,
+                           ForeignKey('field_properties.properties_id'),
+                           nullable=False)
+    validations_id = Column(Integer,
+                            ForeignKey('field_validations.validations_id'),
+                            nullable=False)
+    group_id = Column(Integer, ForeignKey('field_groups.group_id'))
+    properties = relationship("FieldProperties")
+    validations = relationship("FieldValidations")
+    group = relationship("FieldGroup", back_populates="fields")
+
+
+class Response(Base):
+    __tablename__ = 'responses'
+    answer_id = Column(Integer, primary_key=True)
+    user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
+    field_id = Column(Integer, ForeignKey('fields.field_id'), nullable=False)
+    answer_text = Column(Text)
+    user = relationship("User")
+    field = relationship("Field")
+
+
+Form.field_groups = relationship("FieldGroup",
+                                 order_by=FieldGroup.group_id,
+                                 back_populates="form")
+FieldGroup.fields = relationship("Field",
+                                 order_by=Field.field_id,
+                                 back_populates="group")
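The relationships attached at the bottom of `forms.py` make the whole form tree navigable from a single `Form` instance; a short sketch of building and traversing one, assuming the `engine` configured in `app/core/db.py` and an already-migrated database:

```python
from sqlalchemy.orm import Session

from app.core.db import engine
from app.intake_profile.forms.forms import (Field, FieldGroup,
                                            FieldProperties,
                                            FieldValidations, Form)
from app.models import User  # ensures the "User" mapper used by Response resolves

with Session(engine) as session:
    form = Form(title="Guest Intake")
    group = FieldGroup(title="Personal Details", form=form)
    field = Field(
        ref="position",
        properties=FieldProperties(field_type="dropdown",
                                   choices=["Manager", "Developer"]),
        validations=FieldValidations(required=True, max_length=12),
        group=group,
    )
    # Adding the root Form cascades to the group, field, properties,
    # and validations through the save-update cascade.
    session.add(form)
    session.commit()

    # Relationship traversal: collect every field id on the form.
    print(form.get_field_ids())
```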
+ """ + if session is None: + raise Exception( + "IntakeProfileRepository is not valid without a SQLAlchemy Session" + ) + self.session = session + + def add(self, intake_profile: IntakeProfile): + """Add the given IntakeProfile to the repository.""" + with self.session as session: + session.add(intake_profile) + + def get(self, intake_profile_id: int) -> Optional[IntakeProfile]: + """Get an IntakeProfile with the given identifier.""" + return self.session.query(IntakeProfile).get(intake_profile_id) diff --git a/api-v2/app/intake_profile/schemas.py b/api-v2/app/intake_profile/schemas.py new file mode 100644 index 00000000..b98f7919 --- /dev/null +++ b/api-v2/app/intake_profile/schemas.py @@ -0,0 +1,207 @@ +from pydantic import BaseModel +from typing import Optional + +from enum import Enum + + +class UserRoleEnum(str, Enum): + ADMIN = "admin" + GUEST = "guest" + HOST = "host" + COORDINATOR = "coordinator" + + +class RoleBase(BaseModel): + id: int + type: UserRoleEnum + + class Config: + from_attributes = True + + +class UserBase(BaseModel): + email: str + firstName: str + middleName: Optional[str] = None + lastName: Optional[str] = None + + +class UserCreate(UserBase): + password: str + role: UserRoleEnum + + +class User(UserBase): + id: int + role: RoleBase + + class Config: + from_attributes = True + + +class UserSignIn(BaseModel): + email: str + password: str + + +class UserSignInResponse(BaseModel): + user: User + token: str + + +class SmartNested(Nested): + ''' + Schema attribute used to serialize nested attributes to + primary keys, unless they are already loaded. This + enables serialization of complex nested relationships. + + Modified from + https://marshmallow-sqlalchemy.readthedocs.io/en/latest/recipes.html#smart-nested-field + ''' + + def serialize(self, attr, obj, accessor=None): + if hasattr(obj, attr): + value = getattr(obj, attr, None) + if value is None: + return None + elif hasattr(value, 'id'): + return {"id": value.id} + else: + return super(SmartNested, self).serialize(attr, obj, accessor) + else: + raise AttributeError( + f"{obj.__class__.__name__} object has no attribute '{attr}'") + + +class RoleSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class UnmatchedCaseSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class UnmatchedCaseStatusSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class UserSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + + +class HousingProgramServiceProviderSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class HousingProgramSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class FieldValidationsSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + required : bool + max_length : int + + +class FieldPropertiesSchema(BaseModel): + + class Meta: + model = FieldProperties + include_relationships = True + load_instance = True + exclude = ('properties_id', ) + + description = auto_field() + field_type = auto_field() + choices = auto_field() + + +class FieldSchema(BaseModel): + + class Meta: + model = Field + include_relationships = True + load_instance = True + exclude = ('properties_id', 'validations_id', 'group_id') + + field_id = auto_field(dump_only=True) + ref = auto_field() + properties = SmartNested(FieldPropertiesSchema) + validations = SmartNested(FieldValidationsSchema) + + +class FieldGroupSchema(BaseModel): + + class Meta: + model = FieldGroup + 
diff --git a/api-v2/app/intake_profile/schemas.py b/api-v2/app/intake_profile/schemas.py
new file mode 100644
index 00000000..b98f7919
--- /dev/null
+++ b/api-v2/app/intake_profile/schemas.py
@@ -0,0 +1,218 @@
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+# The schemas from SmartNested onward were carried over from the
+# marshmallow-based v1 API and still rely on marshmallow-sqlalchemy helpers.
+from marshmallow import ValidationError, post_load
+from marshmallow_sqlalchemy import auto_field
+from marshmallow_sqlalchemy.fields import Nested
+
+from app.intake_profile.forms.forms import (Field, FieldGroup,
+                                            FieldProperties, Form, Response)
+from app.models import User
+
+
+class UserRoleEnum(str, Enum):
+    ADMIN = "admin"
+    GUEST = "guest"
+    HOST = "host"
+    COORDINATOR = "coordinator"
+
+
+class RoleBase(BaseModel):
+    id: int
+    type: UserRoleEnum
+
+    class Config:
+        from_attributes = True
+
+
+class UserBase(BaseModel):
+    email: str
+    firstName: str
+    middleName: Optional[str] = None
+    lastName: Optional[str] = None
+
+
+class UserCreate(UserBase):
+    password: str
+    role: UserRoleEnum
+
+
+class User(UserBase):
+    id: int
+    role: RoleBase
+
+    class Config:
+        from_attributes = True
+
+
+class UserSignIn(BaseModel):
+    email: str
+    password: str
+
+
+class UserSignInResponse(BaseModel):
+    user: User
+    token: str
+
+
+class SmartNested(Nested):
+    '''
+    Schema attribute used to serialize nested attributes to
+    primary keys, unless they are already loaded. This
+    enables serialization of complex nested relationships.
+
+    Modified from
+    https://marshmallow-sqlalchemy.readthedocs.io/en/latest/recipes.html#smart-nested-field
+    '''
+
+    def serialize(self, attr, obj, accessor=None):
+        if hasattr(obj, attr):
+            value = getattr(obj, attr, None)
+            if value is None:
+                return None
+            elif hasattr(value, 'id'):
+                return {"id": value.id}
+            else:
+                return super(SmartNested, self).serialize(attr, obj, accessor)
+        else:
+            raise AttributeError(
+                f"{obj.__class__.__name__} object has no attribute '{attr}'")
+
+
+class RoleSchema(BaseModel):
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+class UnmatchedCaseSchema(BaseModel):
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+class UnmatchedCaseStatusSchema(BaseModel):
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+class UserSchema(BaseModel):
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+class HousingProgramServiceProviderSchema(BaseModel):
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+class HousingProgramSchema(BaseModel):
+
+    model_config = ConfigDict(from_attributes=True)
+
+
+class FieldValidationsSchema(BaseModel):
+
+    model_config = ConfigDict(from_attributes=True)
+
+    required: bool
+    max_length: Optional[int]
+
+
+class FieldPropertiesSchema(BaseModel):
+
+    class Meta:
+        model = FieldProperties
+        include_relationships = True
+        load_instance = True
+        exclude = ('properties_id', )
+
+    description = auto_field()
+    field_type = auto_field()
+    choices = auto_field()
+
+
+class FieldSchema(BaseModel):
+
+    class Meta:
+        model = Field
+        include_relationships = True
+        load_instance = True
+        exclude = ('properties_id', 'validations_id', 'group_id')
+
+    field_id = auto_field(dump_only=True)
+    ref = auto_field()
+    properties = SmartNested(FieldPropertiesSchema)
+    validations = SmartNested(FieldValidationsSchema)
+
+
+class FieldGroupSchema(BaseModel):
+
+    class Meta:
+        model = FieldGroup
+        include_relationships = True
+        load_instance = True
+        exclude = ('group_id', 'form_id')
+
+    title = auto_field()
+    description = auto_field()
+    fields = SmartNested(FieldSchema, many=True)
+
+
+class FormSchema(BaseModel):
+
+    class Meta:
+        model = Form
+        include_relationships = True
+        load_instance = True
+        exclude = ('form_id', )
+
+    title = auto_field()
+    description = auto_field()
+    field_groups = SmartNested(FieldGroupSchema, many=True)
+
+
+class ResponseSchema(BaseModel):
+
+    class Meta:
+        model = Response
+        include_relationships = True
+        load_instance = True
+        exclude = ('answer_id', )
+
+    user_id = auto_field(load_only=True)
+    field_id = auto_field(load_only=True)
+    answer_text = auto_field()
+    user = SmartNested(UserSchema, only=['name'], required=False, missing=None)
+    field = SmartNested(FieldSchema,
+                        only=['field_id', 'ref', 'properties'],
+                        required=False,
+                        missing=None)
+
+    @post_load
+    def make_response(self, data, **kwargs):
+        if data.user is None:
+            user = self._session.query(User).get(data.user_id)
+            if not user:
+                raise ValidationError('User not found', 'user_id')
+            data.user = user
+
+        if data.field is None:
+            field = self._session.query(Field).get(data.field_id)
+            if not field:
+                raise ValidationError('Field not found', 'field_id')
+            data.field = field
+
+        return data
+
+
+user_schema = UserSchema()
+users_schema = UserSchema(many=True)
+service_provider_schema = HousingProgramServiceProviderSchema()
+service_provider_list_schema = HousingProgramServiceProviderSchema(many=True)
+form_schema = FormSchema()
+response_schema = ResponseSchema(many=True)
+unmatched_cs_schema = UnmatchedCaseStatusSchema()
+unmatched_c_schema = UnmatchedCaseSchema()
diff --git a/api-v2/app/matching/__init__.py b/api-v2/app/matching/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/matching/controller.py b/api-v2/app/matching/controller.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/matching/model.py b/api-v2/app/matching/model.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/matching/schemas.py b/api-v2/app/matching/schemas.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/models.py b/api-v2/app/models.py
index 4d48b243..3aaa5d07 100644
--- a/api-v2/app/models.py
+++ b/api-v2/app/models.py
@@ -1,9 +1,14 @@
+"""Model."""
+
 from sqlalchemy import Column, ForeignKey, Integer, String
 from sqlalchemy.orm import relationship
+from sqlalchemy.orm import Session
 from sqlalchemy.orm import validates as validates_sqlachemy
+from sqlalchemy import create_engine, text
+from sqlalchemy.engine import Engine
+from sqlalchemy.exc import SQLAlchemyError
-
-from core.db import Base
+from app.core.db import Base
 
 
 class User(Base):
@@ -21,8 +26,7 @@ class User(Base):
     def validate_first_name(self, key, value):
         if not value or not value.strip():
             raise ValueError(
-                f"{key} must contain at least one non-space character"
-            )
+                f"{key} must contain at least one non-space character")
         return value.strip()
 
 
@@ -32,3 +36,53 @@ class Role(Base):
     type = Column(String, nullable=False, unique=True)
 
     users = relationship("User", back_populates="role")
+
+
+class UnmatchedGuestCase(Base):
+    __tablename__ = "unmatched_guest_case"
+
+    id = Column(Integer, primary_key=True, index=True)
+    guest_id = Column(Integer, ForeignKey('user.id'), nullable=False)
+    coordinator_id = Column(Integer, ForeignKey('user.id'), nullable=False)
+    status_id = Column(Integer,
+                       ForeignKey('unmatched_guest_case_status.id'),
+                       nullable=False)
+    status = 
relationship("UnmatchedGuestCaseStatus", back_populates="cases") + + +class UnmatchedGuestCaseStatus(Base): + __tablename__ = "unmatched_guest_case_status" + id = Column(Integer, primary_key=True, index=True) + status_text = Column(String(255), nullable=False, unique=True) + cases = relationship("UnmatchedGuestCase", back_populates="status") + + +class DataAccessLayer: + _engine: Engine = None + + @classmethod + def db_init(cls, conn_string): + # Check that a database engine is not already set. The test project will + # hook into the DataAccessLayer to create a test project database engine. + if cls._engine: return + + cls._engine = create_engine(conn_string, echo=True, future=True) + Base.metadata.create_all(bind=cls._engine) + + @classmethod + def session(cls) -> Session: + return Session(cls._engine) + + @classmethod + def revision_id(cls) -> str: + "Return the database alembic migration revision number." + if not cls._engine: return "" + try: + with cls._engine.connect() as conn: + # Using text() to ensure the query is treated as a literal SQL statement + result = conn.execute( + text("SELECT version_num FROM alembic_version")) + revision_id = result.scalar() + return revision_id + except SQLAlchemyError: + # This catches errors such as missing alembic_version table + return "" diff --git a/api-v2/app/onboarding/__init__.py b/api-v2/app/onboarding/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/app/onboarding/controller.py b/api-v2/app/onboarding/controller.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/app/onboarding/model.py b/api-v2/app/onboarding/model.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/app/onboarding/schemas.py b/api-v2/app/onboarding/schemas.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/app/schemas.py b/api-v2/app/schemas.py index 97e22489..b98f7919 100644 --- a/api-v2/app/schemas.py +++ b/api-v2/app/schemas.py @@ -47,3 +47,161 @@ class UserSignIn(BaseModel): class UserSignInResponse(BaseModel): user: User token: str + + +class SmartNested(Nested): + ''' + Schema attribute used to serialize nested attributes to + primary keys, unless they are already loaded. This + enables serialization of complex nested relationships. 
+ + Modified from + https://marshmallow-sqlalchemy.readthedocs.io/en/latest/recipes.html#smart-nested-field + ''' + + def serialize(self, attr, obj, accessor=None): + if hasattr(obj, attr): + value = getattr(obj, attr, None) + if value is None: + return None + elif hasattr(value, 'id'): + return {"id": value.id} + else: + return super(SmartNested, self).serialize(attr, obj, accessor) + else: + raise AttributeError( + f"{obj.__class__.__name__} object has no attribute '{attr}'") + + +class RoleSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class UnmatchedCaseSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class UnmatchedCaseStatusSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class UserSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + + +class HousingProgramServiceProviderSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class HousingProgramSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + +class FieldValidationsSchema(BaseModel): + + model_config = ConfigDict(from_attributes=True) + + required : bool + max_length : int + + +class FieldPropertiesSchema(BaseModel): + + class Meta: + model = FieldProperties + include_relationships = True + load_instance = True + exclude = ('properties_id', ) + + description = auto_field() + field_type = auto_field() + choices = auto_field() + + +class FieldSchema(BaseModel): + + class Meta: + model = Field + include_relationships = True + load_instance = True + exclude = ('properties_id', 'validations_id', 'group_id') + + field_id = auto_field(dump_only=True) + ref = auto_field() + properties = SmartNested(FieldPropertiesSchema) + validations = SmartNested(FieldValidationsSchema) + + +class FieldGroupSchema(BaseModel): + + class Meta: + model = FieldGroup + include_relationships = True + load_instance = True + exclude = ('group_id', 'form_id') + + title = auto_field() + description = auto_field() + fields = SmartNested(FieldSchema, many=True) + + +class FormSchema(BaseModel): + + class Meta: + model = Form + include_relationships = True + load_instance = True + exclude = ('form_id', ) + + title = auto_field() + description = auto_field() + field_groups = SmartNested(FieldGroupSchema, many=True) + + +class ResponseSchema(BaseModel): + + class Meta: + model = Response + include_relationship = True + load_instance = True + exclude = ('answer_id', ) + + user_id = auto_field(load_only=True) + field_id = auto_field(load_only=True) + answer_text = auto_field() + user = SmartNested(UserSchema, only=['name'], required=False, missing=None) + field = SmartNested(FieldSchema, + only=['field_id', 'ref', 'properties'], + required=False, + missing=None) + + @post_load + def make_response(self, data, **kwargs): + if data.user is None: + user = self._session.query(User).get(data.user_id) + if not user: + raise ValidationError('User not found', 'user_id') + data.user = user + + if data.field is None: + field = self._session.query(Field).get(data.field_id) + if not field: + raise ValidationError('Field not found', 'field_id') + data.field = field + + return data + + +user_schema = UserSchema() +users_schema = UserSchema(many=True) +service_provider_schema = HousingProgramServiceProviderSchema() +service_provider_list_schema = HousingProgramServiceProviderSchema(many=True) +form_schema = FormSchema() +response_schema = ResponseSchema(many=True) +unmatched_cs_schema = UnmatchedCaseStatusSchema() +unmatched_c_schema = 
UnmatchedCaseSchema()
diff --git a/api-v2/app/stays/__init__.py b/api-v2/app/stays/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/stays/controller.py b/api-v2/app/stays/controller.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/stays/model.py b/api-v2/app/stays/model.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/stays/schemas.py b/api-v2/app/stays/schemas.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/tenant_housing_provider/__init__.py b/api-v2/app/tenant_housing_provider/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/tenant_housing_provider/controller.py b/api-v2/app/tenant_housing_provider/controller.py
new file mode 100644
index 00000000..e69de29b
diff --git a/api-v2/app/tenant_housing_provider/model.py b/api-v2/app/tenant_housing_provider/model.py
new file mode 100644
index 00000000..d3eeb9c8
--- /dev/null
+++ b/api-v2/app/tenant_housing_provider/model.py
@@ -0,0 +1,24 @@
+from sqlalchemy import Column, ForeignKey, Integer, String
+
+from app.core.db import Base
+
+
+class HousingProgramServiceProvider(Base):
+    __tablename__ = "housing_program_service_provider"
+
+    id = Column(Integer, primary_key=True, index=True)
+    provider_name = Column(String, nullable=False)
+
+    def __repr__(self):
+        return f"HousingProgramServiceProvider(id={self.id},provider_name='{self.provider_name}')"
+
+
+class HousingProgram(Base):
+    __tablename__ = "housing_program"
+
+    id = Column(Integer, primary_key=True, index=True)
+    program_name = Column(String, nullable=False)
+    service_provider = Column(
+        Integer,
+        ForeignKey('housing_program_service_provider.id'),
+        nullable=False)
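A minimal sketch of how the two models relate: a provider row must exist before a program can reference it through the `service_provider` foreign key. The engine URL and names here are illustrative:

```python
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

from app.core.db import Base
from app.tenant_housing_provider.model import (HousingProgram,
                                               HousingProgramServiceProvider)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)  # or run the alembic migrations

with Session(engine) as session:
    provider = HousingProgramServiceProvider(provider_name="Example Provider")
    session.add(provider)
    session.flush()  # assigns provider.id for the FK below

    session.add(HousingProgram(program_name="Host Homes",
                               service_provider=provider.id))
    session.commit()
```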
+ """ + with DataAccessLayer.session() as session: + provider = session.get(HousingProgramServiceProvider, provider_id) + if provider: + session.delete(provider) + session.commit() + return True + + return False + + def get_service_provider_by_id( + self, provider_id: int) -> Optional[HousingProgramServiceProvider]: + """Get details about a housing program service provider from an ID + + :param provider_id: The ID of the service provider to read, update or delete + :type provider_id: int + """ + with DataAccessLayer.session() as session: + return session.get(HousingProgramServiceProvider, provider_id) + + def get_service_providers(self) -> List[HousingProgramServiceProvider]: + """ + Get a list of all housing program service providers. + """ + with DataAccessLayer.session() as session: + return session.scalars(select(HousingProgramServiceProvider)).all() + + def update_service_provider( + self, new_name: str, + provider_id: int) -> Optional[HousingProgramServiceProvider]: + """ + Update a housing program service provider with + id 'provider_id'. Return the updated service provider + if update is successful, otherwise return None. + """ + with DataAccessLayer.session() as session: + provider_to_update = session.get(HousingProgramServiceProvider, + provider_id) + if provider_to_update: + provider_to_update.provider_name = new_name + session.commit() + session.refresh(provider_to_update) + return provider_to_update + return None + + def provider_count(self, existing_session: Session = None): + + def count(lcl_session: Session): + return lcl_session.scalar( + select(func.count(HousingProgramServiceProvider.id))) + + if existing_session is None: + with DataAccessLayer.session() as session: + return count(session) + return count(existing_session) diff --git a/api-v2/poetry.lock b/api-v2/poetry.lock index 9ef32e8c..d7dfcb1f 100644 --- a/api-v2/poetry.lock +++ b/api-v2/poetry.lock @@ -549,19 +549,19 @@ standard = ["uvicorn[standard] (>=0.15.0)"] [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "greenlet" @@ -988,13 +988,13 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.1-py3-none-any.whl", hash = "sha256:facaa5a3c57aa1e053e3da7b49e0cc31fe0113ca42a4659d5c2e98e545624afe"}, + {file = "platformdirs-4.3.1.tar.gz", hash = "sha256:63b79589009fa8159973601dd4563143396b35c5f93a58b36f9049ff046949b1"}, ] [package.extras] @@ -1691,17 +1691,17 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "tox" -version = "4.18.0" +version = "4.18.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.18.0-py3-none-any.whl", hash = "sha256:0a457400cf70615dc0627eb70d293e80cd95d8ce174bb40ac011011f0c03a249"}, - {file = "tox-4.18.0.tar.gz", hash = "sha256:5dfa1cab9f146becd6e351333a82f9e0ade374451630ba65ee54584624c27b58"}, + {file = "tox-4.18.1-py3-none-any.whl", hash = "sha256:35d472032ee1f73fe20c3e0e73d7073a4e85075c86ff02c576f9fc7c6a15a578"}, + {file = "tox-4.18.1.tar.gz", hash = "sha256:3c0c96bc3a568a5c7e66387a4cfcf8c875b52e09f4d47c9f7a277ec82f1a0b11"}, ] [package.dependencies] -cachetools = ">=5.4" +cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" filelock = ">=3.15.4" @@ -1712,8 +1712,8 @@ pyproject-api = ">=1.7.1" virtualenv = ">=20.26.3" [package.extras] -docs = ["furo (>=2024.7.18)", "sphinx (>=7.4.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.3)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", 
"hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.3)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.17)", "sphinx-autodoc-typehints (>=2.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"] +testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=74.1.2)", "time-machine (>=2.15)", "wheel (>=0.44)"] [[package]] name = "typer" @@ -1842,13 +1842,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.4" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, + {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, ] [package.dependencies] diff --git a/api-v2/tests/access/__init_.py b/api-v2/tests/access/__init_.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/tests/test_authentication.py b/api-v2/tests/access/test_authentication.py similarity index 100% rename from api-v2/tests/test_authentication.py rename to api-v2/tests/access/test_authentication.py diff --git a/api-v2/tests/test_host_controller.py b/api-v2/tests/access/test_host_controller.py similarity index 100% rename from api-v2/tests/test_host_controller.py rename to api-v2/tests/access/test_host_controller.py diff --git a/api-v2/tests/test_mocking.py b/api-v2/tests/access/test_mocking.py similarity index 100% rename from api-v2/tests/test_mocking.py rename to api-v2/tests/access/test_mocking.py diff --git a/api-v2/tests/test_user_repo.py b/api-v2/tests/access/test_user_repo.py similarity index 100% rename from api-v2/tests/test_user_repo.py rename to api-v2/tests/access/test_user_repo.py diff --git a/api-v2/tests/conftest.py b/api-v2/tests/conftest.py deleted file mode 100644 index 8b811068..00000000 --- a/api-v2/tests/conftest.py +++ /dev/null @@ -1,161 +0,0 @@ -import os - -import pytest -import secrets -from collections.abc import Generator -from pytest import MonkeyPatch -import sqlalchemy -from sqlalchemy.orm import Session - -from openapi_server.configs.staging import StagingHUUConfig -from openapi_server.configs.development import DevelopmentHUUConfig -from openapi_server.app import create_app -from openapi_server.configs.mock_aws import AWSMockService, AWSTemporaryUserpool -from openapi_server.models.database import DataAccessLayer - -def pytest_addoption(parser: pytest.Parser) -> None: - ''' - pytest hook used to register argparse-style options and ini-style config values, - called once at the beginning of a test run. 
- ''' - parser.addoption( - "--mode", - action="store", - default="debug", - help="run tests in debug or release mode", - ) - -def pytest_configure(config: pytest.Config) -> None: - ''' - pytest hook used to perform initial test application configuration, - called at the beginning of a test run, within conftest.py file. - ''' - mode = config.getoption("mode", default='debug').lower() - if mode == 'debug': - # All application configurations are defined explicitly in code. The - # system environment is not used. All resources that can be safely - # mocked, will be mocked (e.g. mock AWS cognito API calls) - with MonkeyPatch().context() as m: - for env_var in os.environ.keys(): - m.delenv(env_var) - app_config = DevelopmentHUUConfig( - TESTING=True, - FLASK_DEBUG=True, - DATABASE_URL = 'sqlite:///:memory:' - ) - config.mock_aws = True - elif mode == 'release': - # Load configuration from the environment, to allow the use of - # secrets, and disable the mocking of any resources - from dotenv import load_dotenv, find_dotenv - dot_env = find_dotenv() - if dot_env: - load_dotenv(dot_env) - with MonkeyPatch().context() as m: - # The real userpool should never be used while testing - # Our test infrastructure will create temporary user - # pools for each test. - m.setenv("COGNITO_CLIENT_ID", "Totally fake client id") - m.setenv("COGNITO_CLIENT_SECRET", "Yet another fake secret12") - m.setenv("COGNITO_REDIRECT_URI", "Redirect your way back to writing more test cases") - m.setenv("COGNITO_USER_POOL_ID", "Water's warm. IDs are fake") - m.setenv("SECRET_KEY", secrets.token_urlsafe(32)) - m.setenv("DATABASE_URL", "sqlite:///:memory:") - app_config = StagingHUUConfig( - TESTING=True, - FLASK_DEBUG=True - ) - config.mock_aws = False - else: - raise KeyError(f"pytest application configuration mode {mode} not" - "recognized. Only debug and release modes supported.") - - config.app_config = app_config - -@pytest.fixture(scope="session") -def app_config(request): - return request.config.app_config - -@pytest.fixture(scope="session") -def is_mocking(pytestconfig): - return pytestconfig.mock_aws - -@pytest.fixture() -def app(pytestconfig, empty_db_session): - flask_app = create_app(pytestconfig.app_config).app - - # Tests will never operate on real user data, so provide a - # temporary userpool even if mocking is disabled - app_environment_cls = AWSMockService if pytestconfig.mock_aws else AWSTemporaryUserpool - - with app_environment_cls(flask_app): - yield flask_app - -@pytest.fixture -def alembic_engine(): - ''' - Override the pytest-alembic default engine to use an in-memory - database at the base revision. - ''' - return sqlalchemy.create_engine("sqlite:///:memory:") - -@pytest.fixture() -def empty_db_session(alembic_runner, alembic_engine) -> Generator[Session, None, None]: - ''' - SetUp and TearDown an empty in-memory database for - database repository tests. - - This fixture does not initialize the full application. 
- ''' - # Upgrade the database to the current head revision - # This applies all of our alembic migration scripts - # to the empty database - alembic_runner.migrate_up_to("heads") - DataAccessLayer._engine = alembic_engine - - yield DataAccessLayer.session() - - test_engine, DataAccessLayer._engine = DataAccessLayer._engine, None - test_engine.dispose() - -@pytest.fixture() -def empty_db_session_provider(empty_db_session): - class _provider: - def session(): return empty_db_session - - return _provider - -@pytest.fixture() -def client(app): - return app.test_client() - -@pytest.fixture -def empty_environment(monkeypatch: MonkeyPatch) -> MonkeyPatch: - ''' - Create an isolated environment for testing purposes. - The environment variables are cleared to ensure the - configuration object is not dependent on the machine configuration. - ''' - for env_var in os.environ.keys(): - monkeypatch.delenv(env_var) - return monkeypatch - -@pytest.fixture -def fake_prod_env(empty_environment: MonkeyPatch) -> MonkeyPatch: - ''' - Define a fake production environment by setting each of the required - production configuration variables with fake values. - ''' - empty_environment.setenv("ENV", "production") - empty_environment.setenv("FLASK_DEBUG", "False") - empty_environment.setenv("TESTING", "False") - empty_environment.setenv("SECRET_KEY", "A completely made up fake secret !@#$12234") - empty_environment.setenv("DATABASE_URL", "sqlite:///:memory:") - empty_environment.setenv("COGNITO_CLIENT_ID", "Totally fake client id") - empty_environment.setenv("COGNITO_CLIENT_SECRET", "Yet another fake secret12") - empty_environment.setenv("COGNITO_REGION", "Not even the region actually exists") - empty_environment.setenv("COGNITO_REDIRECT_URI", "Redirect your way back to writing more test cases") - empty_environment.setenv("COGNITO_USER_POOL_ID", "Water's warm. 
IDs are fake") - empty_environment.setenv("COGNITO_ACCESS_ID", "If you need fake access, use this ID") - empty_environment.setenv("COGNITO_ACCESS_KEY", "WARNING: This is a real-ly fake key 12345a6sdf") - return empty_environment \ No newline at end of file diff --git a/api-v2/tests/intake_profile/__init__.py b/api-v2/tests/intake_profile/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/tests/intake_profile/test_forms.py b/api-v2/tests/intake_profile/test_forms.py new file mode 100644 index 00000000..0a9113c3 --- /dev/null +++ b/api-v2/tests/intake_profile/test_forms.py @@ -0,0 +1,134 @@ +from types import MappingProxyType + +from app.repositories.forms import FormsRepository +from app.repositories.user_repo import UserRepository, UserRole + +TEST_FORM_READ_ONLY = MappingProxyType({ + "title": + "Employee Onboarding", + "description": + "Collect necessary employee data.", + "field_groups": [{ + "title": + "Personal Details", + "description": + "Please enter your personal details.", + "fields": [{ + "ref": "position", + "properties": { + "description": "Position in the company", + "field_type": "dropdown", + "choices": ['Manager', 'Developer', 'Designer'], + }, + "validations": { + "required": True, + "max_length": 12 + } + }, { + "ref": "service_length", + "properties": { + "description": "Years in the company", + "field_type": "number", + "choices": None, + }, + "validations": { + "required": False, + "max_length": None + } + }] + }, { + "title": + "Second Group", + "description": + "A second field group.", + "fields": [{ + "ref": "start date", + "properties": { + "description": "Start date", + "field_type": "date", + "choices": "11-22-2005", + }, + "validations": { + "required": True, + "max_length": 12 + } + }] + }] +}) + + +def assert_form_equal(actual_form: dict, expected_form: dict): + """ + Do a deep equality check of a form, excluding dynamically + assigned values like timestamps and primary key ids. 
+ """ + actual_copy = actual_form.copy() + del actual_copy['created_at'] + for group in actual_copy['field_groups']: + del group['form'] + for field in group['fields']: + del field['field_id'] + del field['group'] + + assert actual_copy == expected_form + + +def test_add_form_valid_json(empty_db_session_provider): + form_json = dict(TEST_FORM_READ_ONLY) + + form_repo = FormsRepository(empty_db_session_provider.session()) + created_form_id = form_repo.add_form(form_json) + retrieved_form = form_repo.get_form_json(created_form_id) + + assert_form_equal(retrieved_form, form_json) + + +def test_add_get_responses(empty_db_session_provider): + with empty_db_session_provider.session() as session: + user_repo = UserRepository(session) + form_repo = FormsRepository(session) + + user_repo.add_user('fake@email.com', UserRole.COORDINATOR, 'firstname') + user_id = user_repo.get_user_id('fake@email.com') + created_form_id = form_repo.add_form(TEST_FORM_READ_ONLY) + retrieved_form = form_repo.get_form_json(created_form_id) + + def _get_field_id(lcl_form, ref): + for group in lcl_form['field_groups']: + for field in group['fields']: + if field['ref'] == ref: + return int(field['field_id']) + raise ValueError(f'ref {ref} not found in test form') + + expected_responses = [{ + "user_id": + user_id, + "field_id": + _get_field_id(retrieved_form, 'position'), + "answer_text": + "Designer" + }, { + "user_id": + user_id, + "field_id": + _get_field_id(retrieved_form, 'service_length'), + "answer_text": + "5" + }, { + "user_id": + user_id, + "field_id": + _get_field_id(retrieved_form, 'start date'), + "answer_text": + '2024-05-19' + }] + form_repo.add_user_responses(user_id, expected_responses) + + retrieved_answers = form_repo.get_user_responses( + user_id, created_form_id) + + assert len(retrieved_answers) == 3 + for expected, actual in zip(expected_responses, retrieved_answers): + assert expected['answer_text'] == actual['answer_text'] + assert expected['user_id'] == actual['user']['id'] + assert expected['field_id'] == actual['field']['field_id'] diff --git a/api-v2/tests/test_forms_schema.py b/api-v2/tests/intake_profile/test_forms_schema.py similarity index 100% rename from api-v2/tests/test_forms_schema.py rename to api-v2/tests/intake_profile/test_forms_schema.py diff --git a/api-v2/tests/matching/__init__.py b/api-v2/tests/matching/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/tests/onboarding/__init__.py b/api-v2/tests/onboarding/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/tests/stays/__init__.py b/api-v2/tests/stays/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/tests/tenant_housing_provider/__init__.py b/api-v2/tests/tenant_housing_provider/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/tests/test_service_provider_controller.py b/api-v2/tests/tenant_housing_provider/test_service_provider_controller.py similarity index 100% rename from api-v2/tests/test_service_provider_controller.py rename to api-v2/tests/tenant_housing_provider/test_service_provider_controller.py diff --git a/api-v2/tests/test_service_provider_repository.py b/api-v2/tests/tenant_housing_provider/test_service_provider_repository.py similarity index 100% rename from api-v2/tests/test_service_provider_repository.py rename to api-v2/tests/tenant_housing_provider/test_service_provider_repository.py diff --git a/api-v2/tests/test_alembic_migration.py b/api-v2/tests/test_alembic_migration.py index 500fa552..36f56aa5 
100644 --- a/api-v2/tests/test_alembic_migration.py +++ b/api-v2/tests/test_alembic_migration.py @@ -1,6 +1,6 @@ -from openapi_server.models.database import DataAccessLayer -from openapi_server.models.user_roles import UserRole -from openapi_server.repositories.user_repo import UserRepository +from app.models import DataAccessLayer +from app.user_roles import UserRole +from app.repositories.user_repo import UserRepository # Importing these tests will register them within our test project # These tests do an excellent job of detecting errors in the alembic @@ -10,25 +10,27 @@ from pytest_alembic.tests import test_model_definitions_match_ddl from pytest_alembic.tests import test_up_down_consistency + def test_db_session_version(empty_db_session): - ''' + """ Test that the pytest in-memory database is at the most - up-to-date alembic migration version. This will ensure all - the require database objects and pre-populated fields will + up-to-date alembic migration version. This will ensure all + the require database objects and pre-populated fields will be available. - ''' + """ # Adding a new database revision will break this test case - # Before updating to the new revision please add additional + # Before updating to the new revision please add additional # test cases below that check the integrity of your new migration assert DataAccessLayer.revision_id() == 'cfc4e41b69d3' + def test_user_roles_available(empty_db_session): - ''' - Test that all of the UserRole types are pre-populated within + """ + Test that all of the UserRole types are pre-populated within the Role table after migrating the database to the HEAD revision. - ''' + """ user_repo = UserRepository(empty_db_session) for role in UserRole: db_role = user_repo._get_role(role) - assert db_role.name == role.value \ No newline at end of file + assert db_role.name == role.value diff --git a/api-v2/tests/test_forms_repo.py b/api-v2/tests/test_forms_repo.py deleted file mode 100644 index 14f665a1..00000000 --- a/api-v2/tests/test_forms_repo.py +++ /dev/null @@ -1,127 +0,0 @@ -from types import MappingProxyType - -from openapi_server.repositories.forms import FormsRepository -from openapi_server.repositories.user_repo import UserRepository, UserRole - -TEST_FORM_READ_ONLY = MappingProxyType({ - "title": "Employee Onboarding", - "description": "Collect necessary employee data.", - "field_groups": [ - { - "title": "Personal Details", - "description": "Please enter your personal details.", - "fields": [ - { - "ref": "position", - "properties": { - "description": "Position in the company", - "field_type": "dropdown", - "choices": ['Manager', 'Developer', 'Designer'], - }, - "validations": { - "required": True, - "max_length": 12 - } - }, - { - "ref": "service_length", - "properties": { - "description": "Years in the company", - "field_type": "number", - "choices": None, - }, - "validations": { - "required": False, - "max_length": None - } - } - ] - }, - { - "title": "Second Group", - "description": "A second field group.", - "fields": [ - { - "ref": "start date", - "properties": { - "description": "Start date", - "field_type": "date", - "choices": "11-22-2005", - }, - "validations": { - "required": True, - "max_length": 12 - } - } - ] - } - ] -}) - -def assert_form_equal(actual_form: dict, expected_form: dict): - ''' - Do a deep equality check of a form, excluding dynamically - assigned values like timestamps and primary key ids. 
- ''' - actual_copy = actual_form.copy() - del actual_copy['created_at'] - for group in actual_copy['field_groups']: - del group['form'] - for field in group['fields']: - del field['field_id'] - del field['group'] - - assert actual_copy == expected_form - -def test_add_form_valid_json(empty_db_session_provider): - form_json = dict(TEST_FORM_READ_ONLY) - - form_repo = FormsRepository(empty_db_session_provider.session()) - created_form_id = form_repo.add_form(form_json) - retrieved_form = form_repo.get_form_json(created_form_id) - - assert_form_equal(retrieved_form, form_json) - -def test_add_get_responses(empty_db_session_provider): - with empty_db_session_provider.session() as session: - user_repo = UserRepository(session) - form_repo = FormsRepository(session) - - user_repo.add_user('fake@email.com', UserRole.COORDINATOR, 'firstname') - user_id = user_repo.get_user_id('fake@email.com') - created_form_id = form_repo.add_form(TEST_FORM_READ_ONLY) - retrieved_form = form_repo.get_form_json(created_form_id) - - def _get_field_id(lcl_form, ref): - for group in lcl_form['field_groups']: - for field in group['fields']: - if field['ref'] == ref: - return int(field['field_id']) - raise ValueError(f'ref {ref} not found in test form') - - expected_responses = [ - { - "user_id": user_id, - "field_id": _get_field_id(retrieved_form, 'position'), - "answer_text": "Designer" - }, - { - "user_id": user_id, - "field_id": _get_field_id(retrieved_form, 'service_length'), - "answer_text": "5" - }, - { - "user_id": user_id, - "field_id": _get_field_id(retrieved_form, 'start date'), - "answer_text": '2024-05-19' - } - ] - form_repo.add_user_responses(user_id, expected_responses) - - retrieved_answers = form_repo.get_user_responses(user_id, created_form_id) - - assert len(retrieved_answers) == 3 - for expected, actual in zip(expected_responses, retrieved_answers): - assert expected['answer_text'] == actual['answer_text'] - assert expected['user_id'] == actual['user']['id'] - assert expected['field_id'] == actual['field']['field_id'] \ No newline at end of file diff --git a/api-v2/tox.ini b/api-v2/tox.ini index b4aee3ae..f45b9c48 100644 --- a/api-v2/tox.ini +++ b/api-v2/tox.ini @@ -11,7 +11,7 @@ allowlist_externals = poetry commands_pre = poetry install commands = - poetry run pytest {tty:--color=yes} {posargs:tests} --cov=app --mode=debug + poetry run pytest {tty:--color=yes} {posargs:tests} --cov=app #--mode=debug [testenv:releasetest] description = run tests without mocking using pytest diff --git a/api/openapi_server/models/database.py b/api/openapi_server/models/database.py index f8fe9664..4647cbe7 100644 --- a/api/openapi_server/models/database.py +++ b/api/openapi_server/models/database.py @@ -28,6 +28,12 @@ def validate_first_name(self, key, value): raise ValueError(f"{key} must contain at least one non-space character") return value.strip() +class Role(Base): + __tablename__ = "role" + id = Column(Integer, primary_key=True, index=True) + name = Column(String, nullable=False, unique=True) + users = relationship("User", back_populates="role") + class UnmatchedGuestCase(Base): __tablename__ = "unmatched_guest_case" id = Column(Integer, primary_key=True, index=True) @@ -42,12 +48,6 @@ class UnmatchedGuestCaseStatus(Base): status_text = Column(String(255), nullable=False, unique=True) cases = relationship("UnmatchedGuestCase", back_populates="status") -class Role(Base): - __tablename__ = "role" - id = Column(Integer, primary_key=True, index=True) - name = Column(String, nullable=False, unique=True) - users = 
relationship("User", back_populates="role") - class HousingProgramServiceProvider(Base): __tablename__ = "housing_program_service_provider" diff --git a/api/tests/test_forms_repo.py b/api/tests/test_forms_repo.py index 14f665a1..8b24c021 100644 --- a/api/tests/test_forms_repo.py +++ b/api/tests/test_forms_repo.py @@ -124,4 +124,4 @@ def _get_field_id(lcl_form, ref): for expected, actual in zip(expected_responses, retrieved_answers): assert expected['answer_text'] == actual['answer_text'] assert expected['user_id'] == actual['user']['id'] - assert expected['field_id'] == actual['field']['field_id'] \ No newline at end of file + assert expected['field_id'] == actual['field']['field_id'] From e8e96a7b61ef4fa0ae2ef2d770e32a67368ef3d4 Mon Sep 17 00:00:00 2001 From: John Wroge <72668920+johnwroge@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:52:44 -0400 Subject: [PATCH 27/70] Added forgot password route handler in api/routes/auth.py, created forgot password response and request classes, and reformatted the calculate secret hash function used in previous api files --- api-v2/app/api/deps.py | 12 ++++++++++++ api-v2/app/api/routes/auth.py | 33 ++++++++++++++++++++++++++++++++- api-v2/app/schemas.py | 10 +++++++++- 3 files changed, 53 insertions(+), 2 deletions(-) diff --git a/api-v2/app/api/deps.py b/api-v2/app/api/deps.py index 5ca8c1da..f9acfc22 100644 --- a/api-v2/app/api/deps.py +++ b/api-v2/app/api/deps.py @@ -1,6 +1,8 @@ import boto3 import jwt import time +import hmac +import base64 from fastapi import Request, HTTPException from fastapi.security import SecurityScopes @@ -87,3 +89,13 @@ def allow_roles(request: Request, security_scopes: SecurityScopes): if not contains_group: raise HTTPException(status_code=403, detail="Unauthorized") return True + + + +def calc_secret_hash(username: str) -> str: + message = username + settings.COGNITO_CLIENT_ID + secret = bytearray(settings.COGNITO_CLIENT_SECRET, "utf-8") + dig = hmac.new( + secret, msg=message.encode("utf-8"), digestmod="sha256" + ).digest() + return base64.b64encode(dig).decode() diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py index 6ab3691d..77a446bf 100644 --- a/api-v2/app/api/routes/auth.py +++ b/api-v2/app/api/routes/auth.py @@ -6,7 +6,8 @@ from botocore.exceptions import ClientError -from schemas import UserCreate, UserSignIn, UserSignInResponse +from schemas import UserCreate, UserSignIn, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse + from crud import create_user, delete_user, get_user from api.deps import ( get_db, @@ -14,6 +15,7 @@ requires_auth, allow_roles, role_to_cognito_group_map, + calc_secret_hash ) from utils import calc_secret_hash @@ -154,3 +156,32 @@ def signin( ) def secret(): return {"message": "Welcome to the secret route"} + + +""" +# Forgot Password Route + +This route handles forgot password requests by hashing credentials and sending to AWS Cognito. 
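+
+An illustrative request (a sketch only; the payload shape follows the
+ForgotPasswordRequest schema added below, and the /api path prefix is
+assumed from the application's router setup):
+
+    POST /api/auth/forgot_password
+    {"email": "user@example.com"}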
+
+"""
+
+
+@router.post("/forgot_password", response_model=ForgotPasswordResponse)
+def forgot_password(
+    body: ForgotPasswordRequest,
+    cognito_client=Depends(get_cognito_client)
+):
+    secret_hash = calc_secret_hash(body.email)
+
+    try:
+        response = cognito_client.forgot_password(
+            ClientId=settings.COGNITO_CLIENT_ID,
+            SecretHash=secret_hash,
+            Username=body.email
+        )
+    except ClientError as e:
+        code = e.response['Error']['Code']
+        message = e.response['Error']['Message']
+        raise HTTPException(status_code=401, detail={"code": code, "message": message})
+
+    return {"message": "Password reset instructions sent"}
diff --git a/api-v2/app/schemas.py b/api-v2/app/schemas.py
index b98f7919..8d696296 100644
--- a/api-v2/app/schemas.py
+++ b/api-v2/app/schemas.py
@@ -49,7 +49,7 @@ class UserSignInResponse(BaseModel):
     token: str
 
-class SmartNested(Nested):
+class SmartNested(Nested):
     '''
     Schema attribute used to serialize nested attributes to
     primary keys, unless they are already loaded. This
@@ -196,7 +196,15 @@ def make_response(self, data, **kwargs):
         return data
 
 
+class ForgotPasswordRequest(BaseModel):
+    email: str
+
+
+class ForgotPasswordResponse(BaseModel):
+    message: str
+
 user_schema = UserSchema()
 users_schema = UserSchema(many=True)
 service_provider_schema = HousingProgramServiceProviderSchema()

From 33459143c7deec487533bcc258ca8a7f4f7ccb87 Mon Sep 17 00:00:00 2001
From: John Wroge <72668920+johnwroge@users.noreply.github.com>
Date: Sat, 7 Sep 2024 22:08:53 -0400
Subject: [PATCH 28/70] Imported boto3

---
 api-v2/app/api/routes/auth.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py
index 77a446bf..69f1ca51 100644
--- a/api-v2/app/api/routes/auth.py
+++ b/api-v2/app/api/routes/auth.py
@@ -1,4 +1,5 @@
 import logging
+import boto3
 
 from fastapi import Depends, APIRouter, HTTPException, Response, Security
 from fastapi.responses import RedirectResponse
@@ -6,6 +7,7 @@
 
 from botocore.exceptions import ClientError
 
+
 from schemas import UserCreate, UserSignIn, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse
 
 from crud import create_user, delete_user, get_user

From 53efe4bd668afd9dff866c8245850f349719f484 Mon Sep 17 00:00:00 2001
From: John Wroge <72668920+johnwroge@users.noreply.github.com>
Date: Sat, 7 Sep 2024 22:28:34 -0400
Subject: [PATCH 29/70] I added the confirm forgot password route handler to
 verify forgot password requests

---
 api-v2/app/api/routes/auth.py | 32 +++++++++++++++++++++++++++++++-
 api-v2/app/schemas.py         |  8 ++++++++
 2 files changed, 39 insertions(+), 1 deletion(-)

diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py
index 69f1ca51..ecb0d949 100644
--- a/api-v2/app/api/routes/auth.py
+++ b/api-v2/app/api/routes/auth.py
@@ -8,7 +8,7 @@
 
-from schemas import UserCreate, UserSignIn, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse
+from schemas import UserCreate, UserSignIn, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse, ConfirmForgotPasswordResponse, ConfirmForgotPasswordRequest
 
 from crud import create_user, delete_user, get_user
 from api.deps import (
@@ -187,3 +187,33 @@ def forgot_password(
         raise HTTPException(status_code=401, detail={"code": code, "message": message})
 
     return {"message": "Password reset instructions sent"}
+
+
+"""
+# Confirm Forgot Password Route
+
+This route completes the forgot-password flow by forwarding the user's
+confirmation code and new password to AWS Cognito for verification.
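+
+An illustrative request (a sketch only; the payload shape follows the
+ConfirmForgotPasswordRequest schema added below, and the /api path prefix
+is assumed from the application's router setup):
+
+    POST /api/auth/confirm_forgot_password
+    {"email": "user@example.com", "code": "123456", "password": "NewPassw0rd!"}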
+
+"""
+
+@router.post("/confirm_forgot_password", response_model=ConfirmForgotPasswordResponse)
+def confirm_forgot_password(
+    body: ConfirmForgotPasswordRequest,
+    cognito_client=Depends(get_cognito_client)
+):
+    secret_hash = calc_secret_hash(body.email)
+
+    try:
+        response = cognito_client.confirm_forgot_password(
+            ClientId=settings.COGNITO_CLIENT_ID,
+            SecretHash=secret_hash,
+            Username=body.email,
+            ConfirmationCode=body.code,
+            Password=body.password
+        )
+    except ClientError as e:
+        code = e.response['Error']['Code']
+        message = e.response['Error']['Message']
+        raise HTTPException(status_code=401, detail={"code": code, "message": message})
+
+    return {"message": "Password has been reset successfully"}
diff --git a/api-v2/app/schemas.py b/api-v2/app/schemas.py
index 8d696296..44f7135d 100644
--- a/api-v2/app/schemas.py
+++ b/api-v2/app/schemas.py
@@ -204,6 +204,14 @@ class ForgotPasswordResponse(BaseModel):
     message: str
+
+
+class ConfirmForgotPasswordRequest(BaseModel):
+    email: str
+    code: str
+    password: str
+
+
+class ConfirmForgotPasswordResponse(BaseModel):
+    message: str
 
 user_schema = UserSchema()
 users_schema = UserSchema(many=True)

From 08053ce3ca424293f66cd3486e3d834759920d56 Mon Sep 17 00:00:00 2001
From: "Mr. Paul"
Date: Tue, 10 Sep 2024 15:29:23 -0700
Subject: [PATCH 30/70] fastapi-migration: Organization and tests

This commit organizes files by functionality. It creates Python packages
via `__init__.py` files in every directory that will be imported as a
package. The `tests` directory was reorganized by test type: end-to-end,
integration, and unit. All existing tests have been moved into the
integration directory.

The `housing service provider` concept has been renamed to `Housing Org`,
and the code that manages this information now lives in the
`tenant_housing_orgs` directory under the `app` source directory. Its
Repository class has been replaced with the `crud.py` module, and the
controller has been updated to follow the HTTP method specifications more
closely. The tests for this controller are now black-box integration
tests, meaning they do not inspect the database directly.
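
For reference, here is a minimal sketch of the resulting request flow. The
names are taken from this commit, but treat the snippet as an illustration
rather than a specification:

    # app/api/main.py mounts one router per domain:
    #   /api/auth           -> app/api/routes/auth.py
    #   /api/intake-profile -> app/intake_profile/controller.py
    #   /api/housing-orgs   -> app/tenant_housing_orgs/controller.py
    #
    # A black-box integration test drives the endpoints only:
    from fastapi.testclient import TestClient
    from app.main import app

    client = TestClient(app)
    response = client.post("/api/housing-orgs", json={"org_name": "Example Org"})
    assert response.status_code == 201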
--- .../{tenant_housing_provider => }/__init__.py | 0 .../intake_profile => app/api}/__init__.py | 0 api-v2/app/api/deps.py | 4 +- api-v2/app/api/main.py | 7 +- api-v2/app/api/routes/auth.py | 10 +- api-v2/app/core/config.py | 4 +- api-v2/app/core/db.py | 6 +- api-v2/app/crud.py | 4 +- api-v2/app/intake_profile/schemas.py | 245 +++++---------- api-v2/app/main.py | 6 +- api-v2/app/models.py | 30 -- api-v2/app/schemas.py | 282 ++++++++---------- api-v2/app/seed.py | 7 +- .../tenant_housing_orgs}/__init__.py | 0 api-v2/app/tenant_housing_orgs/controller.py | 95 ++++++ api-v2/app/tenant_housing_orgs/crud.py | 49 +++ api-v2/app/tenant_housing_orgs/models.py | 31 ++ api-v2/app/tenant_housing_orgs/schemas.py | 8 + api-v2/app/tenant_housing_provider/model.py | 20 -- .../service_provider_repository.py | 98 ------ api-v2/app/utils.py | 4 +- api-v2/tests/conftest.py | 77 +++++ .../test_authentication.py | 0 .../test_forms.py | 0 .../test_forms_schema.py | 0 .../test_host_controller.py | 0 .../test_housing_orgs_controller.py | 213 +++++++++++++ .../{access => integration}/test_mocking.py | 0 .../{access => integration}/test_user_repo.py | 0 api-v2/tests/setup_utils.py | 24 +- .../test_service_provider_controller.py | 210 ------------- .../test_service_provider_repository.py | 112 ------- api-v2/tests/test_alembic_migration.py | 1 - api-v2/tests/{ => unit}/access/__init_.py | 0 .../intake_profile}/__init__.py | 0 .../{stays => unit/matching}/__init__.py | 0 .../onboarding}/__init__.py | 0 .../unit/stays/__init__.py} | 0 .../unit/tenant_housing_provider/__init__.py} | 0 39 files changed, 701 insertions(+), 846 deletions(-) rename api-v2/app/{tenant_housing_provider => }/__init__.py (100%) rename api-v2/{tests/intake_profile => app/api}/__init__.py (100%) rename api-v2/{tests/matching => app/tenant_housing_orgs}/__init__.py (100%) create mode 100644 api-v2/app/tenant_housing_orgs/controller.py create mode 100644 api-v2/app/tenant_housing_orgs/crud.py create mode 100644 api-v2/app/tenant_housing_orgs/models.py create mode 100644 api-v2/app/tenant_housing_orgs/schemas.py delete mode 100644 api-v2/app/tenant_housing_provider/model.py delete mode 100644 api-v2/app/tenant_housing_provider/service_provider_repository.py create mode 100644 api-v2/tests/conftest.py rename api-v2/tests/{access => integration}/test_authentication.py (100%) rename api-v2/tests/{intake_profile => integration}/test_forms.py (100%) rename api-v2/tests/{intake_profile => integration}/test_forms_schema.py (100%) rename api-v2/tests/{access => integration}/test_host_controller.py (100%) create mode 100644 api-v2/tests/integration/test_housing_orgs_controller.py rename api-v2/tests/{access => integration}/test_mocking.py (100%) rename api-v2/tests/{access => integration}/test_user_repo.py (100%) delete mode 100644 api-v2/tests/tenant_housing_provider/test_service_provider_controller.py delete mode 100644 api-v2/tests/tenant_housing_provider/test_service_provider_repository.py rename api-v2/tests/{ => unit}/access/__init_.py (100%) rename api-v2/tests/{onboarding => unit/intake_profile}/__init__.py (100%) rename api-v2/tests/{stays => unit/matching}/__init__.py (100%) rename api-v2/tests/{tenant_housing_provider => unit/onboarding}/__init__.py (100%) rename api-v2/{app/tenant_housing_provider/controller.py => tests/unit/stays/__init__.py} (100%) rename api-v2/{app/tenant_housing_provider/schemas.py => tests/unit/tenant_housing_provider/__init__.py} (100%) diff --git a/api-v2/app/tenant_housing_provider/__init__.py b/api-v2/app/__init__.py 
similarity index 100% rename from api-v2/app/tenant_housing_provider/__init__.py rename to api-v2/app/__init__.py diff --git a/api-v2/tests/intake_profile/__init__.py b/api-v2/app/api/__init__.py similarity index 100% rename from api-v2/tests/intake_profile/__init__.py rename to api-v2/app/api/__init__.py diff --git a/api-v2/app/api/deps.py b/api-v2/app/api/deps.py index 5ca8c1da..2ea05cfb 100644 --- a/api-v2/app/api/deps.py +++ b/api-v2/app/api/deps.py @@ -5,8 +5,8 @@ from fastapi import Request, HTTPException from fastapi.security import SecurityScopes -from core.db import SessionLocal -from core.config import settings +from app.core.db import SessionLocal +from app.core.config import settings cognito_region = settings.COGNITO_REGION cognito_access_id = settings.COGNITO_ACCESS_ID diff --git a/api-v2/app/api/main.py b/api-v2/app/api/main.py index 1de32e8b..dcd786fe 100644 --- a/api-v2/app/api/main.py +++ b/api-v2/app/api/main.py @@ -1,8 +1,11 @@ from fastapi import APIRouter -from api.routes import auth +from app.api.routes import auth +from app.intake_profile import controller as intake_profile +from app.tenant_housing_orgs import controller as housing_org api_router = APIRouter() api_router.include_router(auth.router, prefix="/auth", tags=["auth"]) -api_router.include_router(auth.router, prefix="/intake-profile", tags=["intake-profile"]) +api_router.include_router(intake_profile.router, prefix="/intake-profile", tags=["intake_profile"]) +api_router.include_router(housing_org.router, prefix="/housing-orgs", tags=["tenant_housing_orgs"]) diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/api/routes/auth.py index 6ab3691d..50784538 100644 --- a/api-v2/app/api/routes/auth.py +++ b/api-v2/app/api/routes/auth.py @@ -6,9 +6,9 @@ from botocore.exceptions import ClientError -from schemas import UserCreate, UserSignIn, UserSignInResponse -from crud import create_user, delete_user, get_user -from api.deps import ( +from app.schemas import UserCreate, UserSignIn, UserSignInResponse +from app.crud import create_user, delete_user, get_user +from app.api.deps import ( get_db, get_cognito_client, requires_auth, @@ -16,8 +16,8 @@ role_to_cognito_group_map, ) -from utils import calc_secret_hash -from core.config import settings +from app.utils import calc_secret_hash +from app.core.config import settings router = APIRouter() diff --git a/api-v2/app/core/config.py b/api-v2/app/core/config.py index 71b98de5..f6269797 100644 --- a/api-v2/app/core/config.py +++ b/api-v2/app/core/config.py @@ -1,3 +1,4 @@ +from pydantic import ConfigDict from pydantic_settings import BaseSettings from dotenv import load_dotenv @@ -18,8 +19,7 @@ class Settings(BaseSettings): ENV: str DATABASE_URL: str - class Config: - env_file = ".env" + model_config = ConfigDict(env_file = ".env") settings = Settings() diff --git a/api-v2/app/core/db.py b/api-v2/app/core/db.py index 98a9ce2d..d9457f50 100644 --- a/api-v2/app/core/db.py +++ b/api-v2/app/core/db.py @@ -1,6 +1,5 @@ from sqlalchemy import create_engine -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm import DeclarativeBase, sessionmaker from app.core.config import settings @@ -10,4 +9,5 @@ SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) -Base = declarative_base() +class Base(DeclarativeBase): + pass diff --git a/api-v2/app/crud.py b/api-v2/app/crud.py index dc099ea1..c28f2edd 100644 --- a/api-v2/app/crud.py +++ b/api-v2/app/crud.py @@ -1,7 +1,7 @@ from sqlalchemy.orm import 
Session -import models -import schemas +import app.models as models +import app.schemas as schemas def get_role(db: Session, role: int): diff --git a/api-v2/app/intake_profile/schemas.py b/api-v2/app/intake_profile/schemas.py index b98f7919..9fa4a3c0 100644 --- a/api-v2/app/intake_profile/schemas.py +++ b/api-v2/app/intake_profile/schemas.py @@ -1,207 +1,100 @@ -from pydantic import BaseModel -from typing import Optional - -from enum import Enum - - -class UserRoleEnum(str, Enum): - ADMIN = "admin" - GUEST = "guest" - HOST = "host" - COORDINATOR = "coordinator" - - -class RoleBase(BaseModel): - id: int - type: UserRoleEnum - - class Config: - from_attributes = True - - -class UserBase(BaseModel): - email: str - firstName: str - middleName: Optional[str] = None - lastName: Optional[str] = None - - -class UserCreate(UserBase): - password: str - role: UserRoleEnum - - -class User(UserBase): - id: int - role: RoleBase - - class Config: - from_attributes = True - - -class UserSignIn(BaseModel): - email: str - password: str - - -class UserSignInResponse(BaseModel): - user: User - token: str - - -class SmartNested(Nested): - ''' - Schema attribute used to serialize nested attributes to - primary keys, unless they are already loaded. This - enables serialization of complex nested relationships. - - Modified from - https://marshmallow-sqlalchemy.readthedocs.io/en/latest/recipes.html#smart-nested-field - ''' - - def serialize(self, attr, obj, accessor=None): - if hasattr(obj, attr): - value = getattr(obj, attr, None) - if value is None: - return None - elif hasattr(value, 'id'): - return {"id": value.id} - else: - return super(SmartNested, self).serialize(attr, obj, accessor) - else: - raise AttributeError( - f"{obj.__class__.__name__} object has no attribute '{attr}'") - - -class RoleSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class UnmatchedCaseSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class UnmatchedCaseStatusSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class UserSchema(BaseModel): - model_config = ConfigDict(from_attributes=True) - - -class HousingProgramServiceProviderSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class HousingProgramSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) +from pydantic import BaseModel, ConfigDict class FieldValidationsSchema(BaseModel): model_config = ConfigDict(from_attributes=True) - required : bool - max_length : int + required: bool + max_length: int -class FieldPropertiesSchema(BaseModel): +# class FieldPropertiesSchema(BaseModel): - class Meta: - model = FieldProperties - include_relationships = True - load_instance = True - exclude = ('properties_id', ) +# class Meta: +# model = FieldProperties +# include_relationships = True +# load_instance = True +# exclude = ('properties_id', ) - description = auto_field() - field_type = auto_field() - choices = auto_field() +# description = auto_field() +# field_type = auto_field() +# choices = auto_field() -class FieldSchema(BaseModel): +# class FieldSchema(BaseModel): - class Meta: - model = Field - include_relationships = True - load_instance = True - exclude = ('properties_id', 'validations_id', 'group_id') +# class Meta: +# model = Field +# include_relationships = True +# load_instance = True +# exclude = ('properties_id', 'validations_id', 'group_id') - field_id = auto_field(dump_only=True) - ref = auto_field() - properties = SmartNested(FieldPropertiesSchema) - 
validations = SmartNested(FieldValidationsSchema) +# field_id = auto_field(dump_only=True) +# ref = auto_field() +# properties = SmartNested(FieldPropertiesSchema) +# validations = SmartNested(FieldValidationsSchema) -class FieldGroupSchema(BaseModel): +# class FieldGroupSchema(BaseModel): - class Meta: - model = FieldGroup - include_relationships = True - load_instance = True - exclude = ('group_id', 'form_id') +# class Meta: +# model = FieldGroup +# include_relationships = True +# load_instance = True +# exclude = ('group_id', 'form_id') - title = auto_field() - description = auto_field() - fields = SmartNested(FieldSchema, many=True) +# title = auto_field() +# description = auto_field() +# fields = SmartNested(FieldSchema, many=True) -class FormSchema(BaseModel): +# class FormSchema(BaseModel): - class Meta: - model = Form - include_relationships = True - load_instance = True - exclude = ('form_id', ) +# class Meta: +# model = Form +# include_relationships = True +# load_instance = True +# exclude = ('form_id', ) - title = auto_field() - description = auto_field() - field_groups = SmartNested(FieldGroupSchema, many=True) +# title = auto_field() +# description = auto_field() +# field_groups = SmartNested(FieldGroupSchema, many=True) -class ResponseSchema(BaseModel): +# class ResponseSchema(BaseModel): - class Meta: - model = Response - include_relationship = True - load_instance = True - exclude = ('answer_id', ) +# class Meta: +# model = Response +# include_relationship = True +# load_instance = True +# exclude = ('answer_id', ) - user_id = auto_field(load_only=True) - field_id = auto_field(load_only=True) - answer_text = auto_field() - user = SmartNested(UserSchema, only=['name'], required=False, missing=None) - field = SmartNested(FieldSchema, - only=['field_id', 'ref', 'properties'], - required=False, - missing=None) +# user_id = auto_field(load_only=True) +# field_id = auto_field(load_only=True) +# answer_text = auto_field() +# user = SmartNested(UserSchema, only=['name'], required=False, missing=None) +# field = SmartNested(FieldSchema, +# only=['field_id', 'ref', 'properties'], +# required=False, +# missing=None) - @post_load - def make_response(self, data, **kwargs): - if data.user is None: - user = self._session.query(User).get(data.user_id) - if not user: - raise ValidationError('User not found', 'user_id') - data.user = user +# @post_load +# def make_response(self, data, **kwargs): +# if data.user is None: +# user = self._session.query(User).get(data.user_id) +# if not user: +# raise ValidationError('User not found', 'user_id') +# data.user = user - if data.field is None: - field = self._session.query(Field).get(data.field_id) - if not field: - raise ValidationError('Field not found', 'field_id') - data.field = field +# if data.field is None: +# field = self._session.query(Field).get(data.field_id) +# if not field: +# raise ValidationError('Field not found', 'field_id') +# data.field = field - return data +# return data -user_schema = UserSchema() -users_schema = UserSchema(many=True) -service_provider_schema = HousingProgramServiceProviderSchema() -service_provider_list_schema = HousingProgramServiceProviderSchema(many=True) -form_schema = FormSchema() -response_schema = ResponseSchema(many=True) -unmatched_cs_schema = UnmatchedCaseStatusSchema() -unmatched_c_schema = UnmatchedCaseSchema() +# form_schema = FormSchema() +# response_schema = ResponseSchema(many=True) diff --git a/api-v2/app/main.py b/api-v2/app/main.py index aafa4a7d..ba078449 100644 --- a/api-v2/app/main.py 
+++ b/api-v2/app/main.py @@ -2,9 +2,9 @@ from contextlib import asynccontextmanager -from api.main import api_router -from core.config import settings -from seed import init_db +from app.api.main import api_router +# from core.config import settings +from app.seed import init_db @asynccontextmanager diff --git a/api-v2/app/models.py b/api-v2/app/models.py index 3aaa5d07..36877ead 100644 --- a/api-v2/app/models.py +++ b/api-v2/app/models.py @@ -55,33 +55,3 @@ class UnmatchedGuestCaseStatus(Base): cases = relationship("UnmatchedGuestCase", back_populates="status") -class DataAccessLayer: - _engine: Engine = None - - @classmethod - def db_init(cls, conn_string): - # Check that a database engine is not already set. The test project will - # hook into the DataAccessLayer to create a test project database engine. - if cls._engine: return - - cls._engine = create_engine(conn_string, echo=True, future=True) - Base.metadata.create_all(bind=cls._engine) - - @classmethod - def session(cls) -> Session: - return Session(cls._engine) - - @classmethod - def revision_id(cls) -> str: - "Return the database alembic migration revision number." - if not cls._engine: return "" - try: - with cls._engine.connect() as conn: - # Using text() to ensure the query is treated as a literal SQL statement - result = conn.execute( - text("SELECT version_num FROM alembic_version")) - revision_id = result.scalar() - return revision_id - except SQLAlchemyError: - # This catches errors such as missing alembic_version table - return "" diff --git a/api-v2/app/schemas.py b/api-v2/app/schemas.py index b98f7919..d9efdd87 100644 --- a/api-v2/app/schemas.py +++ b/api-v2/app/schemas.py @@ -1,5 +1,4 @@ -from pydantic import BaseModel -from typing import Optional +from pydantic import BaseModel, ConfigDict from enum import Enum @@ -15,15 +14,14 @@ class RoleBase(BaseModel): id: int type: UserRoleEnum - class Config: - from_attributes = True + model_config = ConfigDict(from_attributes=True) class UserBase(BaseModel): email: str firstName: str - middleName: Optional[str] = None - lastName: Optional[str] = None + middleName: str | None = None + lastName: str | None = None class UserCreate(UserBase): @@ -35,8 +33,7 @@ class User(UserBase): id: int role: RoleBase - class Config: - from_attributes = True + model_config = ConfigDict(from_attributes=True) class UserSignIn(BaseModel): @@ -49,159 +46,138 @@ class UserSignInResponse(BaseModel): token: str -class SmartNested(Nested): - ''' - Schema attribute used to serialize nested attributes to - primary keys, unless they are already loaded. This - enables serialization of complex nested relationships. 
- - Modified from - https://marshmallow-sqlalchemy.readthedocs.io/en/latest/recipes.html#smart-nested-field - ''' - - def serialize(self, attr, obj, accessor=None): - if hasattr(obj, attr): - value = getattr(obj, attr, None) - if value is None: - return None - elif hasattr(value, 'id'): - return {"id": value.id} - else: - return super(SmartNested, self).serialize(attr, obj, accessor) - else: - raise AttributeError( - f"{obj.__class__.__name__} object has no attribute '{attr}'") - - -class RoleSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class UnmatchedCaseSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class UnmatchedCaseStatusSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class UserSchema(BaseModel): - model_config = ConfigDict(from_attributes=True) - - -class HousingProgramServiceProviderSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class HousingProgramSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - -class FieldValidationsSchema(BaseModel): - - model_config = ConfigDict(from_attributes=True) - - required : bool - max_length : int - - -class FieldPropertiesSchema(BaseModel): - - class Meta: - model = FieldProperties - include_relationships = True - load_instance = True - exclude = ('properties_id', ) - - description = auto_field() - field_type = auto_field() - choices = auto_field() - - -class FieldSchema(BaseModel): - - class Meta: - model = Field - include_relationships = True - load_instance = True - exclude = ('properties_id', 'validations_id', 'group_id') - - field_id = auto_field(dump_only=True) - ref = auto_field() - properties = SmartNested(FieldPropertiesSchema) - validations = SmartNested(FieldValidationsSchema) - - -class FieldGroupSchema(BaseModel): - - class Meta: - model = FieldGroup - include_relationships = True - load_instance = True - exclude = ('group_id', 'form_id') - - title = auto_field() - description = auto_field() - fields = SmartNested(FieldSchema, many=True) - - -class FormSchema(BaseModel): +# class SmartNested(Nested): +# ''' +# Schema attribute used to serialize nested attributes to +# primary keys, unless they are already loaded. This +# enables serialization of complex nested relationships. 
- class Meta: - model = Form - include_relationships = True - load_instance = True - exclude = ('form_id', ) +# Modified from +# https://marshmallow-sqlalchemy.readthedocs.io/en/latest/recipes.html#smart-nested-field +# ''' - title = auto_field() - description = auto_field() - field_groups = SmartNested(FieldGroupSchema, many=True) +# def serialize(self, attr, obj, accessor=None): +# if hasattr(obj, attr): +# value = getattr(obj, attr, None) +# if value is None: +# return None +# elif hasattr(value, 'id'): +# return {"id": value.id} +# else: +# return super(SmartNested, self).serialize(attr, obj, accessor) +# else: +# raise AttributeError( +# f"{obj.__class__.__name__} object has no attribute '{attr}'") +# class RoleSchema(BaseModel): -class ResponseSchema(BaseModel): +# model_config = ConfigDict(from_attributes=True) - class Meta: - model = Response - include_relationship = True - load_instance = True - exclude = ('answer_id', ) +# class UnmatchedCaseSchema(BaseModel): - user_id = auto_field(load_only=True) - field_id = auto_field(load_only=True) - answer_text = auto_field() - user = SmartNested(UserSchema, only=['name'], required=False, missing=None) - field = SmartNested(FieldSchema, - only=['field_id', 'ref', 'properties'], - required=False, - missing=None) +# model_config = ConfigDict(from_attributes=True) - @post_load - def make_response(self, data, **kwargs): - if data.user is None: - user = self._session.query(User).get(data.user_id) - if not user: - raise ValidationError('User not found', 'user_id') - data.user = user +# class UnmatchedCaseStatusSchema(BaseModel): - if data.field is None: - field = self._session.query(Field).get(data.field_id) - if not field: - raise ValidationError('Field not found', 'field_id') - data.field = field +# model_config = ConfigDict(from_attributes=True) + +# class UserSchema(BaseModel): +# model_config = ConfigDict(from_attributes=True) + +# class FieldValidationsSchema(BaseModel): + +# model_config = ConfigDict(from_attributes=True) + +# required : bool +# max_length : int + +# class FieldPropertiesSchema(BaseModel): + +# class Meta: +# model = FieldProperties +# include_relationships = True +# load_instance = True +# exclude = ('properties_id', ) + +# description = auto_field() +# field_type = auto_field() +# choices = auto_field() + +# class FieldSchema(BaseModel): + +# class Meta: +# model = Field +# include_relationships = True +# load_instance = True +# exclude = ('properties_id', 'validations_id', 'group_id') + +# field_id = auto_field(dump_only=True) +# ref = auto_field() +# properties = SmartNested(FieldPropertiesSchema) +# validations = SmartNested(FieldValidationsSchema) + +# class FieldGroupSchema(BaseModel): + +# class Meta: +# model = FieldGroup +# include_relationships = True +# load_instance = True +# exclude = ('group_id', 'form_id') + +# title = auto_field() +# description = auto_field() +# fields = SmartNested(FieldSchema, many=True) + +# class FormSchema(BaseModel): + +# class Meta: +# model = Form +# include_relationships = True +# load_instance = True +# exclude = ('form_id', ) + +# title = auto_field() +# description = auto_field() +# field_groups = SmartNested(FieldGroupSchema, many=True) + +# class ResponseSchema(BaseModel): + +# class Meta: +# model = Response +# include_relationship = True +# load_instance = True +# exclude = ('answer_id', ) + +# user_id = auto_field(load_only=True) +# field_id = auto_field(load_only=True) +# answer_text = auto_field() +# user = SmartNested(UserSchema, only=['name'], required=False, 
missing=None) +# field = SmartNested(FieldSchema, +# only=['field_id', 'ref', 'properties'], +# required=False, +# missing=None) + +# @post_load +# def make_response(self, data, **kwargs): +# if data.user is None: +# user = self._session.query(User).get(data.user_id) +# if not user: +# raise ValidationError('User not found', 'user_id') +# data.user = user - return data +# if data.field is None: +# field = self._session.query(Field).get(data.field_id) +# if not field: +# raise ValidationError('Field not found', 'field_id') +# data.field = field +# return data -user_schema = UserSchema() -users_schema = UserSchema(many=True) -service_provider_schema = HousingProgramServiceProviderSchema() -service_provider_list_schema = HousingProgramServiceProviderSchema(many=True) -form_schema = FormSchema() -response_schema = ResponseSchema(many=True) -unmatched_cs_schema = UnmatchedCaseStatusSchema() -unmatched_c_schema = UnmatchedCaseSchema() +# user_schema = UserSchema() +# users_schema = UserSchema(many=True) +# service_provider_schema = HousingProgramServiceProviderSchema() +# service_provider_list_schema = HousingProgramServiceProviderSchema(many=True) +# form_schema = FormSchema() +# response_schema = ResponseSchema(many=True) +# unmatched_cs_schema = UnmatchedCaseStatusSchema() +# unmatched_c_schema = UnmatchedCaseSchema() diff --git a/api-v2/app/seed.py b/api-v2/app/seed.py index 1831ebeb..a4204afd 100644 --- a/api-v2/app/seed.py +++ b/api-v2/app/seed.py @@ -1,6 +1,7 @@ -from core.db import Base, engine from sqlalchemy import event -from models import Role + +from app.core.db import Base, engine +from app.models import Role INITIAL_ROLES = [ {"type": "admin"}, @@ -21,4 +22,4 @@ def initialize_table(target, connection, **kw): def init_db(): - Base.metadata.create_all(bind=engine, checkfirst=True) \ No newline at end of file + Base.metadata.create_all(bind=engine, checkfirst=True) diff --git a/api-v2/tests/matching/__init__.py b/api-v2/app/tenant_housing_orgs/__init__.py similarity index 100% rename from api-v2/tests/matching/__init__.py rename to api-v2/app/tenant_housing_orgs/__init__.py diff --git a/api-v2/app/tenant_housing_orgs/controller.py b/api-v2/app/tenant_housing_orgs/controller.py new file mode 100644 index 00000000..4973f4f7 --- /dev/null +++ b/api-v2/app/tenant_housing_orgs/controller.py @@ -0,0 +1,95 @@ +from . import crud, models, schemas + +from fastapi import APIRouter, Depends, Request, Response, HTTPException, status +from fastapi.responses import RedirectResponse +from sqlalchemy.orm import Session + +from app.api.deps import ( + get_db, ) + +router = APIRouter() + + +@router.post("/", status_code=status.HTTP_201_CREATED) +def create_housing_org( + housing_org: schemas.HousingOrg, + request: Request, + session: Session = Depends(get_db) +) -> schemas.HousingOrg: + """Create a housing org. + + A housing org is created if it is not already in + the database. + + Return the newly created housing org. Return None + if the housing org already exists. + """ + db_org = crud.read_housing_org_by_name(session, housing_org.org_name) + if db_org: + return RedirectResponse(status_code=status.HTTP_303_SEE_OTHER, + url=f"{request.url}/{db_org.id}") + + return crud.create_housing_org(session, housing_org) + + +@router.get("/{housing_org_id}") +def get_housing_org( + housing_org_id: int, session: Session = Depends(get_db) +) -> schemas.HousingOrg | None: + """Get details about a housing org from an ID. 
+
+    :param housing_org_id: The ID of the housing org to read, update or delete
+    :type housing_org_id: int
+    """
+    housing_org = crud.read_housing_org_by_id(session, housing_org_id)
+    if not housing_org:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
+                            detail="Housing Org not found")
+    return housing_org
+
+
+@router.get("/")
+def get_housing_orgs(session: Session = Depends(get_db)) -> list[
+        schemas.HousingOrg]:
+    """Get a list of all housing orgs."""
+    return crud.read_housing_orgs(session)
+
+
+@router.put("/{housing_org_id}", status_code=status.HTTP_200_OK)
+def put_housing_org(
+        housing_org_id: int,
+        body: schemas.HousingOrg,
+        response: Response,
+        session: Session = Depends(get_db)) -> schemas.HousingOrg:
+    """Create or Update a Housing Org with the given ID.
+
+    Return the created or updated housing org. Respond with HTTP 201
+    if a new org was created, otherwise HTTP 200.
+
+    If the representation contains a Housing Org ID that does not match the ID
+    given in the path, then a HTTP 409 Conflict will be returned.
+    """
+    if body.id is not None and body.id != housing_org_id:
+        raise HTTPException(
+            status_code=status.HTTP_409_CONFLICT,
+            detail=f"Housing Org ID in body ({body.id}) does not match ID in path ({housing_org_id})")
+
+    housing_org = models.HousingOrg(id=housing_org_id, org_name=body.org_name)
+
+    was_created = crud.upsert_housing_org(session, housing_org)
+    if was_created:
+        response.status_code = status.HTTP_201_CREATED
+
+    return housing_org
+
+
+@router.delete("/{housing_org_id}", status_code=status.HTTP_204_NO_CONTENT)
+def delete_housing_org(housing_org_id: int,
+                       session: Session = Depends(get_db)):
+    """Delete a housing org.
+
+    :param housing_org_id: The ID of the housing org to delete.
+    """
+    housing_org = crud.read_housing_org_by_id(session, housing_org_id)
+    if not housing_org:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+    crud.delete_housing_org(session, housing_org)
diff --git a/api-v2/app/tenant_housing_orgs/crud.py b/api-v2/app/tenant_housing_orgs/crud.py
new file mode 100644
index 00000000..dfeaefae
--- /dev/null
+++ b/api-v2/app/tenant_housing_orgs/crud.py
@@ -0,0 +1,49 @@
+from sqlalchemy.orm import Session
+from sqlalchemy import select, insert, update
+
+from . 
import models, schemas
+
+
+def create_housing_org(session: Session, housing_org: schemas.HousingOrg):
+    new_org = models.HousingOrg(org_name=housing_org.org_name)
+    session.add(new_org)
+    session.commit()
+    session.refresh(new_org)
+    return new_org
+
+
+def read_housing_org_by_id(session: Session,
+                           housing_org_id: int) -> models.HousingOrg:
+    return session.get(models.HousingOrg, housing_org_id)
+
+
+def read_housing_org_by_name(session: Session,
+                             org_name: str) -> models.HousingOrg:
+    query = select(
+        models.HousingOrg).filter(models.HousingOrg.org_name == org_name)
+    return session.scalars(query).one_or_none()
+
+
+def read_housing_orgs(session: Session) -> list[models.HousingOrg]:
+    return session.scalars(select(models.HousingOrg)).all()
+
+
+def upsert_housing_org(session: Session,
+                       housing_org: models.HousingOrg) -> bool:
+    was_created = False
+
+    with session.begin():
+        db_housing_org = session.query(models.HousingOrg).filter_by(id=housing_org.id).first()
+        if db_housing_org:
+            db_housing_org.org_name = housing_org.org_name
+        else:
+            session.add(housing_org)
+            was_created = True
+        # session.begin() commits (or rolls back) on exit, so no explicit
+        # commit is needed here.
+
+    return was_created
+
+
+def delete_housing_org(session: Session, housing_org: models.HousingOrg):
+    housing_org = session.get(models.HousingOrg, housing_org.id)
+    session.delete(housing_org)
+    session.commit()
diff --git a/api-v2/app/tenant_housing_orgs/models.py b/api-v2/app/tenant_housing_orgs/models.py
new file mode 100644
index 00000000..8ba6d607
--- /dev/null
+++ b/api-v2/app/tenant_housing_orgs/models.py
@@ -0,0 +1,31 @@
+from typing import Annotated
+from typing import List
+from sqlalchemy import ForeignKey, String
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm import relationship
+
+from app.core.db import Base
+
+intpk = Annotated[int, mapped_column(primary_key=True)]
+
+
+class HousingOrg(Base):
+    __tablename__ = "housing_orgs"
+
+    id: Mapped[intpk]
+    org_name: Mapped[str] = mapped_column(String, nullable=False, unique=True)
+    programs: Mapped[List["HousingProgram"]] = relationship(back_populates="housing_org")
+
+    def __repr__(self):
+        return f"HousingOrg(id={self.id},org_name='{self.org_name}')"
+
+
+class HousingProgram(Base):
+    __tablename__ = "housing_programs"
+
+    id: Mapped[intpk]
+    program_name: Mapped[str] = mapped_column(String, nullable=False)
+    housing_org_id: Mapped[int] = mapped_column(ForeignKey('housing_orgs.id'),
+                                                nullable=False)
+    housing_org: Mapped["HousingOrg"] = relationship(back_populates="programs")
diff --git a/api-v2/tenant_housing_orgs/schemas.py b/api-v2/app/tenant_housing_orgs/schemas.py
new file mode 100644
index 00000000..92beedbb
--- /dev/null
+++ b/api-v2/app/tenant_housing_orgs/schemas.py
@@ -0,0 +1,8 @@
+from pydantic import BaseModel, ConfigDict
+
+
+class HousingOrg(BaseModel):
+    id: int | None = None
+    org_name: str
+
+    model_config = ConfigDict(from_attributes=True)
diff --git a/api-v2/app/tenant_housing_provider/model.py b/api-v2/app/tenant_housing_provider/model.py
deleted file mode 100644
index d3eeb9c8..00000000
--- a/api-v2/app/tenant_housing_provider/model.py
+++ /dev/null
@@ -1,20 +0,0 @@
-
-class HousingProgramServiceProvider(Base):
-    __tablename__ = "housing_program_service_provider"
-
-    id = Column(Integer, primary_key=True, index=True)
-    provider_name = Column(String, nullable=False)
-
-    def __repr__(self):
-        return f"HousingProgramServiceProvider(id={id},provider_name='{self.provider_name}')"
-
-
-class HousingProgram(Base):
-    __tablename__ = "housing_program"
-
-    id = 
Column(Integer, primary_key=True, index=True) - program_name = Column(String, nullable=False) - service_provider = Column( - Integer, - ForeignKey('housing_program_service_provider.id'), - nullable=False) diff --git a/api-v2/app/tenant_housing_provider/service_provider_repository.py b/api-v2/app/tenant_housing_provider/service_provider_repository.py deleted file mode 100644 index f7847056..00000000 --- a/api-v2/app/tenant_housing_provider/service_provider_repository.py +++ /dev/null @@ -1,98 +0,0 @@ -from typing import Optional, List - -# Third Party -from sqlalchemy import func, select -from sqlalchemy.orm import Session - -# Local -from model import HousingProgramServiceProvider - - -class HousingProviderRepository: - - def create_service_provider( - self, - provider_name: str) -> Optional[HousingProgramServiceProvider]: - """ - Create a housing program service provider, if it - is not already in the database. - - Return the newly created service provider. Return None - if the service provider already exists. - """ - with DataAccessLayer.session() as session: - existing_provider = session.execute( - select(HousingProgramServiceProvider).filter( - HousingProgramServiceProvider.provider_name == - provider_name)).scalar_one_or_none() - - if existing_provider is None: - new_provider = HousingProgramServiceProvider( - provider_name=provider_name) - session.add(new_provider) - session.commit() - session.refresh(new_provider) - return new_provider - - return None - - def delete_service_provider(self, provider_id: int) -> bool: - """Delete a service provider. Return false if the - service provider is not found. Return true otherwise. - - :param provider_id: The ID of the service provider to delete. - """ - with DataAccessLayer.session() as session: - provider = session.get(HousingProgramServiceProvider, provider_id) - if provider: - session.delete(provider) - session.commit() - return True - - return False - - def get_service_provider_by_id( - self, provider_id: int) -> Optional[HousingProgramServiceProvider]: - """Get details about a housing program service provider from an ID - - :param provider_id: The ID of the service provider to read, update or delete - :type provider_id: int - """ - with DataAccessLayer.session() as session: - return session.get(HousingProgramServiceProvider, provider_id) - - def get_service_providers(self) -> List[HousingProgramServiceProvider]: - """ - Get a list of all housing program service providers. - """ - with DataAccessLayer.session() as session: - return session.scalars(select(HousingProgramServiceProvider)).all() - - def update_service_provider( - self, new_name: str, - provider_id: int) -> Optional[HousingProgramServiceProvider]: - """ - Update a housing program service provider with - id 'provider_id'. Return the updated service provider - if update is successful, otherwise return None. 
- """ - with DataAccessLayer.session() as session: - provider_to_update = session.get(HousingProgramServiceProvider, - provider_id) - if provider_to_update: - provider_to_update.provider_name = new_name - session.commit() - session.refresh(provider_to_update) - return provider_to_update - return None - - def provider_count(self, existing_session: Session = None): - - def count(lcl_session: Session): - return lcl_session.scalar( - select(func.count(HousingProgramServiceProvider.id))) - - if existing_session is None: - with DataAccessLayer.session() as session: - return count(session) - return count(existing_session) diff --git a/api-v2/app/utils.py b/api-v2/app/utils.py index 5138bb47..652dcd32 100644 --- a/api-v2/app/utils.py +++ b/api-v2/app/utils.py @@ -1,7 +1,7 @@ import hmac import base64 -from core.config import settings +from app.core.config import settings # Helper function to calculate secret hash @@ -17,4 +17,4 @@ def calc_secret_hash(email: str) -> str: dig = hmac.new( secret, msg=message.encode("utf-8"), digestmod="sha256" ).digest() - return base64.b64encode(dig).decode() \ No newline at end of file + return base64.b64encode(dig).decode() diff --git a/api-v2/tests/conftest.py b/api-v2/tests/conftest.py new file mode 100644 index 00000000..ccf91230 --- /dev/null +++ b/api-v2/tests/conftest.py @@ -0,0 +1,77 @@ +import os + +import pytest +from pytest import MonkeyPatch +import sqlalchemy as sa +from sqlalchemy.orm import sessionmaker +from sqlalchemy.pool import StaticPool +from fastapi.testclient import TestClient + +from app.main import app as main_app +from app.core.db import Base +from app.api.deps import get_db + + +@pytest.fixture +def client(): + SQLALCHEMY_DATABASE_URL = "sqlite+pysqlite:///:memory:" + + engine = sa.create_engine( + SQLALCHEMY_DATABASE_URL, + connect_args={"check_same_thread": False}, + poolclass=StaticPool, + ) + TestingSessionLocal = sessionmaker(autocommit=False, + autoflush=False, + bind=engine) + + Base.metadata.create_all(bind=engine) + + def override_get_db(): + try: + session = TestingSessionLocal() + yield session + finally: + session.close() + + main_app.dependency_overrides[get_db] = override_get_db + + return TestClient(main_app) + + +# @pytest.fixture +# def empty_environment(monkeypatch: MonkeyPatch) -> MonkeyPatch: +# """Create an isolated environment for testing purposes. + +# The environment variables are cleared to ensure the +# configuration object is not dependent on the machine configuration. +# """ +# for env_var in os.environ.keys(): +# monkeypatch.delenv(env_var) +# return monkeypatch + +# @pytest.fixture +# def fake_prod_env(empty_environment: MonkeyPatch) -> MonkeyPatch: +# """Define a fake production environment. + +# Define a fake production environment by setting each of the required +# production configuration variables with fake values. +# """ +# empty_environment.setenv("SECRET_KEY", +# "A completely made up fake secret !@#$12234") +# empty_environment.setenv("DATABASE_URL", "sqlite:///:memory:") +# empty_environment.setenv("COGNITO_CLIENT_ID", "Totally fake client id") +# empty_environment.setenv("COGNITO_CLIENT_SECRET", +# "Yet another fake secret12") +# empty_environment.setenv("COGNITO_REGION", +# "Not even the region actually exists") +# empty_environment.setenv( +# "COGNITO_REDIRECT_URI", +# "Redirect your way back to writing more test cases") +# empty_environment.setenv("COGNITO_USER_POOL_ID", +# "Water's warm. 
IDs are fake") +# empty_environment.setenv("COGNITO_ACCESS_ID", +# "If you need fake access, use this ID") +# empty_environment.setenv("COGNITO_ACCESS_KEY", +# "WARNING: This is a real-ly fake key 12345a6sdf") +# return empty_environment diff --git a/api-v2/tests/access/test_authentication.py b/api-v2/tests/integration/test_authentication.py similarity index 100% rename from api-v2/tests/access/test_authentication.py rename to api-v2/tests/integration/test_authentication.py diff --git a/api-v2/tests/intake_profile/test_forms.py b/api-v2/tests/integration/test_forms.py similarity index 100% rename from api-v2/tests/intake_profile/test_forms.py rename to api-v2/tests/integration/test_forms.py diff --git a/api-v2/tests/intake_profile/test_forms_schema.py b/api-v2/tests/integration/test_forms_schema.py similarity index 100% rename from api-v2/tests/intake_profile/test_forms_schema.py rename to api-v2/tests/integration/test_forms_schema.py diff --git a/api-v2/tests/access/test_host_controller.py b/api-v2/tests/integration/test_host_controller.py similarity index 100% rename from api-v2/tests/access/test_host_controller.py rename to api-v2/tests/integration/test_host_controller.py diff --git a/api-v2/tests/integration/test_housing_orgs_controller.py b/api-v2/tests/integration/test_housing_orgs_controller.py new file mode 100644 index 00000000..c395f275 --- /dev/null +++ b/api-v2/tests/integration/test_housing_orgs_controller.py @@ -0,0 +1,213 @@ +from fastapi.testclient import TestClient + +PATH = "/api/housing-orgs" + + +def populate_test_database(client: TestClient, num_entries: int) -> list[int]: + """Add the given number of entries to the database. + + Add num_entries rows to the test database and return the + created ids. fail test if any of the creation requests + fails. + + note: orgs are created using sqlalchemy commands, + not api requests. + """ + ids = [] + for i in range(num_entries): + REQUESTED_ORG = {"org_name": f"org no {i}"} + response = client.post(PATH, json=REQUESTED_ORG) + assert response.status_code == 201, "Could not create housing org." + org = response.json() + assert org is not None, ( + f"test setup failure. failed to create org no {i}." + "cannot perform endpoint test!") + assert 'id' in org + ids.append(org["id"]) + return ids + + +def test_create_housing_org(client): + """Test create a new housing org.""" + requested_org = {"org_name": "-123ASCII&"} + + response = client.post(PATH, json=requested_org) + response_obj = response.json() + + assert response.status_code == 201, response + assert response_obj["org_name"] == requested_org["org_name"] + + print(response_obj['id']) + response = client.get(f"{PATH}/{response_obj['id']}") + assert response.status_code == 200, response + response_obj = response.json() + assert response_obj["org_name"] == requested_org["org_name"], response + + +def test_create_with_extra_data(client): + """Test that sending an create POST request with extra + json entries in the body does not disrupt the update. + + We should safely ignore additional fields. 
+ """ + create_request = { + "org_name": "A new org", + "extra_int": 1, + "extra_bool": True, + "extra_string": "I'm notta name" + } + + response = client.post(PATH, json=create_request) + response_body = response.json() + + assert response.status_code == 201, response + assert 'org_name' in response_body + assert 'id' in response_body + assert response_body['org_name'] == create_request['org_name'] + assert 'extra_int' not in response_body, "We should not send back request json extra fields" + assert 'extra_bool' not in response_body, "We should not send back request json extra fields" + assert 'extra_string' not in response_body, "We should not send back request json extra fields" + + response = client.get(f"{PATH}/{response_body['id']}") + assert response.status_code == 200, "POST succeeded but the housing org doesn't exist." + assert response_body["org_name"] == create_request["org_name"] + + +def test_create_bad_json_invalid_type(client): + bad_create_request = {"org_name": 1} + response = client.post(PATH, json=bad_create_request) + + assert response.status_code == 422, response + + +def test_create_bad_json_missing_name(client): + bad_create_request = {"org_namez": 1} + response = client.post(PATH, json=bad_create_request) + + assert response.status_code == 422, response + + +def test_delete_housing_org(client: TestClient): + """ + Test deleting a housing org that we know exists, + using a delete request. + """ + ids = populate_test_database(client=client, num_entries=1) + path = f'{PATH}/{ids[0]}' + response = client.delete(path) + assert response.status_code == 204, response + + response = client.get(path) + assert response.status_code == 404, "Housing org was not deleted." + + +def test_delete_nonexistant_org(client: TestClient): + """ + Test that deleting a nonexistant org responds with the + correct status code and does not modify the db. + """ + NUM_ROWS = 4 + populate_test_database(client=client, num_entries=NUM_ROWS) + + response = client.get(PATH) + response_body = response.json() + assert response.status_code == 200, "Housing orgs endpoint failure." + assert len(response_body) == NUM_ROWS + + response = client.delete(f"{PATH}/{999}") + assert response.status_code == 404, response + + response = client.get(PATH) + response_body = response.json() + assert response.status_code == 200, "Housing orgs endpoint failure." + assert len(response_body) == NUM_ROWS + + +def test_get_nonexistent_org(client: TestClient): + populate_test_database(client=client, num_entries=8) + response = client.get(f"{PATH}/{999}") + response_body = response.json() + + assert response.status_code == 404, response + assert 'org_name' not in response_body + + +def test_get_housing_orgs(client: TestClient): + """Test case for get_housing_orgs + + Get a list of housing orgs. 
+ """ + expected_org_count = 12 + populate_test_database(client=client, num_entries=expected_org_count) + + response = client.get(PATH) + response_body = response.json() + + assert response.status_code == 200, response + assert len(response_body) == expected_org_count + + +def test_get_housing_org_empty_db(client): + response = client.get(PATH) + response_body = response.json() + + assert response.status_code == 200, response + assert len(response_body) == 0 + + +def test_put_update_housing_org(client: TestClient): + """Test case for update_housing_org + + Update a housing org + """ + ids = populate_test_database(client=client, num_entries=1) + updated_org = {"org_name": "Rebranded Org~~~"} + + response = client.put(f"{PATH}/{ids[0]}", json=updated_org) + + assert response.status_code == 200, response + + response_obj = response.json() + assert response_obj["org_name"] == updated_org["org_name"] + assert response_obj["id"] == ids[0] + + +def test_put_create_housing_org_no_id(client: TestClient): + put_body = {"org_name": "New Housing Org Name"} + response = client.put(f"{PATH}/{999}", json=put_body) + assert response.status_code == 201, response + + +def test_put_create_housing_org_mismatch_id(client: TestClient): + failed_put_body = {"id": 1, "org_name": "New Housing Org Name"} + response = client.put(f"{PATH}/{999}", json=failed_put_body) + assert response.status_code == 409, response + + +def test_put_with_extra_data(client: TestClient): + """ + Test that sending an update PUT request with extra + json entries in the body does not disrupt the update. + + We should safely ignore additional fields. + """ + ids = populate_test_database(client=client, num_entries=1) + update_request = { + "org_name": "A brand new name", + "extra_int": 1, + "extra_bool": True, + "extra_string": "I'm notta name" + } + response = client.put(f"{PATH}/{ids[0]}", json=update_request) + response_body = response.json() + + assert response.status_code == 200, response + + assert 'org_name' in response_body + assert 'id' in response_body + assert 'extra_int' not in response_body, "We should not send back request json extra fields" + assert 'extra_bool' not in response_body, "We should not send back request json extra fields" + assert 'extra_string' not in response_body, "We should not send back request json extra fields" + + assert response_body['org_name'] == update_request["org_name"] + assert response_body['id'] == ids[0] diff --git a/api-v2/tests/access/test_mocking.py b/api-v2/tests/integration/test_mocking.py similarity index 100% rename from api-v2/tests/access/test_mocking.py rename to api-v2/tests/integration/test_mocking.py diff --git a/api-v2/tests/access/test_user_repo.py b/api-v2/tests/integration/test_user_repo.py similarity index 100% rename from api-v2/tests/access/test_user_repo.py rename to api-v2/tests/integration/test_user_repo.py diff --git a/api-v2/tests/setup_utils.py b/api-v2/tests/setup_utils.py index b780b07b..ac5a16e6 100644 --- a/api-v2/tests/setup_utils.py +++ b/api-v2/tests/setup_utils.py @@ -1,25 +1,5 @@ -from typing import List +from app.tenant_housing_provider.service_provider_repository import HousingProviderRepository -from openapi_server.repositories.service_provider_repository import HousingProviderRepository - -def populate_test_database(num_entries) -> List[int]: - ''' - Add num_entries rows to the test database and return the - created Ids. Fail test if any of the creation requests - fails. - - Note: Providers are created using SQLAlchemy commands, - not API requests. 
- ''' - ids = [] - db_helper = HousingProviderRepository() - for i in range(num_entries): - provider = db_helper.create_service_provider(f"Provider No {i}") - assert provider is not None, ( - f"Test setup failure. Failed to create Provider No {i}." - "Cannot perform endpoint test!") - ids.append(provider.id) - return ids def signup_user(app, email: str, password: str, firstName: str = None, middleName: str = None, lastName: str = None) -> None: @@ -82,4 +62,4 @@ def create_and_signin_user(test_client, email: str, password: str) -> (str, str) Fail the test if the signup, confirm, or signin operation fails. ''' create_user(test_client, email, password) - return signin_user(test_client, email, password) \ No newline at end of file + return signin_user(test_client, email, password) diff --git a/api-v2/tests/tenant_housing_provider/test_service_provider_controller.py b/api-v2/tests/tenant_housing_provider/test_service_provider_controller.py deleted file mode 100644 index 00ace823..00000000 --- a/api-v2/tests/tenant_housing_provider/test_service_provider_controller.py +++ /dev/null @@ -1,210 +0,0 @@ -from __future__ import absolute_import - -from openapi_server.repositories.service_provider_repository import HousingProviderRepository -from tests.setup_utils import populate_test_database - -def test_create_service_provider(client): - """ - Test creating a new service provider using a - simulated post request. Verify that the - response is correct, and that the app - database was properly updated. - """ - REQUESTED_PROVIDER = { - "provider_name" : "-123ASCII&" - } - response = client.post( - '/api/serviceProviders', - json=REQUESTED_PROVIDER) - - assert response.status_code ==201, f'Response body is: {response.json}' - assert 'provider_name' in response.json - assert 'id' in response.json - assert response.json['provider_name'] == REQUESTED_PROVIDER['provider_name'] - - db_entry = HousingProviderRepository().get_service_provider_by_id(response.json['id']) - assert db_entry is not None, "Request succeeeded but the database was not updated!" - assert db_entry.provider_name == REQUESTED_PROVIDER['provider_name'] - -def test_create_with_extra_data(client): - ''' - Test that sending an create POST request with extra - json entries in the body does not disrupt the update. - - We should safely ignore additional fields. - ''' - create_request = { - "provider_name": "A new provider", - "extra_int": 1, - "extra_bool": True, - "extra_string": "I'm notta name" - } - - response = client.post( - '/api/serviceProviders', - json=create_request) - - assert response.status_code ==201, f'Response body is: {response.json}' - assert 'provider_name' in response.json - assert 'id' in response.json - assert response.json['provider_name'] == create_request['provider_name'] - assert 'extra_int' not in response.json, "We should not send back request json extra fields" - assert 'extra_bool' not in response.json, "We should not send back request json extra fields" - assert 'extra_string' not in response.json, "We should not send back request json extra fields" - - db_entry = HousingProviderRepository().get_service_provider_by_id(response.json['id']) - assert db_entry is not None, "Request succeeeded but the database was not updated!" 
- assert db_entry.provider_name == create_request['provider_name'] - -def test_create_bad_json_invalid_type(client): - bad_create_request = { - "provider_name": 1 - } - response = client.post( - '/api/serviceProviders', - json=bad_create_request) - - assert response.status_code == 400, f'Response body is: {response.json}' - -def test_create_bad_json_missing_name(client): - bad_create_request = { - "provider_namez": 1 - } - response = client.post( - '/api/serviceProviders', - json=bad_create_request) - - assert response.status_code == 400, f'Response body is: {response.json}' - -def test_delete_service_provider(client): - """ - Test deleting a service provider that we know exists, - using a simulated delete request. Verify that the request - succeeds and check that the provider is no longer - availabe within the database. - """ - # Test database is empty at start. Create an entry to delete - ids = populate_test_database(num_entries=1) - response = client.delete(f'/api/serviceProviders/{ids[0]}') - assert response.status_code == 200, f'Response body is: {response.json}' - - deleted_provider = HousingProviderRepository().get_service_provider_by_id(ids[0]) - assert deleted_provider is None, "Request succeeded, but provider is still in the database!" - -def test_delete_nonexistant_provider(client): - """ - Test that deleting a nonexistant provider responds with the - correct status code and does not modify the db. - """ - NUM_ROWS = 4 - ids = populate_test_database(num_entries=NUM_ROWS) - assert HousingProviderRepository().provider_count() == NUM_ROWS, "Test setup failure" - - response = client.delete(f'/api/serviceProviders/{999}') - assert response.status_code == 404, f'Response body is: {response.json}' - - assert HousingProviderRepository().provider_count() == NUM_ROWS, ( - "Request failed, but the row count changed!" - ) - -def test_get_service_provider_by_id(client): - """Test case for get_service_provider_by_id - - Get details about a housing program service provider from an ID - """ - ids = populate_test_database(num_entries=8) - ID_TO_TEST = ids[3] - provider_in_db = HousingProviderRepository().get_service_provider_by_id(ID_TO_TEST) - - response = client.get(f"/api/serviceProviders/{ID_TO_TEST}") - assert response.status_code == 200, f'Response body is : {response.json}' - - assert 'provider_name' in response.json - assert 'id' in response.json - assert response.json['provider_name'] == provider_in_db.provider_name - assert response.json['id'] == ID_TO_TEST - -def test_get_nonexistent_provider(client): - populate_test_database(num_entries=8) - response = client.get(f"/api/serviceProviders/{999}") - assert response.status_code == 404, f'Response body is : {response.json}' - - assert 'provider_name' not in response.json - -def test_get_service_providers(client): - """Test case for get_service_providers - - Get a list of housing program service providers. 
- """ - expected_provider_count = 12 - populate_test_database(num_entries=expected_provider_count) - - response = client.get('/api/serviceProviders') - assert response.status_code == 200, f"Response body is : {response.json}" - assert len(response.json) == expected_provider_count - -def test_get_service_provider_empty_db(client): - response = client.get('/api/serviceProviders') - assert response.status_code == 200, f"Response body is : {response.json}" - assert len(response.json) == 0 - -def test_update_service_provider(client): - """Test case for update_service_provider - - Update a housing program service provider - """ - ids = populate_test_database(num_entries=1) - updated_provider = { - "provider_name" : "Rebranded Provider~~~" - } - response = client.put( - f"/api/serviceProviders/{ids[0]}", - json=updated_provider) - assert response.status_code == 200, f'Response body is: {response.json}' - - assert 'provider_name' in response.json - assert 'id' in response.json - - assert response.json['provider_name'] == updated_provider["provider_name"] - assert response.json['id'] == ids[0] - -def test_update_with_extra_data(client): - ''' - Test that sending an update PUT request with extra - json entries in the body does not disrupt the update. - - We should safely ignore additional fields. - ''' - ids = populate_test_database(num_entries=1) - update_request = { - "provider_name": "A brand new name", - "extra_int": 1, - "extra_bool": True, - "extra_string": "I'm notta name" - } - response = client.put( - f"/api/serviceProviders/{ids[0]}", - json=update_request) - - assert response.status_code == 200, f'Response body is: {response.json}' - - assert 'provider_name' in response.json - assert 'id' in response.json - assert 'extra_int' not in response.json, "We should not send back request json extra fields" - assert 'extra_bool' not in response.json, "We should not send back request json extra fields" - assert 'extra_string' not in response.json, "We should not send back request json extra fields" - - assert response.json['provider_name'] == update_request["provider_name"] - assert response.json['id'] == ids[0] - -def test_update_nonexistant_service_provider(client): - ids = populate_test_database(num_entries=1) - failed_update_request = { - "provider_name" : "Failed Update Name" - } - response = client.put( - f"/api/serviceProviders/{999}", - json=failed_update_request) - assert response.status_code == 404, f'Response body is: {response.json}' - - assert 'provider_name' not in response.json \ No newline at end of file diff --git a/api-v2/tests/tenant_housing_provider/test_service_provider_repository.py b/api-v2/tests/tenant_housing_provider/test_service_provider_repository.py deleted file mode 100644 index 61f9c1d8..00000000 --- a/api-v2/tests/tenant_housing_provider/test_service_provider_repository.py +++ /dev/null @@ -1,112 +0,0 @@ -# Third Party -import pytest -from collections.abc import Generator -# Local -from openapi_server.repositories.service_provider_repository import HousingProviderRepository - -@pytest.fixture -def empty_housing_repo(empty_db_session) -> Generator[HousingProviderRepository, None, None]: - ''' - SetUp and TearDown an empty housing repository for - testing purposes. - ''' - yield HousingProviderRepository() - -@pytest.fixture -def housing_repo_5_entries(empty_housing_repo: HousingProviderRepository) -> Generator[HousingProviderRepository, None, None]: - ''' - SetUp and TearDown a housing repository with five service providers. 
- The providers will have ids [1-5] and names Provider 1...Provider5 - ''' - for i in range(1, 6): - new = empty_housing_repo.create_service_provider(f"Provider {i}") - assert new is not None, f"Test Setup Failure! Failed to create provider {i}" - assert new.id == i, "The test ids are expected to go from 1-5" - yield empty_housing_repo - -def test_empty_db_count(empty_housing_repo: HousingProviderRepository): - ''' - Test our test setup, to ensure that newly created repos are in fact empty. - ''' - assert empty_housing_repo.provider_count() == 0 - -def test_create_provider(empty_housing_repo: HousingProviderRepository): - ''' - Test creating a new provider within an empty database. - ''' - EXPECTED_NAME = "MyFancyProvider" - - newProvider = empty_housing_repo.create_service_provider(EXPECTED_NAME) - - assert newProvider is not None, "Repo create method failed" - assert newProvider.id == 1, "Expected id 1 since this is the first created provider" - assert newProvider.provider_name == EXPECTED_NAME, "Created provider name did not match request" - -def test_delete_nonexistent_provider(empty_housing_repo: HousingProviderRepository): - ''' - Attempt to delete a service provider that does - not exist. Verify that the deletion gracefully - fails. - ''' - assert empty_housing_repo.delete_service_provider(42) == False - -def test_delete_newly_created_provider(empty_housing_repo: HousingProviderRepository): - ''' - Test creating and then deleting a new service provider, without error. - ''' - new = empty_housing_repo.create_service_provider("Doomed Provider") - assert new is not None, "Test setup failure! Initial create failed." - assert empty_housing_repo.delete_service_provider(new.id) - -def test_get_existing_provider_by_id(housing_repo_5_entries: HousingProviderRepository): - ''' - Test getting a provider by id. 
- ''' - for i in range(1, 6): - provider = housing_repo_5_entries.get_service_provider_by_id(i) - assert provider.provider_name == f"Provider {i}" - assert provider.id == i - -def test_get_all_providers(housing_repo_5_entries: HousingProviderRepository): - ''' - Test getting all available service providers - ''' - all = housing_repo_5_entries.get_service_providers() - assert all is not None - assert len(all) == 5 - - for i in range(1, 6): - provider = all[i-1] - assert provider.id == i - assert provider.provider_name == f"Provider {i}" - -def test_get_all_providers_empty_db(empty_housing_repo: HousingProviderRepository): - all = empty_housing_repo.get_service_providers() - assert all is not None - assert len(all) == 0 - -def test_get_nonexisting_provider_by_id(housing_repo_5_entries: HousingProviderRepository): - failed_get = housing_repo_5_entries.get_service_provider_by_id(42) - assert failed_get is None - -def test_update_existing_service_provider(housing_repo_5_entries: HousingProviderRepository): - UPDATED_NAME = "Rad New Name" - UPDATED_ID = 3 - returned_provider = housing_repo_5_entries.update_service_provider(UPDATED_NAME, UPDATED_ID) - retrieved_provider = housing_repo_5_entries.get_service_provider_by_id(UPDATED_ID) - - assert retrieved_provider is not None - assert retrieved_provider is not None - - assert returned_provider.id == UPDATED_ID - assert returned_provider.provider_name == UPDATED_NAME - - assert retrieved_provider.id == UPDATED_ID - assert retrieved_provider.provider_name == UPDATED_NAME - -def test_update_nonexistent_provider(housing_repo_5_entries: HousingProviderRepository): - returned_provider = housing_repo_5_entries.update_service_provider(9999, "Failed Update Name") - assert returned_provider is None - -def test_provider_count(housing_repo_5_entries: HousingProviderRepository): - assert housing_repo_5_entries.provider_count() == 5 \ No newline at end of file diff --git a/api-v2/tests/test_alembic_migration.py b/api-v2/tests/test_alembic_migration.py index 36f56aa5..12f1baff 100644 --- a/api-v2/tests/test_alembic_migration.py +++ b/api-v2/tests/test_alembic_migration.py @@ -1,4 +1,3 @@ -from app.models import DataAccessLayer from app.user_roles import UserRole from app.repositories.user_repo import UserRepository diff --git a/api-v2/tests/access/__init_.py b/api-v2/tests/unit/access/__init_.py similarity index 100% rename from api-v2/tests/access/__init_.py rename to api-v2/tests/unit/access/__init_.py diff --git a/api-v2/tests/onboarding/__init__.py b/api-v2/tests/unit/intake_profile/__init__.py similarity index 100% rename from api-v2/tests/onboarding/__init__.py rename to api-v2/tests/unit/intake_profile/__init__.py diff --git a/api-v2/tests/stays/__init__.py b/api-v2/tests/unit/matching/__init__.py similarity index 100% rename from api-v2/tests/stays/__init__.py rename to api-v2/tests/unit/matching/__init__.py diff --git a/api-v2/tests/tenant_housing_provider/__init__.py b/api-v2/tests/unit/onboarding/__init__.py similarity index 100% rename from api-v2/tests/tenant_housing_provider/__init__.py rename to api-v2/tests/unit/onboarding/__init__.py diff --git a/api-v2/app/tenant_housing_provider/controller.py b/api-v2/tests/unit/stays/__init__.py similarity index 100% rename from api-v2/app/tenant_housing_provider/controller.py rename to api-v2/tests/unit/stays/__init__.py diff --git a/api-v2/app/tenant_housing_provider/schemas.py b/api-v2/tests/unit/tenant_housing_provider/__init__.py similarity index 100% rename from 
api-v2/app/tenant_housing_provider/schemas.py rename to api-v2/tests/unit/tenant_housing_provider/__init__.py From cb118beafb51b897cb7ad3e34e600dd19f1ab23d Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Tue, 10 Sep 2024 15:50:07 -0700 Subject: [PATCH 31/70] fastapi-migration: Fix bad merge --- api-v2/app/intake_profile/controller.py | 33 +++---- api-v2/app/schemas.py | 113 +++++------------------- 2 files changed, 37 insertions(+), 109 deletions(-) diff --git a/api-v2/app/intake_profile/controller.py b/api-v2/app/intake_profile/controller.py index dd2db127..f8134cbc 100644 --- a/api-v2/app/intake_profile/controller.py +++ b/api-v2/app/intake_profile/controller.py @@ -2,7 +2,8 @@ from fastapi import Depends, APIRouter, HTTPException, Response, Security from fastapi.responses import RedirectResponse -from api.deps import ( + +from app.api.deps import ( get_db, get_cognito_client, requires_auth, @@ -13,22 +14,22 @@ router = APIRouter() -@router.post("/guest/") -def post_guest_intake_profile(body, guest: Depends(aim_guest)): - forms_repo = FormsRepository(DataAccessLayer.session()) +# @router.post("/guest/") +# def post_guest_intake_profile(body, guest: Depends(aim_guest)): +# forms_repo = FormsRepository(DataAccessLayer.session()) - form_id = forms_repo.add_form(body) - form = forms_repo.get_form_json(form_id) - if form: - return form, 200 - return {}, 404 +# form_id = forms_repo.add_form(body) +# form = forms_repo.get_form_json(form_id) +# if form: +# return form, 200 +# return {}, 404 -@router.get("/guest/{form_id}") -def get_guest_intake_profile(form_id, guest: Depends(aim_guest)): - forms_repo = FormsRepository(DataAccessLayer.session()) +# @router.get("/guest/{form_id}") +# def get_guest_intake_profile(form_id, guest: Depends(aim_guest)): +# forms_repo = FormsRepository(DataAccessLayer.session()) - form = forms_repo.get_form_json(form_id) - if form: - return form, 200 - return f"Form with id {form_id} does not exist.", 404 +# form = forms_repo.get_form_json(form_id) +# if form: +# return form, 200 +# return f"Form with id {form_id} does not exist.", 404 diff --git a/api-v2/app/schemas.py b/api-v2/app/schemas.py index 9a5601c0..ca047085 100644 --- a/api-v2/app/schemas.py +++ b/api-v2/app/schemas.py @@ -46,6 +46,26 @@ class UserSignInResponse(BaseModel): token: str +class ForgotPasswordRequest(BaseModel): + email: str + + +class ForgotPasswordResponse(BaseModel): + code: int + type: str + message: str + + +class ConfirmForgotPasswordRequest(BaseModel): + email: str + code: str + password: str + + +class ConfirmForgotPasswordResponse(BaseModel): + message: str + + # class SmartNested(Nested): # ''' # Schema attribute used to serialize nested attributes to @@ -83,100 +103,7 @@ class UserSignInResponse(BaseModel): # class UserSchema(BaseModel): # model_config = ConfigDict(from_attributes=True) -# class FieldValidationsSchema(BaseModel): - -# model_config = ConfigDict(from_attributes=True) - -# required : bool -# max_length : int - -# class FieldPropertiesSchema(BaseModel): - -# class Meta: -# model = FieldProperties -# include_relationships = True -# load_instance = True -# exclude = ('properties_id', ) - -# description = auto_field() -# field_type = auto_field() -# choices = auto_field() - -# class FieldSchema(BaseModel): - -# class Meta: -# model = Field -# include_relationships = True -# load_instance = True -# exclude = ('properties_id', 'validations_id', 'group_id') - -# field_id = auto_field(dump_only=True) -# ref = auto_field() -# properties = SmartNested(FieldPropertiesSchema) -# 
validations = SmartNested(FieldValidationsSchema) - -# class FieldGroupSchema(BaseModel): - -# class Meta: -# model = FieldGroup -# include_relationships = True -# load_instance = True -# exclude = ('group_id', 'form_id') - -# title = auto_field() -# description = auto_field() -# fields = SmartNested(FieldSchema, many=True) - -# class FormSchema(BaseModel): - -# class Meta: -# model = Form -# include_relationships = True -# load_instance = True -# exclude = ('form_id', ) - -# title = auto_field() -# description = auto_field() -# field_groups = SmartNested(FieldGroupSchema, many=True) - -# class ResponseSchema(BaseModel): - -# class Meta: -# model = Response -# include_relationship = True -# load_instance = True -# exclude = ('answer_id', ) - -# user_id = auto_field(load_only=True) -# field_id = auto_field(load_only=True) -# answer_text = auto_field() -# user = SmartNested(UserSchema, only=['name'], required=False, missing=None) -# field = SmartNested(FieldSchema, -# only=['field_id', 'ref', 'properties'], -# required=False, -# missing=None) - -# @post_load -# def make_response(self, data, **kwargs): -# if data.user is None: -# user = self._session.query(User).get(data.user_id) -# if not user: -# raise ValidationError('User not found', 'user_id') -# data.user = user - -# if data.field is None: -# field = self._session.query(Field).get(data.field_id) -# if not field: -# raise ValidationError('Field not found', 'field_id') -# data.field = field - -# return data - # user_schema = UserSchema() # users_schema = UserSchema(many=True) -# service_provider_schema = HousingProgramServiceProviderSchema() -# service_provider_list_schema = HousingProgramServiceProviderSchema(many=True) -# form_schema = FormSchema() -# response_schema = ResponseSchema(many=True) # unmatched_cs_schema = UnmatchedCaseStatusSchema() # unmatched_c_schema = UnmatchedCaseSchema() From c7fa414c76b80a9e23ecdb436930891f2ff12396 Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Tue, 10 Sep 2024 15:54:24 -0700 Subject: [PATCH 32/70] fastapi-migration: Add e2e test for workflow testing The directory is currently empty until end-to-end workflow tests are added. --- api-v2/tests/e2e/empty.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 api-v2/tests/e2e/empty.py diff --git a/api-v2/tests/e2e/empty.py b/api-v2/tests/e2e/empty.py new file mode 100644 index 00000000..e69de29b From 2addea98c8052c94ba4f87c5a9239655d990a11b Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Tue, 10 Sep 2024 17:08:47 -0700 Subject: [PATCH 33/70] fastapi-migration: Add basic docs to Housing Orgs code --- api-v2/app/tenant_housing_orgs/controller.py | 7 +- api-v2/app/tenant_housing_orgs/crud.py | 21 +++++- api-v2/app/tenant_housing_orgs/models.py | 2 + api-v2/app/tenant_housing_orgs/schemas.py | 1 + api-v2/tests/integration/setup_utils.py | 72 ++++++++++++++++++++ 5 files changed, 99 insertions(+), 4 deletions(-) create mode 100644 api-v2/tests/integration/setup_utils.py diff --git a/api-v2/app/tenant_housing_orgs/controller.py b/api-v2/app/tenant_housing_orgs/controller.py index 4973f4f7..0870c7ff 100644 --- a/api-v2/app/tenant_housing_orgs/controller.py +++ b/api-v2/app/tenant_housing_orgs/controller.py @@ -1,3 +1,7 @@ +"""Controller (or "Resource") that represents a Housing Org(anization). + +This module implements the HTTP interface that represents a Housing Org. +""" from . 
import crud, models, schemas from fastapi import APIRouter, Depends, Request, Response, HTTPException, status @@ -14,8 +18,7 @@ def create_housing_org( housing_org: schemas.HousingOrg, request: Request, - session: Session = Depends(get_db) -) -> schemas.HousingOrg: + session: Session = Depends(get_db)) -> schemas.HousingOrg: """Create a housing org. A housing org is created if it is not already in diff --git a/api-v2/app/tenant_housing_orgs/crud.py b/api-v2/app/tenant_housing_orgs/crud.py index dfeaefae..9deb6a72 100644 --- a/api-v2/app/tenant_housing_orgs/crud.py +++ b/api-v2/app/tenant_housing_orgs/crud.py @@ -1,10 +1,13 @@ +"""A simple CRUD implementation for the HousingOrg data model.""" + from sqlalchemy.orm import Session -from sqlalchemy import select, insert, update +from sqlalchemy import select from . import models, schemas def create_housing_org(session: Session, housing_org: schemas.HousingOrg): + """Create a Housing Org.""" new_org = models.HousingOrg(org_name=housing_org.org_name) session.add(new_org) session.commit() @@ -14,26 +17,38 @@ def create_housing_org(session: Session, housing_org: schemas.HousingOrg): def read_housing_org_by_id(session: Session, housing_org_id: int) -> models.HousingOrg: + """Read a HousingOrg by ID.""" return session.get(models.HousingOrg, housing_org_id) def read_housing_org_by_name(session: Session, org_name: str) -> models.HousingOrg: + """Read a HousingOrg by name.""" query = select( models.HousingOrg).filter(models.HousingOrg.org_name == org_name) return session.scalars(query).one_or_none() def read_housing_orgs(session: Session) -> list[models.HousingOrg]: + """Read all HousingOrgs returned as a list.""" return session.scalars(select(models.HousingOrg)).all() def upsert_housing_org(session: Session, housing_org: models.HousingOrg) -> bool: + """Upsert (Update or Insert) a HousingOrg. + + If a HousingOrg exists, it will be updated and the function + will return False. + + If a HousingOrg does not exist, it will be added to the database + and the function will return True. 
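
The docstring added above encodes the upsert contract in the return value. A rough contract check, reusing the in-memory setup from the earlier sketch and assuming the upsert commits cleanly:

```python
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

from app.core.db import Base
from app.tenant_housing_orgs import crud, models

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    assert crud.upsert_housing_org(
        session, models.HousingOrg(id=1, org_name="Org v1")) is True   # inserted
with Session(engine) as session:
    assert crud.upsert_housing_org(
        session, models.HousingOrg(id=1, org_name="Org v2")) is False  # renamed in place
```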
+ """ was_created = False with session.begin(): - db_housing_org = session.query(models.HousingOrg).filter_by(id=housing_org.id).first() + db_housing_org = session.query( + models.HousingOrg).filter_by(id=housing_org.id).first() if db_housing_org: db_housing_org.org_name = housing_org.org_name else: @@ -43,7 +58,9 @@ def upsert_housing_org(session: Session, return was_created + def delete_housing_org(session: Session, housing_org: models.HousingOrg): + """Delete a HousingOrg.""" housing_org = session.get(models.HousingOrg, housing_org.id) session.delete(housing_org) session.commit() diff --git a/api-v2/app/tenant_housing_orgs/models.py b/api-v2/app/tenant_housing_orgs/models.py index 8ba6d607..c69cf55a 100644 --- a/api-v2/app/tenant_housing_orgs/models.py +++ b/api-v2/app/tenant_housing_orgs/models.py @@ -1,3 +1,5 @@ +"""SQLAlchemy models for the Housing Org.""" + from typing import Annotated from typing import List from sqlalchemy import ForeignKey, String diff --git a/api-v2/app/tenant_housing_orgs/schemas.py b/api-v2/app/tenant_housing_orgs/schemas.py index 92beedbb..ab666265 100644 --- a/api-v2/app/tenant_housing_orgs/schemas.py +++ b/api-v2/app/tenant_housing_orgs/schemas.py @@ -1,3 +1,4 @@ +"""Pydantic schemas for the Housing Org.""" from pydantic import BaseModel, ConfigDict diff --git a/api-v2/tests/integration/setup_utils.py b/api-v2/tests/integration/setup_utils.py new file mode 100644 index 00000000..279d4d60 --- /dev/null +++ b/api-v2/tests/integration/setup_utils.py @@ -0,0 +1,72 @@ +from fastapi import TestClient + + +def signup_user(client: TestClient, + email: str, + password: str, + firstName: str = None, + middleName: str = None, + lastName: str = None) -> None: + if not firstName: firstName = "firstName" + if not lastName: lastName = "lastName" + if not middleName: middleName = "" + + signup_response = client.post('/api/auth/signup/host', + json={ + 'email': email, + 'password': password, + 'firstName': firstName, + 'middleName': middleName, + 'lastName': lastName + }) + # Currently the signup returns different response structures for auth + # errors and "Bad Request" errors. Ideally the structure of the response + # would always be the same where there is an error. + assert signup_response.status_code != 400, f"User factory failed to signup user: {signup_response.status}, {signup_response.text}" + assert signup_response.status_code == 200, f"User factory failed to signup user: {signup_response.json['message']}" + + +def confirm_user(boto_client, email: str) -> None: + confirm_response = boto_client.admin_confirm_sign_up( + UserPoolId=app.config["COGNITO_USER_POOL_ID"], Username=email) + assert confirm_response['ResponseMetadata'][ + 'HTTPStatusCode'] == 200, f"User factory failed to confirm user" + + +def create_user(client: TestClient, + email: str, + password: str, + firstName: str = None, + middleName: str = None, + lastName: str = None) -> None: + ''' + Signup and confirm a new user. Fail the test if the + signup or confirm operation fails. + ''' + signup_user(client, email, password, firstName, middleName, lastName) + confirm_user(client, email) + + +def signin_user(client: TestClient, email: str, password: str) -> str: + ''' + Signin a user and return the JWT. Fail the test if the + signin operation fails. 
+ ''' + response = client.post('/api/auth/signin', + json={ + 'email': email, + 'password': password + }) + assert response.status_code == 200, "Signin failed" + assert "token" in response.json, "Signin succeeded but no token provided" + return response.json['token'] + + +def create_and_signin_user(client: TestClient, email: str, + password: str) -> (str, str): + ''' + Signup, confirm, and signin a new user. Return the JWT. + Fail the test if the signup, confirm, or signin operation fails. + ''' + create_user(client, email, password) + return signin_user(client, email, password) From 8577d02f8d40ed167607d147ebb832c7d7969287 Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Tue, 10 Sep 2024 17:10:14 -0700 Subject: [PATCH 34/70] fastapi-migration: Remove unused tests These tests were more relevant for the previous API. --- api-v2/tests/setup_utils.py | 65 ----------- api-v2/tests/test_configs.py | 207 ----------------------------------- api-v2/tests/test_schema.py | 147 ------------------------- 3 files changed, 419 deletions(-) delete mode 100644 api-v2/tests/setup_utils.py delete mode 100644 api-v2/tests/test_configs.py delete mode 100644 api-v2/tests/test_schema.py diff --git a/api-v2/tests/setup_utils.py b/api-v2/tests/setup_utils.py deleted file mode 100644 index ac5a16e6..00000000 --- a/api-v2/tests/setup_utils.py +++ /dev/null @@ -1,65 +0,0 @@ -from app.tenant_housing_provider.service_provider_repository import HousingProviderRepository - - -def signup_user(app, email: str, password: str, firstName: str = None, - middleName: str = None, lastName: str = None) -> None: - if not firstName: firstName = "firstName" - if not lastName: lastName = "lastName" - if not middleName: middleName = "" - - signup_response = app.test_client().post( - '/api/auth/signup/host', - json = { - 'email': email, - 'password': password, - 'firstName': firstName, - 'middleName': middleName, - 'lastName': lastName - } - ) - # Currently the signup returns different response structures for auth - # errors and "Bad Request" errors. Ideally the structure of the response - # would always be the same where there is an error. - assert signup_response.status_code != 400, f"User factory failed to signup user: {signup_response.status}, {signup_response.text}" - assert signup_response.status_code == 200, f"User factory failed to signup user: {signup_response.json['message']}" - -def confirm_user(app, email: str) -> None: - confirm_response = app.boto_client.admin_confirm_sign_up( - UserPoolId=app.config["COGNITO_USER_POOL_ID"], - Username=email - ) - assert confirm_response['ResponseMetadata']['HTTPStatusCode'] == 200, f"User factory failed to confirm user" - -def create_user(test_client, email: str, password: str, firstName: str = None, - middleName: str = None, lastName: str = None) -> None: - ''' - Signup and confirm a new user. Fail the test if the - signup or confirm operation fails. - ''' - app = test_client.application - signup_user(app, email, password, firstName, middleName, lastName) - confirm_user(app, email) - -def signin_user(test_client, email: str, password: str) -> str: - ''' - Signin a user and return the JWT. Fail the test if the - signin operation fails. 
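
These helpers chain the Cognito signup, admin confirmation, and signin steps so integration tests can start from an authenticated state; they presume a confirmable backend (a test user pool or a mock such as moto). A hypothetical test built on them (the email and password are placeholders):

```python
def test_create_and_signin_user_returns_token(client):
    token = create_and_signin_user(client, "host@example.com", "Str0ng!Passw0rd")
    # The returned JWT can authorize follow-up requests as a Bearer token.
    assert token
```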
- ''' - response = test_client.post( - '/api/auth/signin', - json = { - 'email': email, - 'password': password - } - ) - assert response.status_code == 200, "Signin failed" - assert "token" in response.json, "Signin succeeded but no token provided" - return response.json['token'] - -def create_and_signin_user(test_client, email: str, password: str) -> (str, str): - ''' - Signup, confirm, and signin a new user. Return the JWT. - Fail the test if the signup, confirm, or signin operation fails. - ''' - create_user(test_client, email, password) - return signin_user(test_client, email, password) diff --git a/api-v2/tests/test_configs.py b/api-v2/tests/test_configs.py deleted file mode 100644 index f2ad286c..00000000 --- a/api-v2/tests/test_configs.py +++ /dev/null @@ -1,207 +0,0 @@ -import pytest -from pytest import MonkeyPatch -from sqlalchemy.engine import make_url - -from openapi_server.app import create_app, HUUFlaskApp, HUUConnexionApp -from openapi_server.configs.production import ProductionHUUConfig -from openapi_server.configs.development import DevelopmentHUUConfig -from openapi_server.models.database import DataAccessLayer - -def create_dev_app() -> HUUConnexionApp: - ''' - Create our app without reading the .env file. The DevelopmentHUUConfig - will read values from the environment, so monkey patching can be used - to set the values. - ''' - return create_app(DevelopmentHUUConfig()) - -def create_prod_app() -> HUUConnexionApp: - ''' - Create the production app without reading the .env file. - Fake production secrets must be set using monkey patching, otherwise - the production configuration will raise errors during its - internal validation. - ''' - return create_app(ProductionHUUConfig()) - -def test_create_app_default_dev(empty_db_session, empty_environment: MonkeyPatch): - ''' - Test that create_app with development config creates a Flask app with - a default development configuration, available as app.config. - ''' - connexion_app = create_app(DevelopmentHUUConfig()) - config = connexion_app.app.config - - assert "DATABASE_URL" in config - assert "PORT" in config - assert "HOST" in config - assert "TESTING" in config - assert "SECRET_KEY" in config - assert "ROOT_URL" in config - - for key in config: - assert "cognito" not in key.lower() - - assert make_url(config["DATABASE_URL"]) is not None - assert isinstance(config["PORT"], int) - assert config["PORT"] > 0 and config["PORT"] <= 65535 - assert config["ROOT_URL"] - -def test_flask_app_override(empty_db_session, empty_environment: MonkeyPatch): - ''' - Test that the create_app properly overrides the connexion app constructor - to return our custom application type that contains global configuration. - ''' - connexion_app = create_app(DevelopmentHUUConfig()) - assert isinstance(connexion_app, HUUConnexionApp) - assert isinstance(connexion_app.app, HUUFlaskApp) - -def test_missing_secret_throws_err(fake_prod_env: MonkeyPatch): - ''' - Test that failing to set a configuration field that is marked as a - secret field throws an error. 
- ''' - fake_prod_env.delenv("SECRET_KEY") - with pytest.raises(ValueError): - create_app(ProductionHUUConfig()) - -def test_hardcoding_secret_throws_err(fake_prod_env: MonkeyPatch): - def check_with_hardcoded_secret(**kwargs): - with pytest.raises(ValueError): - ProductionHUUConfig(**kwargs) - - check_with_hardcoded_secret(SECRET_KEY="My Hard Coded Fake Secret") - check_with_hardcoded_secret(COGNITO_CLIENT_ID="My Hard Coded Fake Secret") - check_with_hardcoded_secret(COGNITO_CLIENT_SECRET="My Hard Coded Fake Secret") - check_with_hardcoded_secret(COGNITO_REGION="My Hard Coded Fake Secret") - check_with_hardcoded_secret(COGNITO_REDIRECT_URI="My Hard Coded Fake Secret") - check_with_hardcoded_secret(COGNITO_USER_POOL_ID="My Hard Coded Fake Secret") - check_with_hardcoded_secret(COGNITO_ACCESS_ID="My Hard Coded Fake Secret") - check_with_hardcoded_secret(COGNITO_ACCESS_KEY="My Hard Coded Fake Secret") - -def test_config_reads_from_env(empty_db_session, empty_environment: MonkeyPatch): - ''' - Test that hard-coded values are overwritten using values from the system - environment variables. - ''' - env_port = 9000 - hardcoded_port = 7777 - env_DEBUG = False - hardcoded_DEBUG = True - env_secret = "Extremely Cryptographically Insecure Key" - hardcoded_secret = "Equally Insecure Key" - - empty_environment.setenv("FLASK_DEBUG", str(env_DEBUG)) - empty_environment.setenv("PORT", str(env_port)) - empty_environment.setenv("SECRET_KEY", env_secret) - - config = DevelopmentHUUConfig( - FLASK_DEBUG=hardcoded_DEBUG, - PORT=hardcoded_port, - SECRET_KEY=hardcoded_secret - ) - - assert config.FLASK_DEBUG == env_DEBUG - assert config.PORT == env_port - assert config.SECRET_KEY == env_secret - - app = create_app(config).app - app_config = app.config - - assert app_config["DEBUG"] == env_DEBUG - assert app_config["PORT"] == env_port - assert app_config["SECRET_KEY"] == env_secret - assert app.is_debug_app == env_DEBUG - -def test_invalid_port_throws(empty_environment: MonkeyPatch): - empty_environment.setenv("PORT", "-1") - with pytest.raises(ValueError): - create_dev_app() - empty_environment.setenv("PORT", "66000") - with pytest.raises(ValueError): - create_dev_app() - -def test_env_var_bool_parsing(empty_db_session, empty_environment: MonkeyPatch): - def check_bool_parsing(actual: str, expected: bool, msg: str): - empty_environment.setenv("FLASK_DEBUG", actual) - assert create_dev_app().app.config["FLASK_DEBUG"] == expected, msg - - check_bool_parsing("True", True, "match case") - check_bool_parsing("true", True, "lower case") - check_bool_parsing("1", True, "one") - check_bool_parsing("tRuE", True, "mixed case") - check_bool_parsing(" True ", True, "extra padding") - - check_bool_parsing("False", False, "match case") - check_bool_parsing("false", False, "lower case") - check_bool_parsing("0", False, "zero") - check_bool_parsing("fAlSe", False, "mixed case") - check_bool_parsing(" False ", False, "extra padding") - - empty_environment.setenv("FLASK_DEBUG", "") - with pytest.raises(ValueError): - create_dev_app() - -def test_database_url_config(empty_db_session, empty_environment: MonkeyPatch): - ''' - Test that setting the DATABASE_URL initializes the database - using the specified URL. 
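
The retired Flask-era tests above enforced two properties worth carrying forward: environment variables beat hard-coded values, and missing secrets fail fast at startup. In the FastAPI stack the natural home for both is `app.core.config.settings`; a hedged sketch with pydantic-settings (the field names follow the variables exercised in these tests, the class layout is an assumption):

```python
from pydantic_settings import BaseSettings, SettingsConfigDict

class Settings(BaseSettings):
    # Values come from the process environment first, then .env;
    # fields without defaults raise a ValidationError when absent.
    model_config = SettingsConfigDict(env_file=".env")

    DATABASE_URL: str = "sqlite:///:memory:"
    SECRET_KEY: str
    COGNITO_CLIENT_ID: str
    COGNITO_CLIENT_SECRET: str
    COGNITO_USER_POOL_ID: str

settings = Settings()
```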
- ''' - empty_environment.setenv("DATABASE_URL", "sqlite:///:memory:") - create_dev_app() - db_engine = DataAccessLayer._engine - assert db_engine is not None - assert db_engine.url.database == ":memory:" - -def test_root_url_required(empty_environment: MonkeyPatch): - with pytest.raises(ValueError, match="ROOT_URL"): - create_app(DevelopmentHUUConfig( - ROOT_URL="" - )) - - with pytest.raises(ValueError, match="ROOT_URL"): - create_app(DevelopmentHUUConfig( - ROOT_URL=None - )) - - empty_environment.setenv("ROOT_URL", "") - with pytest.raises(ValueError, match="ROOT_URL"): - create_app(DevelopmentHUUConfig()) - -def test_prod_app_disables_development(empty_db_session, fake_prod_env: MonkeyPatch): - def check_development_disabled(enable_testing: bool, enable_debug: bool): - fake_prod_env.setenv("FLASK_DEBUG", str(enable_debug)) - fake_prod_env.setenv("TESTING", str(enable_testing)) - if enable_debug or enable_testing: - with pytest.raises(ValueError): - create_prod_app() - else: - create_prod_app() - - check_development_disabled(True, True) - check_development_disabled(True, False) - check_development_disabled(False, True) - check_development_disabled(False, False) - -def test_prod_secret_key_requirements(empty_db_session, fake_prod_env: MonkeyPatch): - def check_insecure_secret(secret: str): - fake_prod_env.setenv("SECRET_KEY", secret) - with pytest.raises(ValueError): - create_prod_app() - def check_secure_secret(secret: str): - fake_prod_env.setenv("SECRET_KEY", secret) - create_prod_app() - - check_insecure_secret("hi") - check_insecure_secret("") - check_insecure_secret("aaaaaaaaaaaaaaaaaaaaaaaaaa") - check_insecure_secret("asdfasdfasdfasdfasdfasdfa") - check_insecure_secret("12312132132132132132132132") - check_insecure_secret("123456789asdfqwe") - check_insecure_secret("123456789ASDFQWERTG") - - check_secure_secret("3-nTeYX6Zi2T6XlvN2m93cNdDHSB6NC0") - check_secure_secret("QiWYHC1St0pPOEXY1ChiwKrYLJQr9yWH") - check_secure_secret("wd-4FBhuf2TYP4T6FrAxaCvRLItXlIK5") - check_secure_secret("omMTDTPUXTcizyka2AtOg570XqWFlFfP") - check_secure_secret("iEIGSrC6jSh6QdLNib0io8sz_60lZ_BE") \ No newline at end of file diff --git a/api-v2/tests/test_schema.py b/api-v2/tests/test_schema.py deleted file mode 100644 index 3fb2d01f..00000000 --- a/api-v2/tests/test_schema.py +++ /dev/null @@ -1,147 +0,0 @@ -import json -import pytest -from marshmallow.exceptions import ValidationError - -from openapi_server.models.schema import user_schema, users_schema, HousingProgramServiceProviderSchema -from openapi_server.models.database import User, Role -from openapi_server.models.user_roles import UserRole -from openapi_server.repositories.user_repo import UserRepository - -def test_housing_program_service_provider(): - test_housing_program_service_provide_string = "{\"id\": 5, \"provider_name\": \"test\"}" - - housing_program_service_provider = HousingProgramServiceProviderSchema(many=True) - - housing_program_service_provider.fields["id"] = 5 - housing_program_service_provider.fields["provider_name"] = "test" - - assert housing_program_service_provider is not None, "HousingProgramServiceProviderSchema is null" - - assert housing_program_service_provider.fields["id"] == 5, "HousingProgramServiceProviderSchema id field did not match what was input" - - assert housing_program_service_provider.fields["provider_name"] == "test", "HousingProgramServiceProviderSchema provider_name field did not match what was input" - - jsonresult = json.dumps(housing_program_service_provider.fields) - - assert jsonresult == 
test_housing_program_service_provide_string, "HousingProgramServiceProvider json did not match test string " - -def test_deserialize_host(empty_db_session): - ''' - Verify that the host schema can be deserialized from json. - ''' - json_from_request = '{"role": {"name": "Host"}, "email": "realemail@fakedomain.com", "firstName": "first", "middleName": "middle", "lastName": "last"}' - host = user_schema.load(json.loads(json_from_request), session=empty_db_session) - - assert host is not None, "Host is null" - assert isinstance(host, User), "host is not of type User" - assert host.firstName == 'first' - assert host.middleName == 'middle' - assert host.lastName == 'last' - assert host.email == 'realemail@fakedomain.com' - assert isinstance(host.role, Role) - assert host.role.name == 'Host' - -def test_serialize_host(empty_db_session): - ''' - Verify that the host schema can be serialized to json. - ''' - user_repo = UserRepository(empty_db_session) - new_host = user_repo.add_user("realemail@fakedomain.com", UserRole.HOST, "first", "middle", "last") - json_from_host = user_schema.dump(new_host) - assert json_from_host is not None, "Json from host is null" - assert 'name' in json_from_host["role"] - assert json_from_host['role']['name'] == 'Host' - assert json_from_host['firstName'] == 'first' - assert json_from_host['middleName'] == 'middle' - assert json_from_host['lastName'] == 'last' - assert json_from_host['email'] == "realemail@fakedomain.com" - assert 'id' not in json_from_host, "The user Id should be excluded from serialization" - assert 'role_id' not in json_from_host, "The role Id should be excluded from serialization" - -def test_deserialize_multiplehost(empty_db_session): - ''' - Verify that the user schema can be deserialize multiple users. - ''' - json_from_request = '[{"role": {"name": "Host"}, "email": "realemail@fakedomain.com0", "firstName": "first0", "middleName": "middle0", "lastName": "last0"}, ' + \ - ' {"role": {"name": "Guest"}, "email": "realemail@fakedomain.com1", "firstName": "first1", "middleName": "middle1", "lastName": "last1"}, ' + \ - ' {"role": {"name": "Admin"}, "email": "realemail@fakedomain.com2", "firstName": "first2", "middleName": "middle2", "lastName": "last2"}, ' + \ - ' {"role": {"name": "Coordinator"}, "email": "realemail@fakedomain.com3", "firstName": "first3", "middleName": "middle3", "lastName": "last3"}, ' + \ - ' {"role": {"name": "Guest"}, "email": "realemail@fakedomain.com4", "firstName": "first4", "middleName": "middle4", "lastName": "last4"} ]' - users = users_schema.load(json.loads(json_from_request), session=empty_db_session) - - expected_role = ("Host", "Guest", "Admin", "Coordinator", "Guest") - assert len(users) == len(expected_role) - for idx, (actual_user, expected_role) in enumerate(zip(users, expected_role)): - assert actual_user is not None - assert actual_user.role.name == expected_role - assert actual_user.email == f"realemail@fakedomain.com{idx}" - assert actual_user.firstName == f"first{idx}" - assert actual_user.middleName == f"middle{idx}" - assert actual_user.lastName == f"last{idx}" - -def test_serialize_multiplehost(empty_db_session): - ''' - Verify that the host schema can be serialized to multiple hosts. 
- ''' - user_repo = UserRepository(empty_db_session) - hosts_to_respond_with = [ - user_repo.add_user("realemail@fakedomain.com0", UserRole.HOST, "first0", "middle0", "last0"), - user_repo.add_user("realemail@fakedomain.com1", UserRole.GUEST, "first1", "middle1", "last1"), - user_repo.add_user("realemail@fakedomain.com2", UserRole.ADMIN, "first2", "middle2", "last2"), - user_repo.add_user("realemail@fakedomain.com3", UserRole.COORDINATOR, "first3", "middle3", "last3"), - user_repo.add_user("realemail@fakedomain.com4", UserRole.GUEST, "first4", "middle4", "last4") - ] - users = users_schema.dump(hosts_to_respond_with) - - expected_role = ("Host", "Guest", "Admin", "Coordinator", "Guest") - assert len(users) == len(expected_role) - for idx, (actual_user, expected_role) in enumerate(zip(users, expected_role)): - assert actual_user is not None - assert actual_user["role"]["name"] == expected_role - assert actual_user["email"] == f"realemail@fakedomain.com{idx}" - assert actual_user["firstName"] == f"first{idx}" - assert actual_user["middleName"] == f"middle{idx}" - assert actual_user["lastName"] == f"last{idx}" - -def test_deserializejson_extrafield_noerror(empty_db_session): - ''' - Verify that json with extra fields will not raise a validation error. - This allows us to map request json directly into model objects. - ''' - json_from_request = '{"extra_field": "extra", "role": {"name": "Host"}, "email": "realemail@fakedomain.com", "firstName": "first", "lastName": "last"}' - data_from_request = json.loads(json_from_request) - user = user_schema.load(data_from_request, session=empty_db_session) - assert user.role.name == UserRole.HOST.value - assert user.email == "realemail@fakedomain.com" - assert user.firstName == "first" - assert user.middleName == None - assert user.lastName == "last" - -def test_deserializeuser_missingfield_error(empty_db_session): - ''' - Verify that json with a missing field will raise a validation error. - ''' - # Missing First name - json_from_request = '{"role": {"name": "Host"}, "email": "realemail@fakedomain.com", "middleName": "middle", "lastName": "last"}' - data_from_request = json.loads(json_from_request) - with pytest.raises(ValidationError): - user_schema.load(data_from_request, session=empty_db_session) - -def test_deserializeuser_missingrelationship_error(empty_db_session): - ''' - Verify that json with a missing field will raise a validation error. - ''' - # Missing role - json_from_request = '{"email": "realemail@fakedomain.com", "firstName": "first", "middleName": "middle", "lastName": "last"}' - data_from_request = json.loads(json_from_request) - with pytest.raises(ValidationError): - user_schema.load(data_from_request, session=empty_db_session) - -def test_deserialize_nonexistantrole_err(empty_db_session): - ''' - Verify that json with a missing id will not raise a validation error. 
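
These deleted tests exercised marshmallow's nested role/user (de)serialization. Under Pydantic v2 the same shape falls out of nested models, with `from_attributes=True` standing in for `load_instance`; a hedged equivalent (the schema names are illustrative, the payload is taken from the tests above):

```python
from pydantic import BaseModel, ConfigDict

class RoleSchema(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    name: str

class UserSchema(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    email: str
    firstName: str
    middleName: str | None = None
    lastName: str
    role: RoleSchema

payload = {"role": {"name": "Host"}, "email": "realemail@fakedomain.com",
           "firstName": "first", "lastName": "last"}
user = UserSchema.model_validate(payload)
assert user.role.name == "Host" and user.middleName is None
```

One behavioral difference to note: Pydantic treats every un-defaulted field as required, so the missing-field cases above would surface as `ValidationError`s without any per-field `required` flags.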
- ''' - json_from_request = '{"role": {"name": "FakeRole"}, "email": "realemail@fakedomain.com", "firstName": "first", "middleName": "middle", "lastName": "last"}' - data_from_request = json.loads(json_from_request) - with pytest.raises(ValidationError, match="Role FakeRole does not exist"): - user_schema.load(data_from_request, session=empty_db_session) From 53e9346477d55c499c5183524e9b8bf3bcf65676 Mon Sep 17 00:00:00 2001 From: Erik Date: Tue, 10 Sep 2024 17:52:45 -0700 Subject: [PATCH 35/70] Move existing auth and files into access --- api-v2/app/{api/routes => access}/auth.py | 4 ++-- api-v2/app/{ => access}/crud.py | 4 ++-- api-v2/app/{ => access}/models.py | 0 api-v2/app/{ => access}/schemas.py | 0 api-v2/app/{api/routes => access}/user.py | 6 +++--- api-v2/app/access/user_repo.py | 4 ++-- api-v2/app/api/main.py | 2 +- api-v2/app/seed.py | 2 +- 8 files changed, 11 insertions(+), 11 deletions(-) rename api-v2/app/{api/routes => access}/auth.py (96%) rename api-v2/app/{ => access}/crud.py (92%) rename api-v2/app/{ => access}/models.py (100%) rename api-v2/app/{ => access}/schemas.py (100%) rename api-v2/app/{api/routes => access}/user.py (87%) diff --git a/api-v2/app/api/routes/auth.py b/api-v2/app/access/auth.py similarity index 96% rename from api-v2/app/api/routes/auth.py rename to api-v2/app/access/auth.py index b20a87af..7a1dd234 100644 --- a/api-v2/app/api/routes/auth.py +++ b/api-v2/app/access/auth.py @@ -8,9 +8,9 @@ from botocore.exceptions import ClientError -from app.schemas import UserCreate, UserSignIn, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse, ConfirmForgotPasswordResponse, ConfirmForgotPasswordRequest +from app.access.schemas import UserCreate, UserSignInRequest, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse, ConfirmForgotPasswordResponse, ConfirmForgotPasswordRequest, RefreshTokenResponse -from app.crud import create_user, delete_user, get_user +from app.access.crud import create_user, delete_user, get_user from app.api.deps import ( get_db, get_cognito_client, diff --git a/api-v2/app/crud.py b/api-v2/app/access/crud.py similarity index 92% rename from api-v2/app/crud.py rename to api-v2/app/access/crud.py index c28f2edd..fc7ce869 100644 --- a/api-v2/app/crud.py +++ b/api-v2/app/access/crud.py @@ -1,7 +1,7 @@ from sqlalchemy.orm import Session -import app.models as models -import app.schemas as schemas +import app.access.models as models +import app.access.schemas as schemas def get_role(db: Session, role: int): diff --git a/api-v2/app/models.py b/api-v2/app/access/models.py similarity index 100% rename from api-v2/app/models.py rename to api-v2/app/access/models.py diff --git a/api-v2/app/schemas.py b/api-v2/app/access/schemas.py similarity index 100% rename from api-v2/app/schemas.py rename to api-v2/app/access/schemas.py diff --git a/api-v2/app/api/routes/user.py b/api-v2/app/access/user.py similarity index 87% rename from api-v2/app/api/routes/user.py rename to api-v2/app/access/user.py index e0c65c67..b85685e9 100644 --- a/api-v2/app/api/routes/user.py +++ b/api-v2/app/access/user.py @@ -4,9 +4,9 @@ from sqlalchemy.orm import Session -from schemas import User -from crud import get_user -from api.deps import get_db +from app.access.schemas import User +from app.access.crud import get_user +from app.api.deps import get_db router = APIRouter() diff --git a/api-v2/app/access/user_repo.py b/api-v2/app/access/user_repo.py index 0fb65721..58913202 100644 --- a/api-v2/app/access/user_repo.py +++ b/api-v2/app/access/user_repo.py 
@@ -1,7 +1,7 @@ from typing import List
-from ..models import UnmatchedGuestCase, UnmatchedGuestCaseStatus, User, Role
-from ..user_roles import UmatchedCaseStatus, UserRole
+from .models import UnmatchedGuestCase, UnmatchedGuestCaseStatus, User, Role
+from .user_roles import UmatchedCaseStatus, UserRole
 class UnmatchedCaseRepository:
diff --git a/api-v2/app/api/main.py b/api-v2/app/api/main.py
index 915bca1e..9bd840d8 100644
--- a/api-v2/app/api/main.py
+++ b/api-v2/app/api/main.py
@@ -1,6 +1,6 @@ from fastapi import APIRouter
-from app.api.routes import auth, user
+from app.access import auth, user
 from app.intake_profile import controller as intake_profile
 from app.tenant_housing_orgs import controller as housing_org
diff --git a/api-v2/app/seed.py b/api-v2/app/seed.py
index a4204afd..6433c7bf 100644
--- a/api-v2/app/seed.py
+++ b/api-v2/app/seed.py
@@ -1,7 +1,7 @@ from sqlalchemy import event
 from app.core.db import Base, engine
-from app.models import Role
+from app.access.models import Role
 INITIAL_ROLES = [ {"type": "admin"},
From d04aae7857066d57ef41c086f4462144a3907272 Mon Sep 17 00:00:00 2001
From: "Mr. Paul"
Date: Wed, 11 Sep 2024 16:17:12 -0700
Subject: [PATCH 36/70] fastapi-migration: Housing Org controller transactions

The tenant housing orgs controller is now responsible for maintaining
transaction boundaries. This was done after further research into where
other systems put the responsibility of managing transactions.
---
 api-v2/app/tenant_housing_orgs/controller.py | 48 +++++++++++--------
 api-v2/app/tenant_housing_orgs/crud.py | 27 ++++-------
 .../test_housing_orgs_controller.py | 32 ++++++-------
 3 files changed, 54 insertions(+), 53 deletions(-)

diff --git a/api-v2/app/tenant_housing_orgs/controller.py b/api-v2/app/tenant_housing_orgs/controller.py
index 0870c7ff..f6059066 100644
--- a/api-v2/app/tenant_housing_orgs/controller.py
+++ b/api-v2/app/tenant_housing_orgs/controller.py
@@ -16,9 +16,9 @@
 @router.post("/", status_code=status.HTTP_201_CREATED)
 def create_housing_org(
-        housing_org: schemas.HousingOrg,
-        request: Request,
-        session: Session = Depends(get_db)) -> schemas.HousingOrg:
+    housing_org: schemas.HousingOrg,
+    request: Request,
+    session: Session = Depends(get_db)) -> schemas.HousingOrg:
     """Create a housing org.

     A housing org is created if it is not already in
@@ -27,12 +27,17 @@ def create_housing_org(
     Return the newly created housing org. Return None
     if the housing org already exists.
""" - db_org = crud.read_housing_org_by_name(session, housing_org.org_name) - if db_org: - return RedirectResponse(status_code=status.HTTP_303_SEE_OTHER, - url=f"{request.url}/{db_org.id}") + with session.begin(): + db_org = crud.read_housing_org_by_name(session, housing_org.org_name) + if db_org: + return RedirectResponse(status_code=status.HTTP_303_SEE_OTHER, + url=f"{request.url}/{db_org.id}") - return crud.create_housing_org(session, housing_org) + new_housing_org = models.HousingOrg(org_name=housing_org.org_name) + crud.create_housing_org(session, new_housing_org) + + session.refresh(new_housing_org) + return new_housing_org @router.get("/{housing_org_id}") @@ -60,25 +65,26 @@ def get_housing_orgs(session: Session = Depends(get_db)) -> list[ @router.put("/{housing_org_id}", status_code=status.HTTP_200_OK) def put_housing_org( - housing_org_id: int, - body: schemas.HousingOrg, - response: Response, - session: Session = Depends(get_db)) -> schemas.HousingOrg: + housing_org_id: int, + body: schemas.HousingOrg, + response: Response, + session: Session = Depends(get_db)) -> schemas.HousingOrg: """Create or Update a Housing Org with the given ID. - Return the updated housing org if update is successful, otherwise return None. - If the representation contains a Housing Org ID that does match the ID given in the path, then a HTTP 409 Conflict will be returned. """ if body.id is not None and body.id != housing_org_id: raise HTTPException( status_code=status.HTTP_409_CONFLICT, - detail=f"Failed to find org with id {housing_org_id}") + detail="The Housing Org ID in the path mismatches the ID in the request body.") housing_org = models.HousingOrg(id=housing_org_id, org_name=body.org_name) - was_created = crud.upsert_housing_org(session, housing_org) + with session.begin(): + was_created = crud.upsert_housing_org(session, housing_org) + session.commit() + if was_created: response.status_code = status.HTTP_201_CREATED @@ -92,7 +98,9 @@ def delete_housing_org(housing_org_id: int, :param housing_org_id: The ID of the housing org to delete. """ - housing_org = crud.read_housing_org_by_id(session, housing_org_id) - if not housing_org: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) - crud.delete_housing_org(session, housing_org) + with session.begin(): + housing_org = crud.read_housing_org_by_id(session, housing_org_id) + if not housing_org: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) + crud.delete_housing_org(session, housing_org) + session.commit() diff --git a/api-v2/app/tenant_housing_orgs/crud.py b/api-v2/app/tenant_housing_orgs/crud.py index 9deb6a72..1df0044b 100644 --- a/api-v2/app/tenant_housing_orgs/crud.py +++ b/api-v2/app/tenant_housing_orgs/crud.py @@ -3,16 +3,12 @@ from sqlalchemy.orm import Session from sqlalchemy import select -from . import models, schemas +from . 
import models -def create_housing_org(session: Session, housing_org: schemas.HousingOrg): +def create_housing_org(session: Session, new_housing_org: models.HousingOrg): """Create a Housing Org.""" - new_org = models.HousingOrg(org_name=housing_org.org_name) - session.add(new_org) - session.commit() - session.refresh(new_org) - return new_org + session.add(new_housing_org) def read_housing_org_by_id(session: Session, @@ -46,15 +42,13 @@ def upsert_housing_org(session: Session, """ was_created = False - with session.begin(): - db_housing_org = session.query( - models.HousingOrg).filter_by(id=housing_org.id).first() - if db_housing_org: - db_housing_org.org_name = housing_org.org_name - else: - session.add(housing_org) - was_created = True - session.commit() + db_housing_org = session.query( + models.HousingOrg).filter_by(id=housing_org.id).first() + if db_housing_org: + db_housing_org.org_name = housing_org.org_name + else: + session.add(housing_org) + was_created = True return was_created @@ -63,4 +57,3 @@ def delete_housing_org(session: Session, housing_org: models.HousingOrg): """Delete a HousingOrg.""" housing_org = session.get(models.HousingOrg, housing_org.id) session.delete(housing_org) - session.commit() diff --git a/api-v2/tests/integration/test_housing_orgs_controller.py b/api-v2/tests/integration/test_housing_orgs_controller.py index c395f275..3681706b 100644 --- a/api-v2/tests/integration/test_housing_orgs_controller.py +++ b/api-v2/tests/integration/test_housing_orgs_controller.py @@ -34,14 +34,14 @@ def test_create_housing_org(client): response = client.post(PATH, json=requested_org) response_obj = response.json() - assert response.status_code == 201, response + assert response.status_code == 201 assert response_obj["org_name"] == requested_org["org_name"] - print(response_obj['id']) response = client.get(f"{PATH}/{response_obj['id']}") - assert response.status_code == 200, response + assert response.status_code == 200 response_obj = response.json() - assert response_obj["org_name"] == requested_org["org_name"], response + assert response_obj["id"] == 1 + assert response_obj["org_name"] == requested_org["org_name"] def test_create_with_extra_data(client): @@ -60,7 +60,7 @@ def test_create_with_extra_data(client): response = client.post(PATH, json=create_request) response_body = response.json() - assert response.status_code == 201, response + assert response.status_code == 201 assert 'org_name' in response_body assert 'id' in response_body assert response_body['org_name'] == create_request['org_name'] @@ -77,14 +77,14 @@ def test_create_bad_json_invalid_type(client): bad_create_request = {"org_name": 1} response = client.post(PATH, json=bad_create_request) - assert response.status_code == 422, response + assert response.status_code == 422 def test_create_bad_json_missing_name(client): bad_create_request = {"org_namez": 1} response = client.post(PATH, json=bad_create_request) - assert response.status_code == 422, response + assert response.status_code == 422 def test_delete_housing_org(client: TestClient): @@ -95,7 +95,7 @@ def test_delete_housing_org(client: TestClient): ids = populate_test_database(client=client, num_entries=1) path = f'{PATH}/{ids[0]}' response = client.delete(path) - assert response.status_code == 204, response + assert response.status_code == 204 response = client.get(path) assert response.status_code == 404, "Housing org was not deleted." 
@@ -115,7 +115,7 @@ def test_delete_nonexistant_org(client: TestClient): assert len(response_body) == NUM_ROWS response = client.delete(f"{PATH}/{999}") - assert response.status_code == 404, response + assert response.status_code == 404 response = client.get(PATH) response_body = response.json() @@ -128,7 +128,7 @@ def test_get_nonexistent_org(client: TestClient): response = client.get(f"{PATH}/{999}") response_body = response.json() - assert response.status_code == 404, response + assert response.status_code == 404 assert 'org_name' not in response_body @@ -143,7 +143,7 @@ def test_get_housing_orgs(client: TestClient): response = client.get(PATH) response_body = response.json() - assert response.status_code == 200, response + assert response.status_code == 200 assert len(response_body) == expected_org_count @@ -151,7 +151,7 @@ def test_get_housing_org_empty_db(client): response = client.get(PATH) response_body = response.json() - assert response.status_code == 200, response + assert response.status_code == 200 assert len(response_body) == 0 @@ -165,7 +165,7 @@ def test_put_update_housing_org(client: TestClient): response = client.put(f"{PATH}/{ids[0]}", json=updated_org) - assert response.status_code == 200, response + assert response.status_code == 200 response_obj = response.json() assert response_obj["org_name"] == updated_org["org_name"] @@ -175,13 +175,13 @@ def test_put_update_housing_org(client: TestClient): def test_put_create_housing_org_no_id(client: TestClient): put_body = {"org_name": "New Housing Org Name"} response = client.put(f"{PATH}/{999}", json=put_body) - assert response.status_code == 201, response + assert response.status_code == 201 def test_put_create_housing_org_mismatch_id(client: TestClient): failed_put_body = {"id": 1, "org_name": "New Housing Org Name"} response = client.put(f"{PATH}/{999}", json=failed_put_body) - assert response.status_code == 409, response + assert response.status_code == 409 def test_put_with_extra_data(client: TestClient): @@ -201,7 +201,7 @@ def test_put_with_extra_data(client: TestClient): response = client.put(f"{PATH}/{ids[0]}", json=update_request) response_body = response.json() - assert response.status_code == 200, response + assert response.status_code == 200 assert 'org_name' in response_body assert 'id' in response_body From e333cdc0a3c78f68f292d53128e1b699f9b96508 Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Wed, 11 Sep 2024 18:16:38 -0700 Subject: [PATCH 37/70] fastapi-migration: Fix Housing Org controller The Housing Org controller had extraneous `session.commit()`s. Another test to check POSTing duplicate orgs was added. --- api-v2/app/tenant_housing_orgs/controller.py | 23 ++++++++------ .../test_housing_orgs_controller.py | 31 +++++++++++++++++-- 2 files changed, 43 insertions(+), 11 deletions(-) diff --git a/api-v2/app/tenant_housing_orgs/controller.py b/api-v2/app/tenant_housing_orgs/controller.py index f6059066..f4db4261 100644 --- a/api-v2/app/tenant_housing_orgs/controller.py +++ b/api-v2/app/tenant_housing_orgs/controller.py @@ -4,6 +4,7 @@ """ from . 
import crud, models, schemas +from typing import Any from fastapi import APIRouter, Depends, Request, Response, HTTPException, status from fastapi.responses import RedirectResponse from sqlalchemy.orm import Session @@ -14,24 +15,28 @@ router = APIRouter() -@router.post("/", status_code=status.HTTP_201_CREATED) +@router.post("/", + status_code=status.HTTP_201_CREATED, + response_model=schemas.HousingOrg) def create_housing_org( housing_org: schemas.HousingOrg, request: Request, - session: Session = Depends(get_db)) -> schemas.HousingOrg: + session: Session = Depends(get_db)) -> Any: """Create a housing org. A housing org is created if it is not already in the database. - Return the newly created housing org. Return None - if the housing org already exists. + Return the newly created housing org. + If the Housing Org with the given name exists, a redirect response is given. """ with session.begin(): db_org = crud.read_housing_org_by_name(session, housing_org.org_name) if db_org: - return RedirectResponse(status_code=status.HTTP_303_SEE_OTHER, - url=f"{request.url}/{db_org.id}") + redirect_url = request.url_for('get_housing_org', + **{'housing_org_id': db_org.id}) + return RedirectResponse(url=redirect_url, + status_code=status.HTTP_303_SEE_OTHER) new_housing_org = models.HousingOrg(org_name=housing_org.org_name) crud.create_housing_org(session, new_housing_org) @@ -77,13 +82,14 @@ def put_housing_org( if body.id is not None and body.id != housing_org_id: raise HTTPException( status_code=status.HTTP_409_CONFLICT, - detail="The Housing Org ID in the path mismatches the ID in the request body.") + detail= + "The Housing Org ID in the path mismatches the ID in the request body." + ) housing_org = models.HousingOrg(id=housing_org_id, org_name=body.org_name) with session.begin(): was_created = crud.upsert_housing_org(session, housing_org) - session.commit() if was_created: response.status_code = status.HTTP_201_CREATED @@ -103,4 +109,3 @@ def delete_housing_org(housing_org_id: int, if not housing_org: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) crud.delete_housing_org(session, housing_org) - session.commit() diff --git a/api-v2/tests/integration/test_housing_orgs_controller.py b/api-v2/tests/integration/test_housing_orgs_controller.py index 3681706b..c471e9d9 100644 --- a/api-v2/tests/integration/test_housing_orgs_controller.py +++ b/api-v2/tests/integration/test_housing_orgs_controller.py @@ -31,12 +31,13 @@ def test_create_housing_org(client): """Test create a new housing org.""" requested_org = {"org_name": "-123ASCII&"} + # POST response = client.post(PATH, json=requested_org) - response_obj = response.json() - assert response.status_code == 201 + response_obj = response.json() assert response_obj["org_name"] == requested_org["org_name"] + # GET response = client.get(f"{PATH}/{response_obj['id']}") assert response.status_code == 200 response_obj = response.json() @@ -44,6 +45,32 @@ def test_create_housing_org(client): assert response_obj["org_name"] == requested_org["org_name"] +def test_create_duplicate_housing_org_redirects(client): + """Test create a duplicate housing org redirects.""" + requested_org = {"org_name": "-123ASCII&"} + + # POST 1 of 2 + response = client.post(PATH, json=requested_org) + assert response.status_code == 201 + response_obj = response.json() + assert response_obj["id"] is not None + assert response_obj["org_name"] == requested_org["org_name"] + + org_id = response_obj["id"] + + # POST 2 of 2 should redirect instead of creating a new one + # 
Explicitly turn on following redirects to get a HTTP 200.
+    # The wrong status code (307) was being returned when setting
+    # follow_redirects to False. At the time of this writing, it
+    # seems that something changed the controller's RedirectResponse
+    # status code.
+    response = client.post(PATH, follow_redirects=True, json=requested_org)
+    assert response.status_code == 200, "Should have redirected to existing resource."
+    response_obj = response.json()
+    assert response_obj["id"] is not None
+    assert response_obj["org_name"] == requested_org["org_name"]
+
+
 def test_create_with_extra_data(client):
     """Test that sending a create POST request with extra
     json entries in the body does not disrupt the update.
From fbd38c65d67b871b426f7517049b632ecc20dee4 Mon Sep 17 00:00:00 2001
From: Erik
Date: Thu, 12 Sep 2024 11:13:36 -0700
Subject: [PATCH 38/70] Move folders to modules and update imports

---
 api-v2/app/api/main.py | 12 ----------
 api-v2/app/main.py | 2 +-
 .../access/auth_controller.py} | 6 ++---
 api-v2/app/{ => modules}/access/crud.py | 4 ++--
 api-v2/app/{ => modules}/access/models.py | 0
 api-v2/app/{ => modules}/access/schemas.py | 0
 .../access/user_controller.py} | 6 ++---
 api-v2/app/{ => modules}/access/user_repo.py | 0
 api-v2/app/{ => modules}/access/user_roles.py | 0
 api-v2/app/{api => modules}/deps.py | 0
 .../intake_profile}/__init__.py | 0
 .../intake_profile/controller.py | 2 +-
 .../intake_profile/forms/forms.py | 0
 api-v2/app/{ => modules}/intake_profile/model.py | 0
 .../intake_profile/repository.py | 0
 .../{ => modules}/intake_profile/schemas.py | 0
 .../matching}/__init__.py | 0
 api-v2/app/{ => modules}/matching/controller.py | 0
 api-v2/app/{ => modules}/matching/model.py | 0
 api-v2/app/{ => modules}/matching/schemas.py | 0
 .../onboarding}/__init__.py | 0
 .../{ => modules}/onboarding/controller.py | 0
 api-v2/app/{ => modules}/onboarding/model.py | 0
 .../app/{ => modules}/onboarding/schemas.py | 0
 api-v2/app/modules/router.py | 22 +++++++++++++++++++
 .../{onboarding => modules/stays}/__init__.py | 0
 api-v2/app/{ => modules}/stays/controller.py | 0
 api-v2/app/{ => modules}/stays/model.py | 0
 api-v2/app/{ => modules}/stays/schemas.py | 0
 .../tenant_housing_orgs}/__init__.py | 0
 .../tenant_housing_orgs/controller.py | 2 +-
 .../{ => modules}/tenant_housing_orgs/crud.py | 0
 .../tenant_housing_orgs/models.py | 0
 .../tenant_housing_orgs/schemas.py | 0
 api-v2/app/seed.py | 2 +-
 api-v2/app/tenant_housing_orgs/__init__.py | 0
 api-v2/tests/conftest.py | 2 +-
 37 files changed, 35 insertions(+), 25 deletions(-)
 delete mode 100644 api-v2/app/api/main.py
 rename api-v2/app/{access/auth.py => modules/access/auth_controller.py} (96%)
 rename api-v2/app/{ => modules}/access/crud.py (92%)
 rename api-v2/app/{ => modules}/access/models.py (100%)
 rename api-v2/app/{ => modules}/access/schemas.py (100%)
 rename api-v2/app/{access/user.py => modules/access/user_controller.py} (85%)
 rename api-v2/app/{ => modules}/access/user_repo.py (100%)
 rename api-v2/app/{ => modules}/access/user_roles.py (100%)
 rename api-v2/app/{api => modules}/deps.py (100%)
 rename api-v2/app/{api => modules/intake_profile}/__init__.py (100%)
 rename api-v2/app/{ => modules}/intake_profile/controller.py (96%)
 rename api-v2/app/{ => modules}/intake_profile/forms/forms.py (100%)
 rename api-v2/app/{ => modules}/intake_profile/model.py (100%)
 rename api-v2/app/{ => modules}/intake_profile/repository.py (100%)
 rename api-v2/app/{ => modules}/intake_profile/schemas.py (100%)
 rename api-v2/app/{intake_profile => modules/matching}/__init__.py
(100%) rename api-v2/app/{ => modules}/matching/controller.py (100%) rename api-v2/app/{ => modules}/matching/model.py (100%) rename api-v2/app/{ => modules}/matching/schemas.py (100%) rename api-v2/app/{matching => modules/onboarding}/__init__.py (100%) rename api-v2/app/{ => modules}/onboarding/controller.py (100%) rename api-v2/app/{ => modules}/onboarding/model.py (100%) rename api-v2/app/{ => modules}/onboarding/schemas.py (100%) create mode 100644 api-v2/app/modules/router.py rename api-v2/app/{onboarding => modules/stays}/__init__.py (100%) rename api-v2/app/{ => modules}/stays/controller.py (100%) rename api-v2/app/{ => modules}/stays/model.py (100%) rename api-v2/app/{ => modules}/stays/schemas.py (100%) rename api-v2/app/{stays => modules/tenant_housing_orgs}/__init__.py (100%) rename api-v2/app/{ => modules}/tenant_housing_orgs/controller.py (99%) rename api-v2/app/{ => modules}/tenant_housing_orgs/crud.py (100%) rename api-v2/app/{ => modules}/tenant_housing_orgs/models.py (100%) rename api-v2/app/{ => modules}/tenant_housing_orgs/schemas.py (100%) delete mode 100644 api-v2/app/tenant_housing_orgs/__init__.py diff --git a/api-v2/app/api/main.py b/api-v2/app/api/main.py deleted file mode 100644 index 9bd840d8..00000000 --- a/api-v2/app/api/main.py +++ /dev/null @@ -1,12 +0,0 @@ -from fastapi import APIRouter - -from app.access import auth, user -from app.intake_profile import controller as intake_profile -from app.tenant_housing_orgs import controller as housing_org - - -api_router = APIRouter() -api_router.include_router(auth.router, prefix="/auth", tags=["auth"]) -api_router.include_router(user.router, prefix="/user", tags=["user"]) -api_router.include_router(intake_profile.router, prefix="/intake-profile", tags=["intake_profile"]) -api_router.include_router(housing_org.router, prefix="/housing-orgs", tags=["tenant_housing_orgs"]) diff --git a/api-v2/app/main.py b/api-v2/app/main.py index ba078449..28d99969 100644 --- a/api-v2/app/main.py +++ b/api-v2/app/main.py @@ -2,7 +2,7 @@ from contextlib import asynccontextmanager -from app.api.main import api_router +from app.modules.router import api_router # from core.config import settings from app.seed import init_db diff --git a/api-v2/app/access/auth.py b/api-v2/app/modules/access/auth_controller.py similarity index 96% rename from api-v2/app/access/auth.py rename to api-v2/app/modules/access/auth_controller.py index 7a1dd234..94a08b4a 100644 --- a/api-v2/app/access/auth.py +++ b/api-v2/app/modules/access/auth_controller.py @@ -8,10 +8,10 @@ from botocore.exceptions import ClientError -from app.access.schemas import UserCreate, UserSignInRequest, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse, ConfirmForgotPasswordResponse, ConfirmForgotPasswordRequest, RefreshTokenResponse +from app.modules.access.schemas import UserCreate, UserSignInRequest, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse, ConfirmForgotPasswordResponse, ConfirmForgotPasswordRequest, RefreshTokenResponse -from app.access.crud import create_user, delete_user, get_user -from app.api.deps import ( +from app.modules.access.crud import create_user, delete_user, get_user +from app.modules.deps import ( get_db, get_cognito_client, requires_auth, diff --git a/api-v2/app/access/crud.py b/api-v2/app/modules/access/crud.py similarity index 90% rename from api-v2/app/access/crud.py rename to api-v2/app/modules/access/crud.py index fc7ce869..8340d895 100644 --- a/api-v2/app/access/crud.py +++ b/api-v2/app/modules/access/crud.py @@ -1,7 
+1,7 @@ from sqlalchemy.orm import Session -import app.access.models as models -import app.access.schemas as schemas +import app.modules.access.models as models +import app.modules.access.schemas as schemas def get_role(db: Session, role: int): diff --git a/api-v2/app/access/models.py b/api-v2/app/modules/access/models.py similarity index 100% rename from api-v2/app/access/models.py rename to api-v2/app/modules/access/models.py diff --git a/api-v2/app/access/schemas.py b/api-v2/app/modules/access/schemas.py similarity index 100% rename from api-v2/app/access/schemas.py rename to api-v2/app/modules/access/schemas.py diff --git a/api-v2/app/access/user.py b/api-v2/app/modules/access/user_controller.py similarity index 85% rename from api-v2/app/access/user.py rename to api-v2/app/modules/access/user_controller.py index b85685e9..2d190e71 100644 --- a/api-v2/app/access/user.py +++ b/api-v2/app/modules/access/user_controller.py @@ -4,9 +4,9 @@ from sqlalchemy.orm import Session -from app.access.schemas import User -from app.access.crud import get_user -from app.api.deps import get_db +from app.modules.access.schemas import User +from app.modules.access.crud import get_user +from app.modules.deps import get_db router = APIRouter() diff --git a/api-v2/app/access/user_repo.py b/api-v2/app/modules/access/user_repo.py similarity index 100% rename from api-v2/app/access/user_repo.py rename to api-v2/app/modules/access/user_repo.py diff --git a/api-v2/app/access/user_roles.py b/api-v2/app/modules/access/user_roles.py similarity index 100% rename from api-v2/app/access/user_roles.py rename to api-v2/app/modules/access/user_roles.py diff --git a/api-v2/app/api/deps.py b/api-v2/app/modules/deps.py similarity index 100% rename from api-v2/app/api/deps.py rename to api-v2/app/modules/deps.py diff --git a/api-v2/app/api/__init__.py b/api-v2/app/modules/intake_profile/__init__.py similarity index 100% rename from api-v2/app/api/__init__.py rename to api-v2/app/modules/intake_profile/__init__.py diff --git a/api-v2/app/intake_profile/controller.py b/api-v2/app/modules/intake_profile/controller.py similarity index 96% rename from api-v2/app/intake_profile/controller.py rename to api-v2/app/modules/intake_profile/controller.py index f8134cbc..6f90ed85 100644 --- a/api-v2/app/intake_profile/controller.py +++ b/api-v2/app/modules/intake_profile/controller.py @@ -3,7 +3,7 @@ from fastapi import Depends, APIRouter, HTTPException, Response, Security from fastapi.responses import RedirectResponse -from app.api.deps import ( +from app.modules.deps import ( get_db, get_cognito_client, requires_auth, diff --git a/api-v2/app/intake_profile/forms/forms.py b/api-v2/app/modules/intake_profile/forms/forms.py similarity index 100% rename from api-v2/app/intake_profile/forms/forms.py rename to api-v2/app/modules/intake_profile/forms/forms.py diff --git a/api-v2/app/intake_profile/model.py b/api-v2/app/modules/intake_profile/model.py similarity index 100% rename from api-v2/app/intake_profile/model.py rename to api-v2/app/modules/intake_profile/model.py diff --git a/api-v2/app/intake_profile/repository.py b/api-v2/app/modules/intake_profile/repository.py similarity index 100% rename from api-v2/app/intake_profile/repository.py rename to api-v2/app/modules/intake_profile/repository.py diff --git a/api-v2/app/intake_profile/schemas.py b/api-v2/app/modules/intake_profile/schemas.py similarity index 100% rename from api-v2/app/intake_profile/schemas.py rename to api-v2/app/modules/intake_profile/schemas.py diff --git 
a/api-v2/app/intake_profile/__init__.py b/api-v2/app/modules/matching/__init__.py similarity index 100% rename from api-v2/app/intake_profile/__init__.py rename to api-v2/app/modules/matching/__init__.py diff --git a/api-v2/app/matching/controller.py b/api-v2/app/modules/matching/controller.py similarity index 100% rename from api-v2/app/matching/controller.py rename to api-v2/app/modules/matching/controller.py diff --git a/api-v2/app/matching/model.py b/api-v2/app/modules/matching/model.py similarity index 100% rename from api-v2/app/matching/model.py rename to api-v2/app/modules/matching/model.py diff --git a/api-v2/app/matching/schemas.py b/api-v2/app/modules/matching/schemas.py similarity index 100% rename from api-v2/app/matching/schemas.py rename to api-v2/app/modules/matching/schemas.py diff --git a/api-v2/app/matching/__init__.py b/api-v2/app/modules/onboarding/__init__.py similarity index 100% rename from api-v2/app/matching/__init__.py rename to api-v2/app/modules/onboarding/__init__.py diff --git a/api-v2/app/onboarding/controller.py b/api-v2/app/modules/onboarding/controller.py similarity index 100% rename from api-v2/app/onboarding/controller.py rename to api-v2/app/modules/onboarding/controller.py diff --git a/api-v2/app/onboarding/model.py b/api-v2/app/modules/onboarding/model.py similarity index 100% rename from api-v2/app/onboarding/model.py rename to api-v2/app/modules/onboarding/model.py diff --git a/api-v2/app/onboarding/schemas.py b/api-v2/app/modules/onboarding/schemas.py similarity index 100% rename from api-v2/app/onboarding/schemas.py rename to api-v2/app/modules/onboarding/schemas.py diff --git a/api-v2/app/modules/router.py b/api-v2/app/modules/router.py new file mode 100644 index 00000000..3e47807d --- /dev/null +++ b/api-v2/app/modules/router.py @@ -0,0 +1,22 @@ +from fastapi import APIRouter + +from app.modules.access import auth_controller, user_controller +from app.modules.intake_profile import controller as intake_profile +from app.modules.tenant_housing_orgs import controller as housing_org + + +api_router = APIRouter() + + +api_router.include_router( + auth_controller.router, prefix="/auth", tags=["auth"] +) +api_router.include_router( + user_controller.router, prefix="/user", tags=["user"] +) +api_router.include_router( + intake_profile.router, prefix="/intake-profile", tags=["intake_profile"] +) +api_router.include_router( + housing_org.router, prefix="/housing-orgs", tags=["tenant_housing_orgs"] +) diff --git a/api-v2/app/onboarding/__init__.py b/api-v2/app/modules/stays/__init__.py similarity index 100% rename from api-v2/app/onboarding/__init__.py rename to api-v2/app/modules/stays/__init__.py diff --git a/api-v2/app/stays/controller.py b/api-v2/app/modules/stays/controller.py similarity index 100% rename from api-v2/app/stays/controller.py rename to api-v2/app/modules/stays/controller.py diff --git a/api-v2/app/stays/model.py b/api-v2/app/modules/stays/model.py similarity index 100% rename from api-v2/app/stays/model.py rename to api-v2/app/modules/stays/model.py diff --git a/api-v2/app/stays/schemas.py b/api-v2/app/modules/stays/schemas.py similarity index 100% rename from api-v2/app/stays/schemas.py rename to api-v2/app/modules/stays/schemas.py diff --git a/api-v2/app/stays/__init__.py b/api-v2/app/modules/tenant_housing_orgs/__init__.py similarity index 100% rename from api-v2/app/stays/__init__.py rename to api-v2/app/modules/tenant_housing_orgs/__init__.py diff --git a/api-v2/app/tenant_housing_orgs/controller.py 
b/api-v2/app/modules/tenant_housing_orgs/controller.py similarity index 99% rename from api-v2/app/tenant_housing_orgs/controller.py rename to api-v2/app/modules/tenant_housing_orgs/controller.py index 4973f4f7..bc93c8ed 100644 --- a/api-v2/app/tenant_housing_orgs/controller.py +++ b/api-v2/app/modules/tenant_housing_orgs/controller.py @@ -4,7 +4,7 @@ from fastapi.responses import RedirectResponse from sqlalchemy.orm import Session -from app.api.deps import ( +from app.modules.deps import ( get_db, ) router = APIRouter() diff --git a/api-v2/app/tenant_housing_orgs/crud.py b/api-v2/app/modules/tenant_housing_orgs/crud.py similarity index 100% rename from api-v2/app/tenant_housing_orgs/crud.py rename to api-v2/app/modules/tenant_housing_orgs/crud.py diff --git a/api-v2/app/tenant_housing_orgs/models.py b/api-v2/app/modules/tenant_housing_orgs/models.py similarity index 100% rename from api-v2/app/tenant_housing_orgs/models.py rename to api-v2/app/modules/tenant_housing_orgs/models.py diff --git a/api-v2/app/tenant_housing_orgs/schemas.py b/api-v2/app/modules/tenant_housing_orgs/schemas.py similarity index 100% rename from api-v2/app/tenant_housing_orgs/schemas.py rename to api-v2/app/modules/tenant_housing_orgs/schemas.py diff --git a/api-v2/app/seed.py b/api-v2/app/seed.py index 6433c7bf..4eef07bf 100644 --- a/api-v2/app/seed.py +++ b/api-v2/app/seed.py @@ -1,7 +1,7 @@ from sqlalchemy import event from app.core.db import Base, engine -from app.access.models import Role +from app.modules.access.models import Role INITIAL_ROLES = [ {"type": "admin"}, diff --git a/api-v2/app/tenant_housing_orgs/__init__.py b/api-v2/app/tenant_housing_orgs/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/api-v2/tests/conftest.py b/api-v2/tests/conftest.py index ccf91230..db269276 100644 --- a/api-v2/tests/conftest.py +++ b/api-v2/tests/conftest.py @@ -9,7 +9,7 @@ from app.main import app as main_app from app.core.db import Base -from app.api.deps import get_db +from app.modules.deps import get_db @pytest.fixture From c93999876b21039e78a7c1e49189b4e147fd4a35 Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 12 Sep 2024 11:34:26 -0700 Subject: [PATCH 39/70] Update import --- app/src/components/intake-profile/IntakeProfileGroups.tsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/src/components/intake-profile/IntakeProfileGroups.tsx b/app/src/components/intake-profile/IntakeProfileGroups.tsx index 342c15cf..e2d3a523 100644 --- a/app/src/components/intake-profile/IntakeProfileGroups.tsx +++ b/app/src/components/intake-profile/IntakeProfileGroups.tsx @@ -19,11 +19,10 @@ import {useOutletContext} from 'react-router-dom'; import {InitialValues} from 'src/views/IntakeProfile'; import {AdditionalGuestsField} from './fields/AdditionaGuestsField'; import {FieldGroup, Fields, Guest, Pet} from 'src/services/profile'; -import {AdditionalPetsField} from './AdditionalPetsField'; +import {AdditionalPetsField} from './fields/AdditionalPetsField'; import {phoneRegExp} from '../../views/IntakeProfile/constants/index'; import {DatePickerField} from './fields/DatePickerField'; - export interface OutletContext { groupId: string; fieldGroups: FieldGroup[]; From 27617cd751aec7f8dfe08a9fb77255bae26cfd22 Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Thu, 12 Sep 2024 11:42:31 -0700 Subject: [PATCH 40/70] fastapi-migration: Rename stays to relationship_management The workflow is called Relationship Management so the name of the package was updated to reflect that. 
---
 api-v2/app/{stays => relationship_managment}/__init__.py | 0
 api-v2/app/{stays => relationship_managment}/controller.py | 0
 api-v2/app/{stays => relationship_managment}/model.py | 0
 api-v2/app/{stays => relationship_managment}/schemas.py | 0
 4 files changed, 0 insertions(+), 0 deletions(-)
 rename api-v2/app/{stays => relationship_managment}/__init__.py (100%)
 rename api-v2/app/{stays => relationship_managment}/controller.py (100%)
 rename api-v2/app/{stays => relationship_managment}/model.py (100%)
 rename api-v2/app/{stays => relationship_managment}/schemas.py (100%)

diff --git a/api-v2/app/stays/__init__.py b/api-v2/app/relationship_managment/__init__.py
similarity index 100%
rename from api-v2/app/stays/__init__.py
rename to api-v2/app/relationship_managment/__init__.py
diff --git a/api-v2/app/stays/controller.py b/api-v2/app/relationship_managment/controller.py
similarity index 100%
rename from api-v2/app/stays/controller.py
rename to api-v2/app/relationship_managment/controller.py
diff --git a/api-v2/app/stays/model.py b/api-v2/app/relationship_managment/model.py
similarity index 100%
rename from api-v2/app/stays/model.py
rename to api-v2/app/relationship_managment/model.py
diff --git a/api-v2/app/stays/schemas.py b/api-v2/app/relationship_managment/schemas.py
similarity index 100%
rename from api-v2/app/stays/schemas.py
rename to api-v2/app/relationship_managment/schemas.py
From 3055067e589e8163aa3fb45d39b0c7f98faff76d Mon Sep 17 00:00:00 2001
From: "Mr. Paul"
Date: Thu, 12 Sep 2024 11:43:38 -0700
Subject: [PATCH 41/70] fastapi-migration: Follow best practice for primary
 key name

The primary keys for the HousingOrg and HousingProgram have been
updated to reflect better naming. By convention, a descriptive name
is preferred for the primary key.
---
 api-v2/app/tenant_housing_orgs/controller.py | 12 ++--
 api-v2/app/tenant_housing_orgs/crud.py | 4 +-
 api-v2/app/tenant_housing_orgs/models.py | 13 +++--
 api-v2/app/tenant_housing_orgs/schemas.py | 2 +-
 .../test_housing_orgs_controller.py | 56 +++++++++----------
 5 files changed, 42 insertions(+), 45 deletions(-)

diff --git a/api-v2/app/tenant_housing_orgs/controller.py b/api-v2/app/tenant_housing_orgs/controller.py
index f4db4261..2de4750c 100644
--- a/api-v2/app/tenant_housing_orgs/controller.py
+++ b/api-v2/app/tenant_housing_orgs/controller.py
@@ -34,7 +34,7 @@ def create_housing_org(
         db_org = crud.read_housing_org_by_name(session, housing_org.org_name)
         if db_org:
             redirect_url = request.url_for('get_housing_org',
-                                           **{'housing_org_id': db_org.id})
+                                           **{'housing_org_id': db_org.housing_org_id})
             return RedirectResponse(url=redirect_url,
                                     status_code=status.HTTP_303_SEE_OTHER)

@@ -49,11 +49,7 @@ def create_housing_org(
 def get_housing_org(
     housing_org_id: int, session: Session = Depends(get_db)
 ) -> schemas.HousingOrg | None:
-    """Get details about a housing org from an ID.
-
-    :param org_id: The ID of the housing org to read, update or delete
-    :type org_id: int
-    """
+    """Get details about a housing org from an ID."""
     housing_org = crud.read_housing_org_by_id(session, housing_org_id)
     if not housing_org:
         raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
@@ -79,14 +75,14 @@ def put_housing_org(
     If the representation contains a Housing Org ID that does not
     match the ID given in the path, then a HTTP 409 Conflict will be
     returned.
""" - if body.id is not None and body.id != housing_org_id: + if body.housing_org_id is not None and body.housing_org_id != housing_org_id: raise HTTPException( status_code=status.HTTP_409_CONFLICT, detail= "The Housing Org ID in the path mismatches the ID in the request body." ) - housing_org = models.HousingOrg(id=housing_org_id, org_name=body.org_name) + housing_org = models.HousingOrg(housing_org_id=housing_org_id, org_name=body.org_name) with session.begin(): was_created = crud.upsert_housing_org(session, housing_org) diff --git a/api-v2/app/tenant_housing_orgs/crud.py b/api-v2/app/tenant_housing_orgs/crud.py index 1df0044b..0c298cf4 100644 --- a/api-v2/app/tenant_housing_orgs/crud.py +++ b/api-v2/app/tenant_housing_orgs/crud.py @@ -43,7 +43,7 @@ def upsert_housing_org(session: Session, was_created = False db_housing_org = session.query( - models.HousingOrg).filter_by(id=housing_org.id).first() + models.HousingOrg).filter_by(housing_org_id=housing_org.housing_org_id).first() if db_housing_org: db_housing_org.org_name = housing_org.org_name else: @@ -55,5 +55,5 @@ def upsert_housing_org(session: Session, def delete_housing_org(session: Session, housing_org: models.HousingOrg): """Delete a HousingOrg.""" - housing_org = session.get(models.HousingOrg, housing_org.id) + housing_org = session.get(models.HousingOrg, housing_org.housing_org_id) session.delete(housing_org) diff --git a/api-v2/app/tenant_housing_orgs/models.py b/api-v2/app/tenant_housing_orgs/models.py index c69cf55a..efe07f25 100644 --- a/api-v2/app/tenant_housing_orgs/models.py +++ b/api-v2/app/tenant_housing_orgs/models.py @@ -15,19 +15,20 @@ class HousingOrg(Base): __tablename__ = "housing_orgs" - id: Mapped[intpk] + housing_org_id: Mapped[intpk] org_name: Mapped[str] = mapped_column(String, nullable=False, unique=True) - programs: Mapped[List["HousingProgram"]] = relationship(back_populates="housing_org") + programs: Mapped[List["HousingProgram"]] = relationship( + back_populates="housing_org") def __repr__(self): - return f"HousingOrg(id={id},org_name='{self.org_name}')" + return f"HousingOrg(housing_org_id={self.housing_org_id},org_name='{self.org_name}')" class HousingProgram(Base): __tablename__ = "housing_programs" - id: Mapped[intpk] + housing_program_id: Mapped[intpk] program_name: Mapped[str] = mapped_column(String, nullable=False) - housing_org_id: Mapped[int] = mapped_column(ForeignKey('housing_orgs.id'), - nullable=False) + housing_org_id: Mapped[int] = mapped_column( + ForeignKey('housing_orgs.housing_org_id'), nullable=False) housing_org: Mapped["HousingOrg"] = relationship(back_populates="programs") diff --git a/api-v2/app/tenant_housing_orgs/schemas.py b/api-v2/app/tenant_housing_orgs/schemas.py index ab666265..876c6b14 100644 --- a/api-v2/app/tenant_housing_orgs/schemas.py +++ b/api-v2/app/tenant_housing_orgs/schemas.py @@ -3,7 +3,7 @@ class HousingOrg(BaseModel): - id: int | None = None + housing_org_id: int | None = None org_name: str model_config = ConfigDict(from_attributes=True) diff --git a/api-v2/tests/integration/test_housing_orgs_controller.py b/api-v2/tests/integration/test_housing_orgs_controller.py index c471e9d9..c568fb72 100644 --- a/api-v2/tests/integration/test_housing_orgs_controller.py +++ b/api-v2/tests/integration/test_housing_orgs_controller.py @@ -22,8 +22,8 @@ def populate_test_database(client: TestClient, num_entries: int) -> list[int]: assert org is not None, ( f"test setup failure. failed to create org no {i}." 
"cannot perform endpoint test!") - assert 'id' in org - ids.append(org["id"]) + assert "housing_org_id" in org + ids.append(org["housing_org_id"]) return ids @@ -38,10 +38,10 @@ def test_create_housing_org(client): assert response_obj["org_name"] == requested_org["org_name"] # GET - response = client.get(f"{PATH}/{response_obj['id']}") + response = client.get(f"{PATH}/{response_obj['housing_org_id']}") assert response.status_code == 200 response_obj = response.json() - assert response_obj["id"] == 1 + assert response_obj["housing_org_id"] == 1 assert response_obj["org_name"] == requested_org["org_name"] @@ -53,10 +53,10 @@ def test_create_duplicate_housing_org_redirects(client): response = client.post(PATH, json=requested_org) assert response.status_code == 201 response_obj = response.json() - assert response_obj["id"] is not None + assert response_obj["housing_org_id"] is not None assert response_obj["org_name"] == requested_org["org_name"] - org_id = response_obj["id"] + org_id = response_obj["housing_org_id"] # POST 2 of 2 should redirect instead of creating a new one # Explicitly turn on following redirects to get a HTTP 200. @@ -67,7 +67,7 @@ def test_create_duplicate_housing_org_redirects(client): response = client.post(PATH, follow_redirects=True, json=requested_org) assert response.status_code == 200, "Should have redirected to existing resource." response_obj = response.json() - assert response_obj["id"] is not None + assert response_obj["housing_org_id"] is not None assert response_obj["org_name"] == requested_org["org_name"] @@ -88,14 +88,14 @@ def test_create_with_extra_data(client): response_body = response.json() assert response.status_code == 201 - assert 'org_name' in response_body - assert 'id' in response_body - assert response_body['org_name'] == create_request['org_name'] - assert 'extra_int' not in response_body, "We should not send back request json extra fields" - assert 'extra_bool' not in response_body, "We should not send back request json extra fields" - assert 'extra_string' not in response_body, "We should not send back request json extra fields" - - response = client.get(f"{PATH}/{response_body['id']}") + assert "org_name" in response_body + assert "housing_org_id" in response_body + assert response_body["org_name"] == create_request["org_name"] + assert "extra_int" not in response_body, "We should not send back request json extra fields" + assert "extra_bool" not in response_body, "We should not send back request json extra fields" + assert "extra_string" not in response_body, "We should not send back request json extra fields" + + response = client.get(f"{PATH}/{response_body['housing_org_id']}") assert response.status_code == 200, "POST succeeded but the housing org doesn't exist." assert response_body["org_name"] == create_request["org_name"] @@ -120,7 +120,7 @@ def test_delete_housing_org(client: TestClient): using a delete request. 
""" ids = populate_test_database(client=client, num_entries=1) - path = f'{PATH}/{ids[0]}' + path = f"{PATH}/{ids[0]}" response = client.delete(path) assert response.status_code == 204 @@ -152,11 +152,11 @@ def test_delete_nonexistant_org(client: TestClient): def test_get_nonexistent_org(client: TestClient): populate_test_database(client=client, num_entries=8) - response = client.get(f"{PATH}/{999}") + response = client.get(f"{PATH}/999") response_body = response.json() assert response.status_code == 404 - assert 'org_name' not in response_body + assert "org_name" not in response_body def test_get_housing_orgs(client: TestClient): @@ -196,17 +196,17 @@ def test_put_update_housing_org(client: TestClient): response_obj = response.json() assert response_obj["org_name"] == updated_org["org_name"] - assert response_obj["id"] == ids[0] + assert response_obj["housing_org_id"] == ids[0] def test_put_create_housing_org_no_id(client: TestClient): put_body = {"org_name": "New Housing Org Name"} - response = client.put(f"{PATH}/{999}", json=put_body) + response = client.put(f"{PATH}/999", json=put_body) assert response.status_code == 201 def test_put_create_housing_org_mismatch_id(client: TestClient): - failed_put_body = {"id": 1, "org_name": "New Housing Org Name"} + failed_put_body = {"housing_org_id": 1, "org_name": "New Housing Org Name"} response = client.put(f"{PATH}/{999}", json=failed_put_body) assert response.status_code == 409 @@ -230,11 +230,11 @@ def test_put_with_extra_data(client: TestClient): assert response.status_code == 200 - assert 'org_name' in response_body - assert 'id' in response_body - assert 'extra_int' not in response_body, "We should not send back request json extra fields" - assert 'extra_bool' not in response_body, "We should not send back request json extra fields" - assert 'extra_string' not in response_body, "We should not send back request json extra fields" + assert "org_name" in response_body + assert "housing_org_id" in response_body + assert "extra_int" not in response_body, "We should not send back request json extra fields" + assert "extra_bool" not in response_body, "We should not send back request json extra fields" + assert "extra_string" not in response_body, "We should not send back request json extra fields" - assert response_body['org_name'] == update_request["org_name"] - assert response_body['id'] == ids[0] + assert response_body["org_name"] == update_request["org_name"] + assert response_body["housing_org_id"] == ids[0] From 7370f0b1a67ec573da5bf68da4dbfa8447df5c7a Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 12 Sep 2024 13:03:27 -0700 Subject: [PATCH 42/70] Update proxy path --- app/vite.config.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/vite.config.ts b/app/vite.config.ts index 9c8ecad2..a4653185 100644 --- a/app/vite.config.ts +++ b/app/vite.config.ts @@ -11,7 +11,7 @@ function huuApiBaseUrl(envHuuApiBaseUrl: string, mode: string): URL | never { return new URL(envHuuApiBaseUrl); } catch { if (mode === 'development' || mode === 'test') { - return new URL('http://localhost:38429/api'); + return new URL('http://localhost:8000/api'); } else { throw new Error('VITE_HUU_API_BASE_URL is not configured with a URL'); } @@ -43,7 +43,7 @@ export default defineConfig(({mode}) => { }, plugins: [react()], server: { - port: 38428, + port: 34828, proxy: { '/api': { target: apiBaseUrl.origin, From 6b5a0dd4c4bec029ea72dfca4c9028f846cff6d1 Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 12 Sep 2024 16:51:49 -0700 Subject: [PATCH 
43/70] Create user api on front end --- app/src/app/authSlice.ts | 6 ++--- .../authentication/ProtectedRoute.tsx | 2 +- app/src/services/auth.ts | 23 +--------------- app/src/services/user.ts | 27 +++++++++++++++++++ .../views/IntakeProfile/constants/index.ts | 2 -- 5 files changed, 32 insertions(+), 28 deletions(-) create mode 100644 app/src/services/user.ts diff --git a/app/src/app/authSlice.ts b/app/src/app/authSlice.ts index cce1fe6d..6b77c119 100644 --- a/app/src/app/authSlice.ts +++ b/app/src/app/authSlice.ts @@ -1,6 +1,6 @@ import {createSlice, PayloadAction} from '@reduxjs/toolkit'; -import {User} from '../services/auth'; +import {User, userAPI} from '../services/user'; import {RootState} from './store'; import {authApi} from '../services/auth'; @@ -34,8 +34,8 @@ export const authSlice = createSlice({ extraReducers: builder => { builder // Add a matcher to update auth state with user returned from the user query - .addMatcher(authApi.endpoints.user.matchFulfilled, (state, {payload}) => { - state.user = payload.user; + .addMatcher(userAPI.endpoints.user.matchFulfilled, (state, {payload}) => { + state.user = payload; }) .addMatcher( authApi.endpoints.session.matchFulfilled, diff --git a/app/src/components/authentication/ProtectedRoute.tsx b/app/src/components/authentication/ProtectedRoute.tsx index a71bea81..1cafd1ba 100644 --- a/app/src/components/authentication/ProtectedRoute.tsx +++ b/app/src/components/authentication/ProtectedRoute.tsx @@ -2,7 +2,7 @@ import React from 'react'; import {Navigate, useLocation} from 'react-router-dom'; import {useAuth} from '../../app/hooks/useAuth'; import {Loading} from '../common'; -import {useUserQuery} from '../../services/auth'; +import {useUserQuery} from '../../services/user'; export const ProtectedRoute = ({children}: {children: JSX.Element}) => { const {user} = useAuth(); diff --git a/app/src/services/auth.ts b/app/src/services/auth.ts index 3d0b453c..f109a293 100644 --- a/app/src/services/auth.ts +++ b/app/src/services/auth.ts @@ -1,18 +1,5 @@ import {api} from './api'; - -export interface UserRole { - name: 'Guest' | 'Host' | 'Coordinator' | 'Admin'; -} -export interface User { - email: string; - firstName: string; - lastName: string; - role: UserRole; -} - -export interface UserResponse { - user: User; -} +import {User} from './user'; export interface SignUpHostResponse { user: User; @@ -193,13 +180,6 @@ const authApi = api.injectEndpoints({ withCredentials: true, }), }), - user: build.query({ - query: () => ({ - url: 'auth/user', - method: 'GET', - withCredentials: true, - }), - }), resendConfirmationCode: build.mutation< ResendConfirmationCodeResponse, ResendConfirmationCodeRequest @@ -228,7 +208,6 @@ export const { useForgotPasswordMutation, useConfirmForgotPasswordMutation, useSessionMutation, - useUserQuery, usePrivateQuery, useResendConfirmationCodeMutation, } = authApi; diff --git a/app/src/services/user.ts b/app/src/services/user.ts new file mode 100644 index 00000000..57cc1bd6 --- /dev/null +++ b/app/src/services/user.ts @@ -0,0 +1,27 @@ +import {api} from './api'; + +export interface UserRole { + type: 'guest' | 'host' | 'coordinator' | 'admin'; +} +export interface User { + email: string; + firstName: string; + lastName: string; + role: UserRole; +} + +const userAPI = api.injectEndpoints({ + endpoints: build => ({ + user: build.query({ + query: () => ({ + url: 'user/', + method: 'GET', + withCredentials: true, + }), + }), + }), +}); + +export {userAPI}; + +export const {useUserQuery} = userAPI; diff --git 
a/app/src/views/IntakeProfile/constants/index.ts b/app/src/views/IntakeProfile/constants/index.ts
index e9e3eb67..aa7c7304 100644
--- a/app/src/views/IntakeProfile/constants/index.ts
+++ b/app/src/views/IntakeProfile/constants/index.ts
@@ -1,4 +1,3 @@
-
 import {faker} from '@faker-js/faker';
 import {array, object, string} from 'yup';
 import {InitialValues} from '..';
@@ -189,4 +188,3 @@ export const buildValidationSchema = (
     [groupId]: object().shape({...schema.fields}),
   });
 };
-
From 531970df7e4a638a0a940e95c5c49ac141d1db95 Mon Sep 17 00:00:00 2001
From: Erik
Date: Thu, 12 Sep 2024 16:52:09 -0700
Subject: [PATCH 44/70] Update role type

---
 .../authentication/hooks/useAuthenticateWithOAuth.ts | 10 +++++-----
 app/src/utils/test/browser.ts | 2 +-
 app/src/views/SignIn.tsx | 2 +-
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/app/src/components/authentication/hooks/useAuthenticateWithOAuth.ts b/app/src/components/authentication/hooks/useAuthenticateWithOAuth.ts
index 393e3be9..caf3b3cb 100644
--- a/app/src/components/authentication/hooks/useAuthenticateWithOAuth.ts
+++ b/app/src/components/authentication/hooks/useAuthenticateWithOAuth.ts
@@ -14,10 +14,10 @@ import {
 // TODO: Maybe store this in a more global location? with routes?
 export const redirectsByRole = {
-  Guest: '/guest',
-  Host: '/host',
-  Coordinator: '/coordinator',
-  Admin: '/coordinator',
+  guest: '/guest',
+  host: '/host',
+  coordinator: '/coordinator',
+  admin: '/coordinator',
 };

 interface UseAuthenticateWithOAuth {
@@ -55,7 +55,7 @@ export const useAuthenticateWithOAuth = ({
         .then(response => {
           const {token, user} = response;
           dispatch(setCredentials({user, token}));
-          navigate(redirectsByRole[user.role.name]);
+          navigate(redirectsByRole[user.role.type]);
         })
         .catch(err => {
           if (isFetchBaseQueryError(err)) {
diff --git a/app/src/utils/test/browser.ts b/app/src/utils/test/browser.ts
index 9999dc0e..67277d73 100644
--- a/app/src/utils/test/browser.ts
+++ b/app/src/utils/test/browser.ts
@@ -14,7 +14,7 @@ export const enableMocking = async () => {
     onUnhandledRequest(req, print) {
       // Ignore any requests from these URLs.
       const excludedRoutes = [
-        '/api/auth/user',
+        '/api/user',
         '/api/auth/session',
         '/api/auth/refresh',
       ];
diff --git a/app/src/views/SignIn.tsx b/app/src/views/SignIn.tsx
index 83eccdbf..d2cce685 100644
--- a/app/src/views/SignIn.tsx
+++ b/app/src/views/SignIn.tsx
@@ -57,7 +57,7 @@ export const SignIn = () => {

       dispatch(setCredentials({user, token}));

-      navigate(redirectsByRole[user.role.name]);
+      navigate(redirectsByRole[user.role.type]);
     } catch (err) {
       if (isFetchBaseQueryError(err)) {
         // you can access all properties of `FetchBaseQueryError` here
From f842cf3f14922ca15e82e169b7bc169044627419 Mon Sep 17 00:00:00 2001
From: "Mr. Paul"
Date: Thu, 12 Sep 2024 17:02:25 -0700
Subject: [PATCH 45/70] fastapi-migration: Document API naming convention in
 README.

---
 api-v2/README.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/api-v2/README.md b/api-v2/README.md
index f168da9f..095f2e91 100644
--- a/api-v2/README.md
+++ b/api-v2/README.md
@@ -55,3 +55,9 @@ To exit the virtual environment, within the shell run:
 ```shell
 exit
 ```
+
+## Conventions
+
+### API Endpoints
+
+A path segment with spaces must replace the spaces with a hyphen `-`. For example, `https://dev.homeunite.us/api/housing-orgs`.
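The hyphenation rule documented in the README addition above is the same one already applied when routers are mounted in `app/modules/router.py` (`/intake-profile`, `/housing-orgs`). A minimal sketch of how a future multi-word resource would follow the convention; the `housing_programs` module and its router are hypothetical, named here only for illustration:

```python
from fastapi import APIRouter

# Hypothetical module: the Python package name keeps underscores...
from app.modules.housing_programs import controller as housing_programs

api_router = APIRouter()

# ...while the URL path segment uses a hyphen, per the README convention.
api_router.include_router(
    housing_programs.router, prefix="/housing-programs", tags=["housing_programs"]
)
```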
From 3a75d0ac81d62467ad0334e220d6470e9bcc8b3f Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 12 Sep 2024 20:23:26 -0700 Subject: [PATCH 46/70] Merge and move relationship management folder --- api-v2/app/modules/{stays => relationship_managment}/__init__.py | 0 .../app/modules/{stays => relationship_managment}/controller.py | 0 api-v2/app/modules/{stays => relationship_managment}/model.py | 0 api-v2/app/modules/{stays => relationship_managment}/schemas.py | 0 api-v2/app/relationship_managment/__init__.py | 0 api-v2/app/relationship_managment/controller.py | 0 api-v2/app/relationship_managment/model.py | 0 api-v2/app/relationship_managment/schemas.py | 0 8 files changed, 0 insertions(+), 0 deletions(-) rename api-v2/app/modules/{stays => relationship_managment}/__init__.py (100%) rename api-v2/app/modules/{stays => relationship_managment}/controller.py (100%) rename api-v2/app/modules/{stays => relationship_managment}/model.py (100%) rename api-v2/app/modules/{stays => relationship_managment}/schemas.py (100%) delete mode 100644 api-v2/app/relationship_managment/__init__.py delete mode 100644 api-v2/app/relationship_managment/controller.py delete mode 100644 api-v2/app/relationship_managment/model.py delete mode 100644 api-v2/app/relationship_managment/schemas.py diff --git a/api-v2/app/modules/stays/__init__.py b/api-v2/app/modules/relationship_managment/__init__.py similarity index 100% rename from api-v2/app/modules/stays/__init__.py rename to api-v2/app/modules/relationship_managment/__init__.py diff --git a/api-v2/app/modules/stays/controller.py b/api-v2/app/modules/relationship_managment/controller.py similarity index 100% rename from api-v2/app/modules/stays/controller.py rename to api-v2/app/modules/relationship_managment/controller.py diff --git a/api-v2/app/modules/stays/model.py b/api-v2/app/modules/relationship_managment/model.py similarity index 100% rename from api-v2/app/modules/stays/model.py rename to api-v2/app/modules/relationship_managment/model.py diff --git a/api-v2/app/modules/stays/schemas.py b/api-v2/app/modules/relationship_managment/schemas.py similarity index 100% rename from api-v2/app/modules/stays/schemas.py rename to api-v2/app/modules/relationship_managment/schemas.py diff --git a/api-v2/app/relationship_managment/__init__.py b/api-v2/app/relationship_managment/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/api-v2/app/relationship_managment/controller.py b/api-v2/app/relationship_managment/controller.py deleted file mode 100644 index e69de29b..00000000 diff --git a/api-v2/app/relationship_managment/model.py b/api-v2/app/relationship_managment/model.py deleted file mode 100644 index e69de29b..00000000 diff --git a/api-v2/app/relationship_managment/schemas.py b/api-v2/app/relationship_managment/schemas.py deleted file mode 100644 index e69de29b..00000000 From bc3484c70360bc33f3430a98771df5c29c9df906 Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Sat, 14 Sep 2024 23:55:39 -0700 Subject: [PATCH 47/70] fastapi-migration: Test authentication Updated and rearranged dependencies, fixed relationship management spelling error, rewrote test_authentication.py. 
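The dependency wiring implied by this patch is not shown in the hunks below, since `app/modules/deps.py` is only imported. A minimal sketch of how the cached settings, lazily created engine, and session factory from `app/core` could compose into the `SettingsDep` and `DbSessionDep` dependencies referenced by `auth_controller.py`; this is an assumed reconstruction, not the patch's actual `deps.py`:

```python
# Assumed sketch of app/modules/deps.py; the bodies are illustrative only.
from typing import Annotated

from fastapi import Depends
from sqlalchemy.orm import Session

import app.core.config as config
import app.core.db as db


def get_db():
    settings = config.get_settings()  # cached by lru_cache
    engine = db.db_engine(settings)  # module-level singleton, created once
    session = db.db_session_factory(engine)()  # new session per request
    try:
        yield session
    finally:
        session.close()


SettingsDep = Annotated[config.Settings, Depends(config.get_settings)]
DbSessionDep = Annotated[Session, Depends(get_db)]
```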
--- api-v2/app/core/config.py | 15 +- api-v2/app/core/db.py | 31 +- api-v2/app/main.py | 11 +- .../{relationship_managment => }/__init__.py | 0 .../stays => app/modules/access}/__init__.py | 0 api-v2/app/modules/access/auth_controller.py | 253 ++++---- api-v2/app/modules/access/schemas.py | 10 +- api-v2/app/modules/access/user_controller.py | 33 +- api-v2/app/modules/access/user_repo.py | 14 +- api-v2/app/modules/access/user_roles.py | 8 +- api-v2/app/modules/deps.py | 88 +-- .../app/modules/intake_profile/controller.py | 16 +- .../__init__.py} | 0 .../controller.py} | 0 .../model.py} | 0 .../relationship_management/schemas.py | 0 .../modules/tenant_housing_orgs/controller.py | 18 +- api-v2/app/seed.py | 10 +- api-v2/app/utils.py | 20 - api-v2/poetry.lock | 422 ++++++------- api-v2/pyproject.toml | 4 + api-v2/tests/cognito_setup.py | 211 +++++++ api-v2/tests/conftest.py | 98 +-- api-v2/tests/integration/setup_utils.py | 72 --- .../tests/integration/test_authentication.py | 567 ++++++++++-------- api-v2/tests/integration/test_user_repo.py | 103 ++-- .../unit/relationship_management/__init__.py | 0 27 files changed, 1102 insertions(+), 902 deletions(-) rename api-v2/app/modules/{relationship_managment => }/__init__.py (100%) rename api-v2/{tests/unit/stays => app/modules/access}/__init__.py (100%) rename api-v2/app/modules/{relationship_managment/controller.py => relationship_management/__init__.py} (100%) rename api-v2/app/modules/{relationship_managment/model.py => relationship_management/controller.py} (100%) rename api-v2/app/modules/{relationship_managment/schemas.py => relationship_management/model.py} (100%) create mode 100644 api-v2/app/modules/relationship_management/schemas.py delete mode 100644 api-v2/app/utils.py create mode 100644 api-v2/tests/cognito_setup.py delete mode 100644 api-v2/tests/integration/setup_utils.py create mode 100644 api-v2/tests/unit/relationship_management/__init__.py diff --git a/api-v2/app/core/config.py b/api-v2/app/core/config.py index 2ab6716c..38e3b937 100644 --- a/api-v2/app/core/config.py +++ b/api-v2/app/core/config.py @@ -1,12 +1,9 @@ +from functools import lru_cache from pydantic_settings import BaseSettings, SettingsConfigDict -from pydantic_settings import BaseSettings - class Settings(BaseSettings): - model_config = SettingsConfigDict( - env_file=".env" - ) + model_config = SettingsConfigDict(env_file=".env") COGNITO_CLIENT_ID: str COGNITO_CLIENT_SECRET: str @@ -15,10 +12,10 @@ class Settings(BaseSettings): COGNITO_USER_POOL_ID: str COGNITO_ACCESS_ID: str COGNITO_ACCESS_KEY: str - SECRET_KEY: str - CONFIG_PROFILE: str ROOT_URL: str - ENV: str DATABASE_URL: str -settings = Settings() + +@lru_cache +def get_settings(): + return Settings() diff --git a/api-v2/app/core/db.py b/api-v2/app/core/db.py index d9457f50..4da47064 100644 --- a/api-v2/app/core/db.py +++ b/api-v2/app/core/db.py @@ -1,13 +1,32 @@ +"""Shared database components.""" from sqlalchemy import create_engine -from sqlalchemy.orm import DeclarativeBase, sessionmaker -from app.core.config import settings +from sqlalchemy.orm import sessionmaker, DeclarativeBase -engine = create_engine( - settings.DATABASE_URL, connect_args={"check_same_thread": False} -) +_db_engine = None +_DbSessionFactory = None -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) class Base(DeclarativeBase): pass + + +def init_db(engine): + Base.metadata.create_all(bind=engine, checkfirst=True) + + +def db_engine(settings): + global _db_engine + if _db_engine is None: + _db_engine = 
create_engine(settings.DATABASE_URL, + connect_args={"check_same_thread": False}) + return _db_engine + + +def db_session_factory(engine): + global _DbSessionFactory + if _DbSessionFactory is None: + _DbSessionFactory = sessionmaker(autocommit=False, + autoflush=False, + bind=engine) + return _DbSessionFactory diff --git a/api-v2/app/main.py b/api-v2/app/main.py index 28d99969..b6adf287 100644 --- a/api-v2/app/main.py +++ b/api-v2/app/main.py @@ -1,19 +1,20 @@ from fastapi import FastAPI from contextlib import asynccontextmanager - from app.modules.router import api_router -# from core.config import settings -from app.seed import init_db +import app.core.db as db +import app.core.config as config @asynccontextmanager async def lifespan(app: FastAPI): - init_db() + settings = config.get_settings() + engine = db.db_engine(settings) + import seed + db.init_db(engine) yield app = FastAPI(lifespan=lifespan) - app.include_router(api_router, prefix="/api") diff --git a/api-v2/app/modules/relationship_managment/__init__.py b/api-v2/app/modules/__init__.py similarity index 100% rename from api-v2/app/modules/relationship_managment/__init__.py rename to api-v2/app/modules/__init__.py diff --git a/api-v2/tests/unit/stays/__init__.py b/api-v2/app/modules/access/__init__.py similarity index 100% rename from api-v2/tests/unit/stays/__init__.py rename to api-v2/app/modules/access/__init__.py diff --git a/api-v2/app/modules/access/auth_controller.py b/api-v2/app/modules/access/auth_controller.py index 94a08b4a..442a8cdc 100644 --- a/api-v2/app/modules/access/auth_controller.py +++ b/api-v2/app/modules/access/auth_controller.py @@ -4,30 +4,20 @@ from fastapi import Depends, APIRouter, HTTPException, Response, Security, Request from fastapi.responses import RedirectResponse -from sqlalchemy.orm import Session from botocore.exceptions import ClientError - -from app.modules.access.schemas import UserCreate, UserSignInRequest, UserSignInResponse, ForgotPasswordRequest, ForgotPasswordResponse, ConfirmForgotPasswordResponse, ConfirmForgotPasswordRequest, RefreshTokenResponse +from app.modules.access.schemas import ( + UserCreate, UserSignInRequest, UserSignInResponse, ForgotPasswordRequest, + ForgotPasswordResponse, ConfirmForgotPasswordResponse, + ConfirmForgotPasswordRequest, RefreshTokenResponse) from app.modules.access.crud import create_user, delete_user, get_user -from app.modules.deps import ( - get_db, - get_cognito_client, - requires_auth, - allow_roles, - role_to_cognito_group_map, - calc_secret_hash -) - -from app.utils import calc_secret_hash -from app.core.config import settings +from app.modules.deps import (SettingsDep, DbSessionDep, CognitoIdpDep, + SecretHashFuncDep, requires_auth, allow_roles, + role_to_cognito_group_map) router = APIRouter() -cognito_client_id = settings.COGNITO_CLIENT_ID -root_url = settings.ROOT_URL - # Helper function to set session cookies def set_session_cookie(response: Response, auth_response: dict): @@ -38,19 +28,16 @@ def set_session_cookie(response: Response, auth_response: dict): response.set_cookie("id_token", id_token) -""" -# Sign up route - -This route is used to Sign up a new user -""" - - @router.post("/signup") -def signup( - body: UserCreate, - db: Session = Depends(get_db), - cognito_client=Depends(get_cognito_client), -): +def signup(body: UserCreate, + settings: SettingsDep, + db: DbSessionDep, + cognito_client: CognitoIdpDep, + calc_secret_hash: SecretHashFuncDep): + """Sign up route. + + This route is used to Sign up a new user. 
+ """ # Create user in database user = create_user(db, body) if user is None: @@ -59,11 +46,11 @@ def signup( # Add user to cognito try: response = cognito_client.sign_up( - ClientId=cognito_client_id, + ClientId=settings.COGNITO_CLIENT_ID, SecretHash=calc_secret_hash(body.email), Username=user.email, Password=body.password, - ClientMetadata={"url": root_url}, + ClientMetadata={"url": settings.ROOT_URL}, ) except Exception as e: logging.error(f"Failed to create user: {e}") @@ -84,26 +71,20 @@ def signup( return response -""" -# Sign in route - -This route is used to sign in a user and start a new session -""" +@router.post("/signin", response_model=UserSignInResponse) +def signin(body: UserSignInRequest, + response: Response, + settings: SettingsDep, + db: DbSessionDep, + cognito_client: CognitoIdpDep, + calc_secret_hash: SecretHashFuncDep): + """Sign in route. - -@router.post( - "/signin", - response_model=UserSignInResponse, -) -def signin( - body: UserSignInRequest, - response: Response, - db: Session = Depends(get_db), - cognito_client=Depends(get_cognito_client), -): + This route is used to sign in a user and start a new session. + """ try: auth_response = cognito_client.initiate_auth( - ClientId=cognito_client_id, + ClientId=settings.COGNITO_CLIENT_ID, AuthFlow="USER_PASSWORD_AUTH", AuthParameters={ "USERNAME": body.email, @@ -120,12 +101,11 @@ def signin( }, ) - if ( - auth_response.get("ChallengeName") - and auth_response["ChallengeName"] == "NEW_PASSWORD_REQUIRED" - ): + if (auth_response.get("ChallengeName") + and auth_response["ChallengeName"] == "NEW_PASSWORD_REQUIRED"): userId = auth_response["ChallengeParameters"]["USER_ID_FOR_SRP"] sessionId = auth_response["Session"] + root_url = settings.ROOT_URL return RedirectResponse( f"{root_url}/create-password?userId={userId}&sessionId={sessionId}" ) @@ -142,13 +122,6 @@ def signin( } -""" -# Secret route - -This route is a secret route that requires authentication and the guest role -""" - - @router.get( "/secret", dependencies=[ @@ -157,42 +130,52 @@ def signin( ], ) def secret(): - return {"message": "Welcome to the secret route"} + """Secret route. - -''' -# Current session route - -This route is used to get the current session and user info upon page refresh -''' + This route is a secret route that requires authentication and the guest role. + """ + return {"message": "Welcome to the secret route"} @router.get("/session", response_model=UserSignInResponse) -def current_session(request: Request, cognito_client=Depends(get_cognito_client), db: Session = Depends(get_db)): +def current_session(request: Request, + settings: SettingsDep, + db: DbSessionDep, + cognito_client: CognitoIdpDep, + calc_secret_hash: SecretHashFuncDep): + """Current session route. + + This route is used to get the current session and user info upon page refresh. 
+ """ id_token = request.cookies.get('id_token') refresh_token = request.cookies.get('refresh_token') if None in (id_token, refresh_token): raise HTTPException(status_code=401, detail="Missing session cookies") - - decoded_id_token = jwt.decode( - id_token, algorithms=["RS256"], options={"verify_signature": False} - ) + + decoded_id_token = jwt.decode(id_token, + algorithms=["RS256"], + options={"verify_signature": False}) user = get_user(db, decoded_id_token['email']) try: auth_response = cognito_client.initiate_auth( - ClientId=cognito_client_id, + ClientId=settings.COGNITO_CLIENT_ID, AuthFlow='REFRESH_TOKEN', AuthParameters={ - 'REFRESH_TOKEN': refresh_token, - 'SECRET_HASH': calc_secret_hash(decoded_id_token["cognito:username"]) - } - ) + 'REFRESH_TOKEN': + refresh_token, + 'SECRET_HASH': + calc_secret_hash(decoded_id_token["email"]) + }) except ClientError as e: code = e.response['Error']['Code'] message = e.response['Error']['Message'] - raise HTTPException(status_code=400, detail={"code": code, "message": message}) + raise HTTPException(status_code=400, + detail={ + "code": code, + "message": message + }) return { "user": user, @@ -200,98 +183,104 @@ def current_session(request: Request, cognito_client=Depends(get_cognito_client) } -''' -# Refresh route - -This route is used to refresh the current access token during session -''' - - @router.get("/refresh", response_model=RefreshTokenResponse) -def refresh(request: Request, cognito_client=Depends(get_cognito_client)): +def refresh(request: Request, + settings: SettingsDep, + cognito_client: CognitoIdpDep, + calc_secret_hash: SecretHashFuncDep): + """Refresh route. + + This route is used to refresh the current access token during session. + """ refresh_token = request.cookies.get('refresh_token') id_token = request.cookies.get('id_token') if None in (refresh_token, id_token): - raise HTTPException(status_code=401, detail="Missing refresh token or id token") + raise HTTPException(status_code=401, + detail="Missing refresh token or id token") - decoded = jwt.decode(id_token, algorithms=["RS256"], options={"verify_signature": False}) + decoded = jwt.decode(id_token, + algorithms=["RS256"], + options={"verify_signature": False}) try: response = cognito_client.initiate_auth( - ClientId=cognito_client_id, + ClientId=settings.COGNITO_CLIENT_ID, AuthFlow='REFRESH_TOKEN', AuthParameters={ 'REFRESH_TOKEN': refresh_token, - 'SECRET_HASH': calc_secret_hash(decoded["cognito:username"]) - } - ) + 'SECRET_HASH': calc_secret_hash(decoded["email"]) + }) except ClientError as e: code = e.response['Error']['Code'] message = e.response['Error']['Message'] - raise HTTPException(status_code=400, detail={"code": code, "message": message}) + raise HTTPException(status_code=400, + detail={ + "code": code, + "message": message + }) access_token = response['AuthenticationResult']['AccessToken'] # Return access token - return { - "token": access_token - } - -""" -# Forgot Password Route - -This route handles forgot password requests by hashing credentials and sending to AWS Cognito. - -""" + return {"token": access_token} @router.post("/forgot_password", response_model=ForgotPasswordResponse) -def forgot_password( - body: ForgotPasswordRequest, - cognito_client=Depends(get_cognito_client) -): +def forgot_password(body: ForgotPasswordRequest, + settings: SettingsDep, + cognito_client: CognitoIdpDep, + calc_secret_hash: SecretHashFuncDep): + """Forgot Password Route. 
+ + This route handles forgot password requests by hashing credentials + and sending to AWS Cognito. + """ secret_hash = calc_secret_hash(body.email) - + try: - response = cognito_client.forgot_password( - ClientId=cognito_client_id , - SecretHash=secret_hash, - Username=body.email - ) + cognito_client.forgot_password(ClientId=settings.COGNITO_CLIENT_ID, + SecretHash=secret_hash, + Username=body.email) except boto3.exceptions.Boto3Error as e: code = e.response['Error']['Code'] message = e.response['Error']['Message'] - raise HTTPException(status_code=401, detail={"code": code, "message": message}) - - return {"message": "Password reset instructions sent"} + raise HTTPException(status_code=401, + detail={ + "code": code, + "message": message + }) + return {"message": "Password reset instructions sent"} -""" -# Confirm forgot password route - -This route handles forgot password confirmation code requests by receiving the confirmation code and sending to AWS Cognito to verify. -""" +@router.post("/confirm_forgot_password", + response_model=ConfirmForgotPasswordResponse) +def confirm_forgot_password(body: ConfirmForgotPasswordRequest, + settings: SettingsDep, + cognito_client: CognitoIdpDep, + calc_secret_hash: SecretHashFuncDep): + """Confirm forgot password route. -@router.post("/confirm_forgot_password", response_model=ConfirmForgotPasswordResponse) -def confirm_forgot_password( - body: ConfirmForgotPasswordRequest, - cognito_client=Depends(get_cognito_client) -): + This route handles forgot password confirmation code requests by receiving + the confirmation code and sending to AWS Cognito to verify. + """ secret_hash = calc_secret_hash(body.email) - + try: - response = cognito_client.confirm_forgot_password( + cognito_client.confirm_forgot_password( ClientId=settings.COGNITO_CLIENT_ID, SecretHash=secret_hash, Username=body.email, ConfirmationCode=body.code, - Password=body.password - ) + Password=body.password) except boto3.exceptions.Boto3Error as e: code = e.response['Error']['Code'] message = e.response['Error']['Message'] - raise HTTPException(status_code=401, detail={"code": code, "message": message}) - - return {"message": "Password has been reset successfully"} \ No newline at end of file + raise HTTPException(status_code=401, + detail={ + "code": code, + "message": message + }) + + return {"message": "Password has been reset successfully"} diff --git a/api-v2/app/modules/access/schemas.py b/api-v2/app/modules/access/schemas.py index a8d567c4..aa5d8a46 100644 --- a/api-v2/app/modules/access/schemas.py +++ b/api-v2/app/modules/access/schemas.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict, EmailStr from enum import Enum @@ -18,7 +18,7 @@ class RoleBase(BaseModel): class UserBase(BaseModel): - email: str + email: EmailStr firstName: str middleName: str | None = None lastName: str | None = None @@ -37,7 +37,7 @@ class User(UserBase): class UserSignInRequest(BaseModel): - email: str + email: EmailStr password: str @@ -49,7 +49,7 @@ class UserSignInResponse(BaseModel): class RefreshTokenResponse(BaseModel): token: str class ForgotPasswordRequest(BaseModel): - email: str + email: EmailStr class ForgotPasswordResponse(BaseModel): @@ -59,7 +59,7 @@ class ForgotPasswordResponse(BaseModel): class ConfirmForgotPasswordRequest(BaseModel): - email: str + email: EmailStr code: str password: str diff --git a/api-v2/app/modules/access/user_controller.py b/api-v2/app/modules/access/user_controller.py index 2d190e71..370d8d99 100644 --- 
a/api-v2/app/modules/access/user_controller.py +++ b/api-v2/app/modules/access/user_controller.py @@ -1,34 +1,31 @@ import jwt -from fastapi import APIRouter, Request, Depends, HTTPException -from sqlalchemy.orm import Session - +from fastapi import APIRouter, Request, HTTPException from app.modules.access.schemas import User from app.modules.access.crud import get_user -from app.modules.deps import get_db - +from app.modules.deps import DbSessionDep router = APIRouter() -''' -# Get user route - -This route is used to get the current user info -''' - @router.get("/", response_model=User) -def get_user_info(request: Request, db: Session = Depends(get_db)): +def get_user_info(request: Request, db: DbSessionDep): + """Get user route. + + This route is used to get the current user info. + """ id_token = request.cookies.get('id_token') - if(id_token is None): + if (id_token is None): raise HTTPException(status_code=401, detail="Missing id token") - - decoded = jwt.decode(id_token, algorithms=["RS256"], options={"verify_signature": False}) + + decoded = jwt.decode(id_token, + algorithms=["RS256"], + options={"verify_signature": False}) email = decoded['email'] - if(email is None): + if (email is None): raise HTTPException(status_code=401, detail="Email not found in token") user = get_user(db, email) - - return user \ No newline at end of file + + return user diff --git a/api-v2/app/modules/access/user_repo.py b/api-v2/app/modules/access/user_repo.py index 58913202..cec2082d 100644 --- a/api-v2/app/modules/access/user_repo.py +++ b/api-v2/app/modules/access/user_repo.py @@ -1,7 +1,5 @@ -from typing import List - -from .models import UnmatchedGuestCase, UnmatchedGuestCaseStatus, User, Role -from .user_roles import UmatchedCaseStatus, UserRole +from app.modules.access.models import UnmatchedGuestCase, UnmatchedGuestCaseStatus, User, Role +from app.modules.access.user_roles import UmatchedCaseStatus, UserRole class UnmatchedCaseRepository: @@ -41,7 +39,7 @@ def __init__(self, session): self.session = session def _get_role(self, role: UserRole) -> Role: - db_role = self.session.query(Role).filter_by(name=role.value).first() + db_role = self.session.query(Role).filter_by(type=role.value).first() if not db_role: raise ValueError(f"{role.value} is not a valid user role type") return db_role @@ -57,7 +55,7 @@ def add_user(self, firstName=firstName, middleName=middleName, lastName=lastName, - role_id=new_role.id) + roleId=new_role.id) self.session.add(new_user) self.session.commit() @@ -77,11 +75,11 @@ def get_user_by_id(self, id: int) -> User: def get_user(self, email: str) -> User: return self.session.query(User).filter_by(email=email).first() - def get_all_users(self) -> List[User]: + def get_all_users(self) -> list[User]: return self.session.query(User).all() def get_user_id(self, email: str) -> int: return self.session.query(User).filter_by(email=email).first().id - def get_users_with_role(self, role: UserRole) -> List[User]: + def get_users_with_role(self, role: UserRole) -> list[User]: return self.session.query(User).filter_by(role=self._get_role(role)) diff --git a/api-v2/app/modules/access/user_roles.py b/api-v2/app/modules/access/user_roles.py index b6dc43af..cc46272a 100644 --- a/api-v2/app/modules/access/user_roles.py +++ b/api-v2/app/modules/access/user_roles.py @@ -2,10 +2,10 @@ class UserRole(Enum): - ADMIN = "Admin" - GUEST = "Guest" - HOST = "Host" - COORDINATOR = "Coordinator" + ADMIN = "admin" + GUEST = "guest" + HOST = "host" + COORDINATOR = "coordinator" class 
UmatchedCaseStatus(Enum): diff --git a/api-v2/app/modules/deps.py b/api-v2/app/modules/deps.py index ecd7b22b..cf7d5c1f 100644 --- a/api-v2/app/modules/deps.py +++ b/api-v2/app/modules/deps.py @@ -4,41 +4,55 @@ import hmac import base64 -from fastapi import Request, HTTPException +from typing import Annotated, Any, Callable + +from fastapi import Depends, Request, HTTPException from fastapi.security import SecurityScopes +from sqlalchemy.orm import Session + +import app.core.db as db +import app.core.config as config + +SettingsDep = Annotated[config.Settings, Depends(config.get_settings)] + -from app.core.db import SessionLocal -from app.core.config import settings +def db_engine(settings: SettingsDep): + return db.db_engine(settings) -cognito_region = settings.COGNITO_REGION -cognito_access_id = settings.COGNITO_ACCESS_ID -cognito_access_key = settings.COGNITO_ACCESS_KEY +DbEngineDep = Annotated[Any, Depends(db_engine)] -def get_db(): - db = SessionLocal() + +def db_session(engine: DbEngineDep): + session_factory = db.db_session_factory(engine) + session = session_factory() try: - yield db + yield session finally: - db.close() + session.close() + +DbSessionDep = Annotated[Session, Depends(db_session)] -def get_cognito_client(): + +def get_cognito_client(settings: SettingsDep): return boto3.client( "cognito-idp", - region_name=cognito_region, - aws_access_key_id=cognito_access_id, - aws_secret_access_key=cognito_access_key, + region_name=settings.COGNITO_REGION, + aws_access_key_id=settings.COGNITO_ACCESS_ID, + aws_secret_access_key=settings.COGNITO_ACCESS_KEY, ) -def requires_auth(request: Request): +CognitoIdpDep = Annotated[Any, Depends(get_cognito_client)] + + +def requires_auth(request: Request, cognito_client: CognitoIdpDep): # Check for Authorization header auth_header = request.headers.get("Authorization") if auth_header is None: - raise HTTPException( - status_code=401, detail="Missing Authorization header" - ) + raise HTTPException(status_code=401, + detail="Missing Authorization header") # Check for Bearer token token = auth_header.split(" ")[1] @@ -46,16 +60,15 @@ def requires_auth(request: Request): raise HTTPException(status_code=401, detail="Missing token") # Decode token - decoded_access_token = jwt.decode( - token, algorithms=["RS256"], options={"verify_signature": False} - ) + decoded_access_token = jwt.decode(token, + algorithms=["RS256"], + options={"verify_signature": False}) # Check if token is expired if decoded_access_token["exp"] < time.time(): raise HTTPException(status_code=401, detail="Token expired") try: - cognito_client = get_cognito_client() cognito_client.get_user(AccessToken=token) except Exception: raise HTTPException(status_code=401, detail="Invalid token") @@ -76,26 +89,29 @@ def allow_roles(request: Request, security_scopes: SecurityScopes): if id_token is None: raise HTTPException(status_code=401, detail="Missing id_token") - decoded_id_token = jwt.decode( - id_token, algorithms=["RS256"], options={"verify_signature": False} - ) + decoded_id_token = jwt.decode(id_token, + algorithms=["RS256"], + options={"verify_signature": False}) groups = decoded_id_token.get("cognito:groups") - contains_group = any( - role_to_cognito_group_map[scope] in groups - for scope in security_scopes.scopes - ) + contains_group = any(role_to_cognito_group_map[scope] in groups + for scope in security_scopes.scopes) if not contains_group: raise HTTPException(status_code=403, detail="Unauthorized") return True +def secret_hash_func(settings: SettingsDep): + + def 
hash(username: str) -> str: + message = username + settings.COGNITO_CLIENT_ID + secret = bytearray(settings.COGNITO_CLIENT_SECRET, "utf-8") + dig = hmac.new(secret, msg=message.encode("utf-8"), + digestmod="sha256").digest() + return base64.b64encode(dig).decode() + + return hash + -def calc_secret_hash(username: str) -> str: - message = username + settings.COGNITO_CLIENT_ID - secret = bytearray(settings.COGNITO_CLIENT_SECRET, "utf-8") - dig = hmac.new( - secret, msg=message.encode("utf-8"), digestmod="sha256" - ).digest() - return base64.b64encode(dig).decode() +SecretHashFuncDep = Annotated[Callable, Depends(secret_hash_func)] diff --git a/api-v2/app/modules/intake_profile/controller.py b/api-v2/app/modules/intake_profile/controller.py index 6f90ed85..c183f0d9 100644 --- a/api-v2/app/modules/intake_profile/controller.py +++ b/api-v2/app/modules/intake_profile/controller.py @@ -1,15 +1,11 @@ -import logging from fastapi import Depends, APIRouter, HTTPException, Response, Security -from fastapi.responses import RedirectResponse - -from app.modules.deps import ( - get_db, - get_cognito_client, - requires_auth, - allow_roles, - role_to_cognito_group_map, -) +# from fastapi.responses import RedirectResponse + +# from app.modules.deps import ( +# DbSessionDep, +# CognitoIdpDep, +# ) router = APIRouter() diff --git a/api-v2/app/modules/relationship_managment/controller.py b/api-v2/app/modules/relationship_management/__init__.py similarity index 100% rename from api-v2/app/modules/relationship_managment/controller.py rename to api-v2/app/modules/relationship_management/__init__.py diff --git a/api-v2/app/modules/relationship_managment/model.py b/api-v2/app/modules/relationship_management/controller.py similarity index 100% rename from api-v2/app/modules/relationship_managment/model.py rename to api-v2/app/modules/relationship_management/controller.py diff --git a/api-v2/app/modules/relationship_managment/schemas.py b/api-v2/app/modules/relationship_management/model.py similarity index 100% rename from api-v2/app/modules/relationship_managment/schemas.py rename to api-v2/app/modules/relationship_management/model.py diff --git a/api-v2/app/modules/relationship_management/schemas.py b/api-v2/app/modules/relationship_management/schemas.py new file mode 100644 index 00000000..e69de29b diff --git a/api-v2/app/modules/tenant_housing_orgs/controller.py b/api-v2/app/modules/tenant_housing_orgs/controller.py index 4d24ef43..8b2dcb77 100644 --- a/api-v2/app/modules/tenant_housing_orgs/controller.py +++ b/api-v2/app/modules/tenant_housing_orgs/controller.py @@ -5,12 +5,10 @@ from . import crud, models, schemas from typing import Any -from fastapi import APIRouter, Depends, Request, Response, HTTPException, status +from fastapi import APIRouter, Request, Response, HTTPException, status from fastapi.responses import RedirectResponse -from sqlalchemy.orm import Session -from app.modules.deps import ( - get_db, ) +from app.modules.deps import DbSessionDep router = APIRouter() @@ -21,7 +19,7 @@ def create_housing_org( housing_org: schemas.HousingOrg, request: Request, - session: Session = Depends(get_db)) -> Any: + session: DbSessionDep) -> Any: """Create a housing org. 
A housing org is created if it is not already in @@ -46,9 +44,7 @@ def create_housing_org( @router.get("/{housing_org_id}") -def get_housing_org( - housing_org_id: int, session: Session = Depends(get_db) -) -> schemas.HousingOrg | None: +def get_housing_org(housing_org_id: int, session: DbSessionDep) -> schemas.HousingOrg | None: """Get details about a housing org from an ID.""" housing_org = crud.read_housing_org_by_id(session, housing_org_id) if not housing_org: @@ -58,7 +54,7 @@ def get_housing_org( @router.get("/") -def get_housing_orgs(session: Session = Depends(get_db)) -> list[ +def get_housing_orgs(session: DbSessionDep) -> list[ schemas.HousingOrg]: """Get a list of all housing orgs.""" return crud.read_housing_orgs(session) @@ -69,7 +65,7 @@ def put_housing_org( housing_org_id: int, body: schemas.HousingOrg, response: Response, - session: Session = Depends(get_db)) -> schemas.HousingOrg: + session: DbSessionDep) -> schemas.HousingOrg: """Create or Update a Housing Org with the given ID. If the representation contains a Housing Org ID that does match the ID given @@ -95,7 +91,7 @@ def put_housing_org( @router.delete("/{housing_org_id}", status_code=status.HTTP_204_NO_CONTENT) def delete_housing_org(housing_org_id: int, - session: Session = Depends(get_db)): + session: DbSessionDep): """Delete a housing org. :param housing_org_id: The ID of the housing org to delete. diff --git a/api-v2/app/seed.py b/api-v2/app/seed.py index 4eef07bf..238a55de 100644 --- a/api-v2/app/seed.py +++ b/api-v2/app/seed.py @@ -1,6 +1,5 @@ from sqlalchemy import event -from app.core.db import Base, engine from app.modules.access.models import Role INITIAL_ROLES = [ @@ -11,15 +10,14 @@ ] -# This method receives a table, a connection and inserts data to that table. def initialize_table(target, connection, **kw): + """Initialize a DB table. + + This method receives a table, a connection and inserts data to that table. 
+ """ for role in INITIAL_ROLES: connection.execute(target.insert(), role) return event.listen(Role.__table__, "after_create", initialize_table) - - -def init_db(): - Base.metadata.create_all(bind=engine, checkfirst=True) diff --git a/api-v2/app/utils.py b/api-v2/app/utils.py deleted file mode 100644 index 652dcd32..00000000 --- a/api-v2/app/utils.py +++ /dev/null @@ -1,20 +0,0 @@ -import hmac -import base64 - -from app.core.config import settings - - -# Helper function to calculate secret hash -def calc_secret_hash(email: str) -> str: - cognito_client_id = settings.COGNITO_CLIENT_ID - cognito_client_secret = settings.COGNITO_CLIENT_SECRET - - if cognito_client_id is None: - print("COGNITO_CLIENT_ID is not set") - - message = email + cognito_client_id - secret = bytearray(cognito_client_secret, "utf-8") - dig = hmac.new( - secret, msg=message.encode("utf-8"), digestmod="sha256" - ).digest() - return base64.b64encode(dig).decode() diff --git a/api-v2/poetry.lock b/api-v2/poetry.lock index 6e1163ef..64665e43 100644 --- a/api-v2/poetry.lock +++ b/api-v2/poetry.lock @@ -52,17 +52,17 @@ trio = ["trio (>=0.23)"] [[package]] name = "boto3" -version = "1.35.14" +version = "1.35.19" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.14-py3-none-any.whl", hash = "sha256:c3e138e9041d59cd34cdc28a587dfdc899dba02ea26ebc3e10fb4bc88e5cf31b"}, - {file = "boto3-1.35.14.tar.gz", hash = "sha256:7bc78d7140c353b10a637927fe4bc4c4d95a464d1b8f515d5844def2ee52cbd5"}, + {file = "boto3-1.35.19-py3-none-any.whl", hash = "sha256:84b3fe1727945bc3cada832d969ddb3dc0d08fce1677064ca8bdc13a89c1a143"}, + {file = "boto3-1.35.19.tar.gz", hash = "sha256:9979fe674780a0b7100eae9156d74ee374cd1638a9f61c77277e3ce712f3e496"}, ] [package.dependencies] -botocore = ">=1.35.14,<1.36.0" +botocore = ">=1.35.19,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -71,13 +71,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.14" +version = "1.35.19" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.14-py3-none-any.whl", hash = "sha256:24823135232f88266b66ae8e1d0f3d40872c14cd976781f7fe52b8f0d79035a0"}, - {file = "botocore-1.35.14.tar.gz", hash = "sha256:8515a2fc7ca5bcf0b10016ba05ccf2d642b7cb77d8773026ff2fa5aa3bf38d2e"}, + {file = "botocore-1.35.19-py3-none-any.whl", hash = "sha256:c83f7f0cacfe7c19b109b363ebfa8736e570d24922f16ed371681f58ebab44a9"}, + {file = "botocore-1.35.19.tar.gz", hash = "sha256:42d6d8db7250cbd7899f786f9861e02cab17dc238f64d6acb976098ed9809625"}, ] [package.dependencies] @@ -86,7 +86,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.21.2)"] +crt = ["awscrt (==0.21.5)"] [[package]] name = "cachetools" @@ -565,69 +565,77 @@ typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.0" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = 
"greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = 
"sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, + {file = 
"greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, + {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, + {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, + {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, + {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, + {file = 
"greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, + {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, + {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, + {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, + {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, + {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, + {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, + {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, + {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, + {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, + {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, + {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, + {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, + {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, + {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, + {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, ] [package.extras] @@ -741,15 +749,18 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.8" +version = "3.9" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.9-py3-none-any.whl", hash = 
"sha256:69297d5da0cc9281c77efffb4e730254dd45943f45bbfb461de5991713989b1e"}, + {file = "idna-3.9.tar.gz", hash = "sha256:e5c5dafde284f26e9e0f28f6ea2d6400abd5ca099864a67f576f3981c6476124"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -931,13 +942,13 @@ files = [ [[package]] name = "moto" -version = "5.0.13" +version = "5.0.14" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "moto-5.0.13-py2.py3-none-any.whl", hash = "sha256:984377a9c4536543fc09f49a1d5210c61c4a4f55c79719f7d9f8dcdd9bf55ea5"}, - {file = "moto-5.0.13.tar.gz", hash = "sha256:ddf8864f0d61af88fd07a4e5eac428c6bebf4fcd10023f8e756e65e9e7b7e4a5"}, + {file = "moto-5.0.14-py2.py3-none-any.whl", hash = "sha256:c738ffe85d3844ef37b865951736c4faf2e0f3e4f05db87bdad97a6c01b88174"}, + {file = "moto-5.0.14.tar.gz", hash = "sha256:0f849243269fd03372426c302b18cb605302da32620d7f0266be6a40735b2acd"}, ] [package.dependencies] @@ -953,24 +964,24 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.6)", "pyparsing (>=3.0.7)", "setuptools"] apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.6)", "pyparsing (>=3.0.7)", "setuptools"] cognitoidp = ["joserfc (>=0.9.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.6)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.6)"] events = ["jsonpath-ng"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)"] -s3 = ["PyYAML (>=5.1)", 
"py-partiql-parser (==0.5.5)"] -s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.5)"] -server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.6)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.6)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.6)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.6)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.6)", "pyparsing (>=3.0.7)", "setuptools"] ssm = ["PyYAML (>=5.1)"] stepfunctions = ["antlr4-python3-runtime", "jsonpath-ng"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -988,19 +999,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.3.1" +version = "4.3.3" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.3.1-py3-none-any.whl", hash = "sha256:facaa5a3c57aa1e053e3da7b49e0cc31fe0113ca42a4659d5c2e98e545624afe"}, - {file = "platformdirs-4.3.1.tar.gz", hash = "sha256:63b79589009fa8159973601dd4563143396b35c5f93a58b36f9049ff046949b1"}, + {file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"}, + {file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -1111,123 +1122,123 @@ files = [ [[package]] name = "pydantic" -version = "2.9.0" +version = "2.9.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"}, - {file = "pydantic-2.9.0.tar.gz", hash = "sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.23.2" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.3" typing-extensions = [ {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, ] -tzdata = {version = "*", markers = "python_version >= \"3.9\""} [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.2" +version = "2.23.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"}, - {file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"}, - {file = 
"pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"}, - {file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"}, - {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"}, - {file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"}, - {file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"}, - {file = "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"}, - {file = "pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"}, - {file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"}, - {file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"}, - {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"}, - {file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"}, - {file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"}, - {file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash = "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"}, - {file = "pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"}, - {file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"}, - {file = 
"pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"}, - {file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"}, - {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"}, - {file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"}, - {file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"}, - {file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = "sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"}, - {file = "pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"}, - {file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"}, - {file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"}, - {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"}, - {file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"}, - {file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"}, - {file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = "sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"}, - {file = "pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"}, - {file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"}, - {file = 
"pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"}, - {file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"}, - {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"}, - {file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"}, - {file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"}, - {file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"}, - {file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"}, - {file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"}, - {file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"}, - {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"}, - {file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"}, - {file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"}, - {file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"}, - {file = 
"pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"}, - {file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"}, - {file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"}, - {file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, + {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, + {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, + {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, + {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, + {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, + {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, + {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, + {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, + {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = 
"sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, + {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, ] [package.dependencies] @@ -1235,13 +1246,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.4.0" +version = "2.5.2" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, + {file = 
"pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"}, + {file = "pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"}, ] [package.dependencies] @@ -1307,13 +1318,13 @@ testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytes [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -1519,13 +1530,13 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "rich" -version = "13.8.0" +version = "13.8.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"}, - {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"}, + {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, + {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, ] [package.dependencies] @@ -1674,13 +1685,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.38.4" +version = "0.38.5" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.38.4-py3-none-any.whl", hash = "sha256:526f53a77f0e43b85f583438aee1a940fd84f8fd610353e8b0c1a77ad8a87e76"}, - {file = "starlette-0.38.4.tar.gz", hash = "sha256:53a7439060304a208fea17ed407e998f46da5e5d9b1addfea3040094512a6379"}, + {file = "starlette-0.38.5-py3-none-any.whl", hash = "sha256:632f420a9d13e3ee2a6f18f437b0a9f1faecb0bc42e1942aa2ea0e379a4c4206"}, + {file = "starlette-0.38.5.tar.gz", hash = "sha256:04a92830a9b6eb1442c766199d62260c3d4dc9c4f9188360626b1e0273cb7077"}, ] [package.dependencies] @@ -1743,26 +1754,15 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false
python-versions = ">=3.8"
files = [
-    {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
-    {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+    {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
+    {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
 ]

 [package.extras]
@@ -2081,4 +2081,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.12"
-content-hash = "8d1850b557390ef1a13451ff6b99189c5f60d9d3cb588d60ce5535e9cc19c0d4"
+content-hash = "1b6a30e72e81c23468cee1c6dfe89e08b755f042eba78c99716a8ebc1beab80c"
diff --git a/api-v2/pyproject.toml b/api-v2/pyproject.toml
index a771a5d9..0b64c698 100644
--- a/api-v2/pyproject.toml
+++ b/api-v2/pyproject.toml
@@ -35,6 +35,10 @@ boto3 = "^1.35.13"
 # from environment variables or secret files.
 pydantic-settings = "^2.4.0"

+# email-validator validates e-mail addresses. It is used with Pydantic to validate
+# e-mail addresses in Pydantic schemas.
+email-validator = "^2.2.0"
+
 # pyjwt is used to decode information contained in a JSON Web Token
 pyjwt = {extras = ["crypto"], version = "^2.9.0"}

diff --git a/api-v2/tests/cognito_setup.py b/api-v2/tests/cognito_setup.py
new file mode 100644
index 00000000..88432ea2
--- /dev/null
+++ b/api-v2/tests/cognito_setup.py
@@ -0,0 +1,211 @@
+import re
+import uuid
+
+
+class AWSTemporaryUserpool():
+    """Provide a temporary user pool for development and testing purposes.
+
+    The provided userpool is empty. If mocking is enabled then changes to
+    the userpool will be destroyed when the application exits. If mocking
+    is not enabled then destroy() must be called to remove the temporary
+    user data from AWS Cognito. It is recommended to use the context manager
+    to avoid accidentally persisting development data on AWS.
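+
+    A minimal usage sketch:
+
+        with AWSTemporaryUserpool(cognito_client) as pool:
+            # pool.tmp_userpool_id, pool.tmp_client_id, and
+            # pool.tmp_client_secret are populated inside this block
+            ...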
+    """
+
+    def __init__(self, cognito_client):
+        self.cognito_client = cognito_client
+        self.tmp_userpool_id = None
+        self.tmp_client_id = None
+        self.tmp_client_secret = None
+
+    def create(self):
+        unique_poolname = f"TestUserPool{str(uuid.uuid4())}"
+        mock_pool_resp = self.cognito_client.create_user_pool(
+            PoolName=unique_poolname,
+            UsernameAttributes=['email'])
+        mock_pool_id = mock_pool_resp['UserPool']['Id']
+
+        client_response = self.cognito_client.create_user_pool_client(
+            UserPoolId=mock_pool_id,
+            ClientName="MockUserPoolClient",
+            GenerateSecret=True,
+            ExplicitAuthFlows=[
+                'ALLOW_USER_PASSWORD_AUTH',  # Enable USER_PASSWORD_AUTH flow
+                'ALLOW_REFRESH_TOKEN_AUTH'  # You can add other auth flows as needed
+            ])
+
+        self.tmp_userpool_id = mock_pool_id
+        self.tmp_client_id = client_response['UserPoolClient']['ClientId']
+        self.tmp_client_secret = client_response['UserPoolClient']['ClientSecret']
+
+        self.cognito_client.create_group(GroupName='Admins',
+                                         UserPoolId=mock_pool_id)
+        self.cognito_client.create_group(GroupName='Hosts',
+                                         UserPoolId=mock_pool_id)
+        self.cognito_client.create_group(GroupName='Guests',
+                                         UserPoolId=mock_pool_id)
+        self.cognito_client.create_group(GroupName='Coordinators',
+                                         UserPoolId=mock_pool_id)
+
+    def destroy(self):
+        self.cognito_client.delete_user_pool_client(
+            UserPoolId=self.tmp_userpool_id,
+            ClientId=self.tmp_client_id)
+        self.cognito_client.delete_user_pool(UserPoolId=self.tmp_userpool_id)
+        self.tmp_userpool_id = None
+        self.tmp_client_id = None
+        self.tmp_client_secret = None
+
+    def __enter__(self):
+        self.create()
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.destroy()
+        # Return a falsy value so exceptions raised inside the with
+        # block are propagated instead of being silently suppressed.
+        return False
+
+
+# class AWSMockService():
+#     """Start and stop AWS Cognito mocking using moto.
+
+#     The mocking service will stop when the context exits. The
+#     mocked AWS Cognito requests will not persist outside of the context.
+#     """
+
+#     TEST_USERS = [{
+#         "email": "test@test.com",
+#         "password": "Test!123"
+#     }, {
+#         "email": "testhost@test.com",
+#         "password": "Test!123"
+#     }, {
+#         "email": "testcoordinator@test.com",
+#         "password": "Test!123"
+#     }]
+
+#     def __init__(self, flask_app):
+#         from moto import mock_cognitoidp
+#         self.userpool = AWSTemporaryUserpool(flask_app)
+#         self.mock_service = mock_cognitoidp()
+#         self.app = flask_app
+#         self.app.after_request(self.auto_signup_user_after_request)
+#         self.app.before_request(self.create_test_users)
+#         self.test_users_created = False

+#     def create_test_users(self):
+#         """Create a set of test users before the first request is made."""
+#         if self.test_users_created == True:
+#             return

+#         for user in AWSMockService.TEST_USERS:
+#             email = user["email"]

+#             try:
+#                 remove_user({"email": email})
+#             except AuthError:
+#                 # This error is expected if the local database
+#                 # Does not have the test user yet. We can ignore it.
+#                 pass

+#             try:
+#                 signUpAdmin({
+#                     "email": email,
+#                     "password": user["password"],
+#                     "firstName": "testuser_firstname",
+#                     "lastName": "testuser_lastname"
+#                 })
+#                 self._auto_signup_user(email)
+#                 self.app.logger.info(f"Created test user: {email}")
+#             except AuthError as e:
+#                 self.app.logger.warning(
+#                     f"Failed to create test user: {email}: {e.error}")

+#         self.test_users_created = True

+#     def add_aws_userpool_user(self, email, password, attributes=None):
+#         """Adds a new user to the temporary user pool.

+#         Adds the user with the given username, password, and attributes.
+#         Attributes should be a list of dictionaries, each containing a 'Name' and 'Value' key.
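+
+#         For example, a single email attribute would look like
+#         (illustrative value):
+#             [{'Name': 'email', 'Value': 'user@example.com'}]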
+#         """
+#         if attributes is None:
+#             attributes = []

+#         try:
+#             response = self.app.boto_client.admin_create_user(
+#                 UserPoolId=self.app.config["COGNITO_USER_POOL_ID"],
+#                 Username=email,
+#                 TemporaryPassword=password,
+#                 UserAttributes=attributes,
+#                 MessageAction='SUPPRESS')
+#             self._auto_signup_user(email)
+#             self.app.logger.info(
+#                 f"Added user {email} to the temporary user pool")
+#             return response
+#         except Exception as e:
+#             self.app.logger.error(f"Failed to add user {email}: {str(e)}")
+#             raise

+#     def _auto_signup_user(self, email) -> bool:
+#         """Auto-confirm a new user.

+#         Return True if successful and False otherwise.
+#         """
+#         confirm_response = self.app.boto_client.admin_confirm_sign_up(
+#             UserPoolId=self.app.config["COGNITO_USER_POOL_ID"], Username=email)
+#         if confirm_response['ResponseMetadata']['HTTPStatusCode'] == 200:
+#             self.app.logger.info(f"Auto-confirmed new user: {email}")
+#             return True
+#         else:
+#             self.app.logger.warning(
+#                 f"Failed to auto-confirm new user: {email}")
+#             return False

+#     def auto_signup_user_after_request(self, response):
+#         """Automatically verify new users by listening for signup.

+#         Confirms the user if the signup was successful.
+#         """
+#         # The alternative approaches are to use a lambda pre-signup
+#         # trigger to automatically verify new users, or to include
+#         # conditional logic within our endpoint. The lambda approach
+#         # requires more overhead, and conditional logic within the endpoint
+#         # risks adding a bug to the production code.
+#         if request.endpoint and ('signup' in request.endpoint.lower()
+#                                  ) and 200 <= response.status_code < 300:
+#             email = request.json['email']
+#             if self._auto_signup_user(email):
+#                 new_response = response.get_json()
+#                 new_response['UserConfirmed'] = True
+#                 response.data = json.dumps(new_response)
+#         return response

+#     def start(self):
+#         self.mock_service.start()
+#         self.app._boto_client = None
+#         self.app.config["COGNITO_REGION"] = "us-east-1"
+#         self.app.config["COGNITO_ACCESS_ID"] = self.mock_service.FAKE_KEYS[
+#             'AWS_ACCESS_KEY_ID']
+#         self.app.config["COGNITO_ACCESS_KEY"] = self.mock_service.FAKE_KEYS[
+#             'AWS_SECRET_ACCESS_KEY']
+#         self.userpool.create()

+#         self.app.logger.info("Started mock AWS Cognito service")

+#     def stop(self):
+#         self.userpool.destroy()
+#         self.mock_service.stop()
+#         self.app.config["COGNITO_REGION"] = None
+#         self.app.config["COGNITO_ACCESS_ID"] = None
+#         self.app.config["COGNITO_ACCESS_KEY"] = None
+#         self.app._boto_client = None

+#         self.app.logger.info("Stopped mock AWS Cognito service")

+#     def __enter__(self):
+#         self.start()
+#         return self

+#     def __exit__(self, exc_type, exc_value, traceback):
+#         self.stop()
+#         return self
diff --git a/api-v2/tests/conftest.py b/api-v2/tests/conftest.py
index db269276..268f6133 100644
--- a/api-v2/tests/conftest.py
+++ b/api-v2/tests/conftest.py
@@ -1,19 +1,26 @@
 import os
 import pytest
-from pytest import MonkeyPatch
+
+from fastapi.testclient import TestClient
+
+import boto3
+from moto import mock_aws
+
 import sqlalchemy as sa
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm import sessionmaker, Session
 from sqlalchemy.pool import StaticPool
-from fastapi.testclient import TestClient

 from app.main import app as main_app
 from app.core.db import Base
-from app.modules.deps import get_db
+from app.core.config import get_settings
+from app.modules.deps import db_session, get_cognito_client
+
+import tests.cognito_setup as cognito_setup


 @pytest.fixture
-def client():
+def session_factory() -> sessionmaker:
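+    """Return a session factory bound to a fresh in-memory SQLite database."""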
SQLALCHEMY_DATABASE_URL = "sqlite+pysqlite:///:memory:" engine = sa.create_engine( @@ -25,53 +32,52 @@ def client(): autoflush=False, bind=engine) + import app.seed Base.metadata.create_all(bind=engine) - def override_get_db(): + return TestingSessionLocal + + +@pytest.fixture(scope="module") +def api_settings(): + """Configure test settings.""" + os.environ["COGNITO_ACCESS_ID"] = "testing" + os.environ["COGNITO_ACCESS_KEY"] = "testing" + os.environ["COGNITO_CLIENT_ID"] = "testing" + os.environ["COGNITO_CLIENT_SECRET"] = "testing" + os.environ["COGNITO_REGION"] = "us-east-1" + os.environ["COGNITO_REDIRECT_URI"] = "testing" + os.environ["COGNITO_USER_POOL_ID"] = "testing" + os.environ["ROOT_URL"] = "testing" + os.environ["DATABASE_URL"] = "testing" + return get_settings() + + +@pytest.fixture(scope="module") +def cognito_client(api_settings): + """Return a mocked Cognito IDP client.""" + with mock_aws(): + client = boto3.client("cognito-idp", + region_name=api_settings.COGNITO_REGION) + with cognito_setup.AWSTemporaryUserpool(client) as temp_pool: + api_settings.COGNITO_USER_POOL_ID = temp_pool.tmp_userpool_id + api_settings.COGNITO_CLIENT_ID = temp_pool.tmp_client_id + api_settings.COGNITO_CLIENT_SECRET = temp_pool.tmp_client_secret + + yield client + + +@pytest.fixture +def client(session_factory) -> TestClient: + + def override_db_session(): try: - session = TestingSessionLocal() + session = session_factory() yield session finally: session.close() - main_app.dependency_overrides[get_db] = override_get_db + main_app.dependency_overrides[db_session] = override_db_session + main_app.dependency_overrides[get_cognito_client] = lambda: None return TestClient(main_app) - - -# @pytest.fixture -# def empty_environment(monkeypatch: MonkeyPatch) -> MonkeyPatch: -# """Create an isolated environment for testing purposes. - -# The environment variables are cleared to ensure the -# configuration object is not dependent on the machine configuration. -# """ -# for env_var in os.environ.keys(): -# monkeypatch.delenv(env_var) -# return monkeypatch - -# @pytest.fixture -# def fake_prod_env(empty_environment: MonkeyPatch) -> MonkeyPatch: -# """Define a fake production environment. - -# Define a fake production environment by setting each of the required -# production configuration variables with fake values. -# """ -# empty_environment.setenv("SECRET_KEY", -# "A completely made up fake secret !@#$12234") -# empty_environment.setenv("DATABASE_URL", "sqlite:///:memory:") -# empty_environment.setenv("COGNITO_CLIENT_ID", "Totally fake client id") -# empty_environment.setenv("COGNITO_CLIENT_SECRET", -# "Yet another fake secret12") -# empty_environment.setenv("COGNITO_REGION", -# "Not even the region actually exists") -# empty_environment.setenv( -# "COGNITO_REDIRECT_URI", -# "Redirect your way back to writing more test cases") -# empty_environment.setenv("COGNITO_USER_POOL_ID", -# "Water's warm. 
IDs are fake") -# empty_environment.setenv("COGNITO_ACCESS_ID", -# "If you need fake access, use this ID") -# empty_environment.setenv("COGNITO_ACCESS_KEY", -# "WARNING: This is a real-ly fake key 12345a6sdf") -# return empty_environment diff --git a/api-v2/tests/integration/setup_utils.py b/api-v2/tests/integration/setup_utils.py deleted file mode 100644 index 279d4d60..00000000 --- a/api-v2/tests/integration/setup_utils.py +++ /dev/null @@ -1,72 +0,0 @@ -from fastapi import TestClient - - -def signup_user(client: TestClient, - email: str, - password: str, - firstName: str = None, - middleName: str = None, - lastName: str = None) -> None: - if not firstName: firstName = "firstName" - if not lastName: lastName = "lastName" - if not middleName: middleName = "" - - signup_response = client.post('/api/auth/signup/host', - json={ - 'email': email, - 'password': password, - 'firstName': firstName, - 'middleName': middleName, - 'lastName': lastName - }) - # Currently the signup returns different response structures for auth - # errors and "Bad Request" errors. Ideally the structure of the response - # would always be the same where there is an error. - assert signup_response.status_code != 400, f"User factory failed to signup user: {signup_response.status}, {signup_response.text}" - assert signup_response.status_code == 200, f"User factory failed to signup user: {signup_response.json['message']}" - - -def confirm_user(boto_client, email: str) -> None: - confirm_response = boto_client.admin_confirm_sign_up( - UserPoolId=app.config["COGNITO_USER_POOL_ID"], Username=email) - assert confirm_response['ResponseMetadata'][ - 'HTTPStatusCode'] == 200, f"User factory failed to confirm user" - - -def create_user(client: TestClient, - email: str, - password: str, - firstName: str = None, - middleName: str = None, - lastName: str = None) -> None: - ''' - Signup and confirm a new user. Fail the test if the - signup or confirm operation fails. - ''' - signup_user(client, email, password, firstName, middleName, lastName) - confirm_user(client, email) - - -def signin_user(client: TestClient, email: str, password: str) -> str: - ''' - Signin a user and return the JWT. Fail the test if the - signin operation fails. - ''' - response = client.post('/api/auth/signin', - json={ - 'email': email, - 'password': password - }) - assert response.status_code == 200, "Signin failed" - assert "token" in response.json, "Signin succeeded but no token provided" - return response.json['token'] - - -def create_and_signin_user(client: TestClient, email: str, - password: str) -> (str, str): - ''' - Signup, confirm, and signin a new user. Return the JWT. - Fail the test if the signup, confirm, or signin operation fails. 
-    '''
-    create_user(client, email, password)
-    return signin_user(client, email, password)
diff --git a/api-v2/tests/integration/test_authentication.py b/api-v2/tests/integration/test_authentication.py
index b59a9dea..5f5d5ae8 100644
--- a/api-v2/tests/integration/test_authentication.py
+++ b/api-v2/tests/integration/test_authentication.py
@@ -1,329 +1,376 @@
 import string
-import re
+import secrets

 import pytest
-from werkzeug.http import parse_cookie
-from openapi_server.models.database import DataAccessLayer, User
-from tests.setup_utils import create_user, create_and_signin_user
+from fastapi.testclient import TestClient
+
+from app.modules.deps import get_cognito_client
+from app.modules.access.models import User
+
+PATH = "/api/auth"
+secretsGenerator = secrets.SystemRandom()
+
+
+@pytest.fixture
+def client(client, cognito_client) -> TestClient:
+    client.app.dependency_overrides[
+        get_cognito_client] = lambda: cognito_client
+
+    return client
+
+
+def signup_user(client: TestClient,
+                email: str,
+                password: str,
+                first_name: str = None,
+                middle_name: str = None,
+                last_name: str = None) -> None:
+    if not first_name:
+        first_name = "firstName"
+    if not last_name:
+        last_name = "lastName"
+    if not middle_name:
+        middle_name = ""
+
+    signup_response = client.post(PATH + '/signup',
+                                  json={
+                                      'email': email,
+                                      'password': password,
+                                      'firstName': first_name,
+                                      'middleName': middle_name,
+                                      'lastName': last_name,
+                                      'role': 'host',
+                                  })
+    assert signup_response.status_code != 400, signup_response.text
+    assert signup_response.status_code == 200, signup_response.text
+
+
+def confirm_user(cognito_client, api_settings, email: str) -> None:
+    confirm_response = cognito_client.admin_confirm_sign_up(
+        UserPoolId=api_settings.COGNITO_USER_POOL_ID, Username=email)
+    assert confirm_response['ResponseMetadata'][
+        'HTTPStatusCode'] == 200, confirm_response
+
+
+def create_user(client: TestClient,
+                api_settings,
+                cognito_client,
+                email: str,
+                password: str,
+                firstName: str = None,
+                middleName: str = None,
+                lastName: str = None) -> None:
+    """Sign up and confirm a new user.
+
+    Fail the test if the signup or confirm operation fails.
+    """
+    signup_user(client, email, password, firstName, middleName, lastName)
+    confirm_user(cognito_client, api_settings, email)
+
+
+def signin_user(client: TestClient, email: str, password: str) -> str:
+    """
+    Sign in a user and return the JWT. Fail the test if the
+    signin operation fails.
+    """
+    response = client.post(PATH + '/signin',
+                           json={
+                               'email': email,
+                               'password': password
+                           })
+    assert response.status_code == 200, "Signin failed"
+    assert "token" in response.json(), "Signin succeeded but no token provided"
+    return response.json()['token']
+
+
+def create_and_signin_user(client: TestClient, api_settings, cognito_client,
+                           email: str, password: str) -> str:
+    """
+    Sign up, confirm, and sign in a new user. Return the JWT.
+    Fail the test if the signup, confirm, or signin operation fails.
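+
+    A typical call (values illustrative):
+        jwt = create_and_signin_user(client, api_settings, cognito_client,
+                                     'user@example.com', 'Fakepass%^&7!asdf')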
+ """ + create_user(client, api_settings, cognito_client, email, password) + return signin_user(client, email, password) def strip_punctuation(text): return text.translate(str.maketrans("", "", string.punctuation)) + def test_signin_with_fake_credentials(client): - response = client.post( - '/api/auth/signin', - json = { - 'email': 'inbox928@placeholder.org', - 'password': '_pp#FXo;h$i~' - } - ) - assert response.status_code == 401 + response = client.post(PATH + '/signin', + json={ + 'email': 'mdndndkde@email.com', + 'password': '_pp#FXo;h$i~' + }) + + body = response.json() + assert response.status_code == 400, body + assert body["detail"]["code"] == "UserNotFoundException", body + def test_signin_without_email_format(client): - ''' - Attempts to login using an email field that - does not follow the email format will return a - 400 error instead of 401. - ''' - response = client.post( - '/api/auth/signin', - json = { - 'email': 'notta_email', - 'password': '_pp#FXo;h$i~' - } - ) - - assert response.status_code == 400 - assert "is not a email" in strip_punctuation(response.json["detail"].lower()) - -@pytest.mark.parametrize('endpoint', ['/api/auth/signup/host','/api/auth/signup/coordinator']) -def test_signup_with_missing_fields(client, endpoint): - ''' - Attempts to login without all required fields returns - a bad request error. - ''' - BAD_SIGNUP_REQUESTS = [ - { - 'email': 'inbox928@placeholder.org', - 'password': 'Fakepass%^&7!asdf' - }, - { - 'email': 'inbox928@placeholder.org', - 'password': 'Fakepass%^&7!asdf', - 'lastName': 'Josh' - }, - { - 'email': 'inbox928@placeholder.org', - 'firstName': 'Josh', - 'lastName': 'Douglas' - }, - { - 'password': 'Fakepass%^&7!asdf', - 'firstName': 'Josh', - 'lastName': 'Douglas' - }, - { - } - ] + """Test login using a malformed email.""" + response = client.post(PATH + '/signin', + json={ + 'email': 'not_an_email', + 'password': '_pp#FXo;h$i~' + }) + + assert response.status_code == 422 + assert "not a valid email address" in response.text, response + + +def test_signup_with_missing_fields(client): + """Test attempts to sign-up without all required fields.""" + BAD_SIGNUP_REQUESTS = [{ + 'email': 'inbox928@placeholder.org', + 'password': 'Fakepass%^&7!asdf' + }, { + 'email': 'inbox928@placeholder.org', + 'password': 'Fakepass%^&7!asdf', + 'lastName': 'test' + }, { + 'email': 'inbox928@placeholder.org', + 'firstName': 'test', + 'lastName': 'test' + }, { + 'password': 'Fakepass%^&7!asdf', + 'firstName': 'test', + 'lastName': 'test' + }, { + 'password': 'Fakepass%^&7!asdf', + 'role': 'guest' + }, {}] for req in BAD_SIGNUP_REQUESTS: - response = client.post(endpoint, json = req) - assert response.status_code == 400, req - assert 'detail' in response.json and 'required property' in response.json['detail'], req + response = client.post(PATH + '/signup', json=req) + assert response.status_code == 422, req + assert 'Field required' in response.text, req + def test_refresh_without_cookie(client): - ''' - Attempts to use the refresh endpoint without a session - cookie attached should return a 'cookie missing' - error instead of an authentication failure. 
- ''' - response = client.get( - 'api/auth/refresh' - ) - assert response.status_code == 400 - assert "missing cookie" in response.json['detail'].lower() + """Test attempts to use the refresh endpoint without a session cookie.""" + response = client.get(PATH + '/refresh') + assert response.status_code == 401 + assert "Missing refresh token" in response.json()['detail'] + def test_session_without_cookie(client): - ''' - Attempts to use the refresh endpoint without a session - cookie attached should return a 'cookie missing' - error instead of an authentication failure. - ''' + """Test attempt to use the session endpoint without a session cookie. + """ response = client.get( - 'api/auth/session', - headers={"Authorization": "Bearer fake_jwt_token_here"} - ) - assert response.status_code == 400 - assert re.search(r"missing cookie.*session", response.json['detail'], flags=re.IGNORECASE) + PATH + '/session', + headers={"Authorization": "Bearer fake_jwt_token_here"}) + assert response.status_code == 401 + assert "Missing session cookies" in response.json()['detail'] + def test_incorrect_JWT_fail_auth(client): - ''' - Attempts to use an incorrect JWT with the user endpoint returns - and authentication error. - ''' + """Test attempts to use an incorrect JWT with the user endpoint.""" response = client.get( - 'api/auth/user', - headers={"Authorization": "Bearer fake_jwt_token_here"} - ) + '/api/user', headers={"Authorization": "Bearer fake_jwt_token_here"}) assert response.status_code == 401 - assert re.search(r"invalid.*token", response.json['message'], flags=re.IGNORECASE) + assert "Missing id token" in response.json()['detail'] -def _signup_unconfirmed(signup_endpoint, client, is_mocking): - email = 'inbox928@placeholder.org' + +def _signup_unconfirmed(signup_endpoint, role, client, expect_user_confirmed): + email = f'{secretsGenerator.randint(1_000, 2_000)}@email.com' password = 'Fakepass%^&7!asdf' - signup_response = client.post( - signup_endpoint, - json = { - 'email': email, - 'password': password, - "firstName": "valid name", - "lastName": "valid name" - } - ) - - assert signup_response.status_code == 200, "Signup attempt failed" - expect_user_confirmed = is_mocking - assert signup_response.json["UserConfirmed"] == expect_user_confirmed, ( - "When using the real AWS service newly signed up users should not be confirmed. " - "Mocked users, however, should be auto-confirmed for convenience.") - - signin_response = client.post( - '/api/auth/signin', - json = { - 'email': email, - 'password': password - } - ) - - + signup_response = client.post(signup_endpoint, + json={ + 'email': email, + 'password': password, + "firstName": "valid name", + "role": role, + }) + + assert signup_response.status_code == 200, signup_response.text + assert signup_response.json( + )["UserConfirmed"] == expect_user_confirmed, signup_response.text + + signin_response = client.post(PATH + '/signin', + json={ + 'email': email, + 'password': password + }) + if expect_user_confirmed: - assert signin_response.status_code == 200, "Mocked users should be able to signin without confirmation." - assert "token" in signin_response.json, "Signin succeeded but no token provided" + assert signin_response.status_code == 200, signin_response.text + assert "token" in signin_response.json(), signin_response.text else: - assert signin_response.status_code == 401, ( - "When using the real AWS service signin should fail since user is unconfirmed. 
") - assert signin_response.json["code"] == "UserNotConfirmedException" + assert signin_response.status_code == 400, signin_response.text + assert signin_response.json()["detail"][ + "code"] == "UserNotConfirmedException", signin_response.text -def test_signup_unconfirmed_host(client, is_mocking): - ''' - Use the host signup endpoint to + +def test_signup_unconfirmed_host(client): + """ + Use the host signup endpoint to test that unconfirmed accounts cannot be used to login to the API. Mocked users are automatically confirmed. - ''' - _signup_unconfirmed('/api/auth/signup/host', client, is_mocking) + """ + _signup_unconfirmed(PATH + "/signup", "host", client, False) + -def test_signup_unconfirmed_coordinator(client, is_mocking): - ''' - Use the coordinator signup endpoint to +def test_signup_unconfirmed_coordinator(client): + """ + Use the coordinator signup endpoint to test that unconfirmed accounts cannot be used to login to the API. Mocked users are automatically confirmed. - ''' - _signup_unconfirmed('/api/auth/signup/coordinator', client, is_mocking) - -def test_signup_confirmed(client): - ''' - Test that confirmed accounts can be used to login to the API. - ''' - EMAIL = 'inbox928@placeholder.org' - PASSWORD = 'Fakepass%^&7!asdf' - create_user(client, EMAIL, PASSWORD) - - signin_response = client.post( - '/api/auth/signin', - json = { - 'email': EMAIL, - 'password': PASSWORD - } - ) + """ + _signup_unconfirmed(PATH + "/signup", "coordinator", client, False) + + +def test_signup_confirmed(client, api_settings, cognito_client): + """Test that confirmed accounts can be used to login to the API.""" + email = f'{secretsGenerator.randint(1_000, 2_000)}@email.com' + password = 'Fakepass%^&7!asdf' + create_user(client, api_settings, cognito_client, email, password) + + signin_response = client.post(PATH + '/signin', + json={ + 'email': email, + 'password': password + }) assert signin_response.status_code == 200, "Signup attempt failed" - assert "token" in signin_response.json, "Signin succeeded but no token provided" - assert len(signin_response.json["token"]) > 0 + assert "token" in signin_response.json( + ), "Signin succeeded but no token provided" + assert len(signin_response.json()["token"]) > 0 + def test_weak_passwords_rejected(client): - ''' - Test that attempting to signup a new user with a password + """Test that attempting to sign-up a new user with a password that does not meet AWS Cognito password complexity requirements returns a valid error. - ''' - email = 'inbox928@placeholder.org' + """ + email = f'{secretsGenerator.randint(1_000, 2_000)}@email.com' password = 'weakpa55' - signup_response = client.post( - '/api/auth/signup/host', - json = { - 'email': email, - 'password': password, - 'firstName': 'unqiue', - 'lastName': 'name' - } - ) + signup_response = client.post(PATH + '/signup', + json={ + 'email': email, + 'password': password, + 'firstName': 'unqiue', + 'lastName': 'name', + 'role': 'host' + }) assert signup_response.status_code == 400, "The weak password worked for signup!" - assert "password did not conform with policy" in signup_response.json["message"].lower() - -# TODO: This test is currently disabled because the token returned from moto is different from the token returned from the real AWS service. 
-@pytest.mark.skip(reason="There is a bug involving the contents of the token being returned from moto being different from the token returned from the real AWS service.") -def test_basic_auth_flow(client): - ''' - Create a new user, confirm it, and login using the - /signin endpoint, and use the returned JWT to access + assert "Failed to create user" in signup_response.text, signup_response.text + + +def test_basic_auth_flow(client, api_settings, cognito_client): + """Create a new user, confirm it, and login using the + /signin endpoint, and use the returned JWT to access a protected endpoint. - ''' - EMAIL = 'inbox928@placeholder.org' + """ + EMAIL = f'{secretsGenerator.randint(1_000, 2_000)}@email.com' PASSWORD = 'Fake4!@#$2589FFF' - FIRST_NAME = "PNAU" - LAST_NAME = "Hyperbolic" - create_user(client, EMAIL, PASSWORD, firstName=FIRST_NAME, lastName=LAST_NAME) - - response = client.post( - '/api/auth/signin', - json = { - 'email': EMAIL, - 'password': PASSWORD - } - ) + FIRST_NAME = "test" + LAST_NAME = "test" + create_user(client, + api_settings, + cognito_client, + EMAIL, + PASSWORD, + firstName=FIRST_NAME, + lastName=LAST_NAME) + + response = client.post(PATH + '/signin', + json={ + 'email': EMAIL, + 'password': PASSWORD + }) assert response.status_code == 200, "Signin failed" - assert 'token' in response.json, 'Signin succeeded but token field missing from response' - jwt = response.json['token'] + assert 'token' in response.json(), 'Signin succeeded but token field missing from response' + jwt = response.json()['token'] assert jwt is not None, 'Signin succeeded but returned empty jwt' assert len(jwt) > 0 - response = client.get( - 'api/auth/user', - headers={"Authorization": f"Bearer {jwt}"} - ) + response = client.get('/api/user', + headers={"Authorization": f"Bearer {jwt}"}) assert response.status_code == 200, '/user authentication failed' - assert 'user' in response.json - assert 'email' in response.json['user'] - assert response.json['user']['email'] == EMAIL - assert response.json['user']['firstName'] == FIRST_NAME - assert response.json['user']['middleName'] == '' - assert response.json['user']['lastName'] == LAST_NAME - -def test_signin_returns_session_cookie(client): - ''' - Test that the /signin endpoint returns a session cookie. + assert response.json()['email'] == EMAIL + assert response.json()['firstName'] == FIRST_NAME + assert response.json()['middleName'] == '' + assert response.json()['lastName'] == LAST_NAME + + +def test_signin_returns_refresh_token(client, api_settings, cognito_client): + """Test that the /signin endpoint returns a session cookie. + The session cookie stores the refresh token. 
-    '''
-    EMAIL = 'inbox928@placeholder.org'
+    """
+    EMAIL = f'{secretsGenerator.randint(1_000, 2_000)}@email.com'
     PASSWORD = 'Fake4!@#$2589FFF'
-    create_user(client, EMAIL, PASSWORD)
-    response = client.post(
-        '/api/auth/signin',
-        json = {
-            'email': EMAIL,
-            'password': PASSWORD
-        }
-    )
+    create_user(client, api_settings, cognito_client, EMAIL, PASSWORD)
+    response = client.post(PATH + '/signin',
+                           json={
+                               'email': EMAIL,
+                               'password': PASSWORD
+                           })
 
     assert response.status_code == 200, "Signin failed"
-    all_cookies = map(parse_cookie, response.headers.getlist("Set-Cookie"))
-    session_cookie_filter = filter(lambda cookie: "session" in cookie, all_cookies)
-    session_cookie = next(session_cookie_filter)
-    assert len(session_cookie["session"]) > 0, "Session cookie is empty"
-    with pytest.raises(StopIteration):
-        # Only one session cookie should be available
-        next(session_cookie_filter)
-
-def test_refresh_endpoint(client):
-    '''
-    Test refreshing a JWT using the /refresh endpoint.
-    '''
-    EMAIL = 'inbox928@placeholder.org'
+    all_cookies = response.cookies
+    assert all_cookies.get("refresh_token"), "Session cookie is empty"
+
+
+def test_refresh_endpoint(client, api_settings, cognito_client):
+    """Test refreshing a JWT using the /refresh endpoint."""
+    EMAIL = f'{secretsGenerator.randint(1_000, 2_000)}@email.com'
     PASSWORD = 'Fake4!@#$2589FFF'
-    create_and_signin_user(client, EMAIL, PASSWORD)
+    create_and_signin_user(client, api_settings, cognito_client, EMAIL,
+                           PASSWORD)
 
     # The test_client automatically attaches the session cookie to the request
     # The session cookie stores the refresh token.
-    response = client.get(
-        'api/auth/refresh',
-    )
+    response = client.get(PATH + '/refresh')
 
-    assert response.status_code == 200, f"refresh failed: {response.json}"
-    assert 'token' in response.json, 'refresh succeeded but token field missing from response'
+    assert response.status_code == 200, response.text
+    assert 'token' in response.json(), response.text
 
-def test_session_endpoint(client):
-    '''
-    Test refreshing a JWT using the /session endpoint.
-    '''
-    EMAIL = 'inbox928@placeholder.org'
+
+def test_session_endpoint(client, api_settings, cognito_client):
+    """Test refreshing a JWT using the /session endpoint."""
+    EMAIL = f'{secretsGenerator.randint(1_000, 2_000)}@email.com'
     PASSWORD = 'Fake4!@#$2589FFF'
-    jwt = create_and_signin_user(client, EMAIL, PASSWORD)
+    jwt = create_and_signin_user(client, api_settings, cognito_client, EMAIL,
+                                 PASSWORD)
 
     # The test_client automatically attaches the session cookie to the request
     # The session cookie stores the refresh token.
-    response = client.get(
-        'api/auth/session',
-        headers={"Authorization": f"Bearer {jwt}"}
-    )
+    response = client.get(PATH + '/session',
+                          headers={"Authorization": f"Bearer {jwt}"})
 
-    assert response.status_code == 200, f"session failed: {response.json}"
-    assert 'token' in response.json, 'session succeeded but token field missing from response'
+    assert response.status_code == 200, response.text
+    assert 'token' in response.json(), response.text
 
-def test_user_signup_rollback(app):
-    """ Verify that a failed signup with cognito
-    reverts the local DB entry of the user's email."""
+def test_user_signup_rollback(client, api_settings, cognito_client, session_factory):
+    """Test rollback of a failed sign-up with Cognito.
+ + Ensure the local DB entry of the user's email is deleted.""" rollback_email = 'test_user_signup_rollback@fake.com' - signup_response = app.test_client().post( - '/api/auth/signup/host', - json = { - 'email': rollback_email, - 'password': 'lol', - 'firstName': 'firstname', - 'lastName': 'lastname' - } - ) + signup_response = client.post(PATH + '/signup', + json={ + 'email': rollback_email, + 'password': 'lol', + 'firstName': 'firstname', + 'lastName': 'lastname', + 'role': 'host', + }) + assert signup_response.status_code == 400 - with pytest.raises(app.boto_client.exceptions.UserNotFoundException): - app.boto_client.admin_delete_user( - UserPoolId=app.config['COGNITO_USER_POOL_ID'], - Username=rollback_email - ) - with DataAccessLayer.session() as sess: - rolledback_user = sess.query(User).filter_by(email=rollback_email).first() - # This assertion will fail on `main` because no rollback is happening - assert rolledback_user is None \ No newline at end of file + + with pytest.raises(cognito_client.exceptions.UserNotFoundException): + cognito_client.admin_delete_user( + UserPoolId=api_settings.COGNITO_USER_POOL_ID, + Username=rollback_email) + + with session_factory() as sess: + rolledback_user = sess.query(User).filter_by( + email=rollback_email).first() + assert rolledback_user is None diff --git a/api-v2/tests/integration/test_user_repo.py b/api-v2/tests/integration/test_user_repo.py index a929e808..c1bd4b51 100644 --- a/api-v2/tests/integration/test_user_repo.py +++ b/api-v2/tests/integration/test_user_repo.py @@ -1,52 +1,69 @@ -import pytest +import pytest +from sqlalchemy.orm import Session from sqlalchemy.exc import IntegrityError -from openapi_server.models.database import User -from openapi_server.models.user_roles import UserRole -from openapi_server.repositories.user_repo import UserRepository +from app.modules.access.models import User +from app.modules.access.user_roles import UserRole +from app.modules.access.user_repo import UserRepository -def test_user_role_required(empty_db_session): - new_user = User(email="realemail@fakedomain.com", firstName="realemail@fakedomain.com", middleName="realemail@fakedomain.com", +def test_user_role_required(session_factory: Session): + with session_factory() as empty_db_session: + new_user = User(email="realemail@fakedomain.com", + firstName="realemail@fakedomain.com", + middleName="realemail@fakedomain.com", lastName="realemail@fakedomain.com") - empty_db_session.add(new_user) - with pytest.raises(IntegrityError, match="NOT NULL constraint failed"): - empty_db_session.commit() + empty_db_session.add(new_user) + with pytest.raises(IntegrityError, match="NOT NULL constraint failed"): + empty_db_session.commit() - with pytest.raises(TypeError): - repo = UserRepository(empty_db_session) - repo.add_user(email="realemail@fakedomain.com", firstName="realemail@fakedomain.com", middleName="realemail@fakedomain.com", + with pytest.raises(TypeError): + repo = UserRepository(empty_db_session) + repo.add_user(email="realemail@fakedomain.com", + firstName="realemail@fakedomain.com", + middleName="realemail@fakedomain.com", lastName="realemail@fakedomain.com") -def test_add_user_firstname_only(empty_db_session): - ''' - Verify that user middle and last name are not required. + +def test_add_user_firstname_only(session_factory: Session): + """Verify that user middle and last name are not required. + In some cultures, such as Indonesian and Icelandic, people may have only one name. 
- ''' - repo = UserRepository(empty_db_session) - new_user = repo.add_user(email="realemail@fakedomain.com", firstName="name", role=UserRole.GUEST) - assert new_user.role.name == UserRole.GUEST.value - assert new_user.firstName == "name" - assert new_user.middleName == None - assert new_user.lastName == None - assert new_user.email == "realemail@fakedomain.com" - -def test_single_char_name(empty_db_session): - ''' - Verify that user names can be just one character, per the + """ + with session_factory() as empty_db_session: + repo = UserRepository(empty_db_session) + new_user = repo.add_user(email="realemail@fakedomain.com", + firstName="name", + role=UserRole.GUEST) + assert new_user.role.type == UserRole.GUEST.value + assert new_user.firstName == "name" + assert new_user.middleName == None + assert new_user.lastName == None + assert new_user.email == "realemail@fakedomain.com" + + +def test_single_char_name(session_factory: Session): + """Verify that user names can be just one character, per the US Web Design System Guidance. - ''' - repo = UserRepository(empty_db_session) - new_user = repo.add_user(email="realemail@fakedomain.com", firstName="n", role=UserRole.GUEST) - assert new_user.role.name == UserRole.GUEST.value - assert new_user.firstName == "n" - assert new_user.middleName == None - assert new_user.lastName == None - assert new_user.email == "realemail@fakedomain.com" - -def test_firstname_required(empty_db_session): - ''' - Test that the firstname must at least contain one non-space character. - ''' - repo = UserRepository(empty_db_session) - with pytest.raises(ValueError, match="firstName must contain at least one non-space character"): - repo.add_user(email="realemail@fakedomain.com", firstName=" ", role=UserRole.GUEST) \ No newline at end of file + """ + with session_factory() as empty_db_session: + repo = UserRepository(empty_db_session) + new_user = repo.add_user(email="realemail@fakedomain.com", + firstName="n", + role=UserRole.GUEST) + assert new_user.role.type == UserRole.GUEST.value + assert new_user.firstName == "n" + assert new_user.middleName == None + assert new_user.lastName == None + assert new_user.email == "realemail@fakedomain.com" + + +def test_firstname_required(session_factory: Session): + """Test that the firstname must at least contain one non-space character.""" + with session_factory() as empty_db_session: + repo = UserRepository(empty_db_session) + with pytest.raises( + ValueError, + match="firstName must contain at least one non-space character"): + repo.add_user(email="realemail@fakedomain.com", + firstName=" ", + role=UserRole.GUEST) diff --git a/api-v2/tests/unit/relationship_management/__init__.py b/api-v2/tests/unit/relationship_management/__init__.py new file mode 100644 index 00000000..e69de29b From 46eb2cf90537be44e2fb3718f9e8361969449988 Mon Sep 17 00:00:00 2001 From: "Mr. Paul" Date: Sun, 15 Sep 2024 00:03:40 -0700 Subject: [PATCH 48/70] fastapi-migration: Delete test_mocking These tests aren't needed anymore. 
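
The deleted module tested a global moto mocking service (AWSMockService / AWSTemporaryUserpool). The FastAPI suite instead injects a mocked Cognito client into each test through fixtures such as `cognito_client`, so these service-level checks no longer apply. A minimal sketch of that fixture style, assuming moto remains the mocking backend (the fixture body is illustrative, not the repository's actual conftest):

```python
import boto3
import pytest
from moto import mock_aws  # moto >= 5; older releases expose mock_cognitoidp


@pytest.fixture
def cognito_client():
    # moto intercepts boto3 calls in-process, so no real AWS user pools
    # are created and no temporary-pool cleanup is needed afterwards.
    with mock_aws():
        yield boto3.client("cognito-idp", region_name="us-west-2")
```
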
--- api-v2/tests/integration/test_mocking.py | 208 ----------------------- 1 file changed, 208 deletions(-) delete mode 100644 api-v2/tests/integration/test_mocking.py diff --git a/api-v2/tests/integration/test_mocking.py b/api-v2/tests/integration/test_mocking.py deleted file mode 100644 index ce7a5a70..00000000 --- a/api-v2/tests/integration/test_mocking.py +++ /dev/null @@ -1,208 +0,0 @@ -import pytest -import json -import requests -from pathlib import Path - -from openapi_server.configs.mock_aws import AWSTemporaryUserpool, AWSMockService -from tests.setup_utils import signup_user, signin_user - -def get_user_pools(boto_client): - """Helper function to count the number of user pools.""" - MAXRESULT = 60 - response = boto_client.list_user_pools(MaxResults=60) - result = response['UserPools'] - assert len(result) < MAXRESULT, ("Number of userpools exceeds 60. " + - "To get an accurate count delete user pools or implement pagination.") - return result - -def delete_temporary_userpools(app): - ''' - Delete all of the AWS Cognito temporary userpools, except - for the one in use by the current app. - - Please proceed with caution before using or modifying - this method because production userpools can be deleted - if the name is modified (unless delete protection is in place). - ''' - cur_app_poolid = app.config["COGNITO_USER_POOL_ID"] - for pool in get_user_pools(app.boto_client): - if (AWSTemporaryUserpool.is_temp_pool(pool["Name"]) - and pool["Id"] != cur_app_poolid): - app.boto_client.delete_user_pool( - UserPoolId=pool["Id"] - ) - -def count_user_pools(boto_client): - return len(get_user_pools(boto_client)) - -def tmp_userpool_count(boto_client): - user_pools = get_user_pools(boto_client) - return sum(AWSTemporaryUserpool.is_temp_pool(pool["Name"]) for pool in user_pools) - -def count_users_in_userpool(app): - user_count = 0 - pagination_token = None - userpool_id = app.config["COGNITO_USER_POOL_ID"] - while True: - if pagination_token: - response = app.boto_client.list_users(UserPoolId=userpool_id, PaginationToken=pagination_token) - else: - response = app.boto_client.list_users(UserPoolId=userpool_id) - - user_count += len(response['Users']) - - pagination_token = response.get('PaginationToken') - if not pagination_token: - break - - return user_count - -def test_AWSTemporaryUserpool_cleanup(app): - ''' - Test the temporary userpool is deleted when - destroy() is called. - ''' - initial_count = count_user_pools(app.boto_client) - - # Using the context manager to automatically create and destroy the user pool - with AWSTemporaryUserpool(app): - assert count_user_pools(app.boto_client) == (initial_count + 1), "Userpool was not created!" - - # After exiting the block, the user pool should be destroyed - final_count = count_user_pools(app.boto_client) - - assert initial_count == final_count, "User pool was not properly deleted" - -def test_AWSTemporaryUserpool_is_temp_pool_strs(): - ''' - Test that the AWSTemporaryUserpool.is_temp_pool strictly - matches the temporary user pool naming format. These tests - safegaurd against accidentally removing a production user - pool in the event of a resource leak. 
- ''' - istmp = AWSTemporaryUserpool.is_temp_pool - assert istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "t1" - assert istmp("TestUserPoola908dc0b-afb6-4f8a-aa50-96ca9b813b05"), "t2" - assert istmp("TestUserPoolca0ddbf1-53fe-4bdc-bbf7-262e97d32399"), "t3" - assert istmp("TestUserPool6eaa346c-3b55-456d-86b5-2f48ffee0b9a"), "t4" - assert istmp("TestUserPoolced3909b-36b9-4479-8584-087cfe8d7479"), "t5" - assert istmp("TestUserPool46eec7e1-10fb-46fe-8303-46310c63406c"), "t6" - - assert not istmp(""), "f1" - assert not istmp("Home Unite Us"), "f2" - assert not istmp("testUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "f3" - assert not istmp("estUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "f4" - assert not istmp("TestuserPoola809bcbf-800a-4da0-870f-a1205e8bf40a"), "f5" - assert not istmp("TestUserPool a809bcbf-800a-4da0-870f-a1205e8bf40a"), "f6" - assert not istmp("TestUserPool_a809bcbf-800a-4da0-870f-a1205e8bf40a"), "f7" - assert not istmp("TestUserPoola809bcbf_800a-4da0-870f-a1205e8bf40a"), "f8" - assert not istmp("TestUserPoola809bcbf-800ab-4da0-870f-a1205e8bf40a"), "f9" - assert not istmp("TestUserPoola809bcbf-800a-4da01-870f-a1205e8bf40a"), "f9" - assert not istmp("TestUserPoola809bcbf-800a-4da0a-870f-a1205e8bf40a"), "f10" - assert not istmp("TestUserPoola809bcbf-800a-4da0-870f1-a1205e8bf40a"), "f11" - assert not istmp("TestUserPoola809bcbf-800-4da0-870f-a1205e8bf40a"), "f12" - assert not istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40a1"), "f13" - assert not istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40aa"), "f14" - assert not istmp("TestUserPoola809bcbf-800a-4da0-870f-a1205e8bf40"), "f15" - - with pytest.raises(TypeError): - istmp(None) - -def test_AWSTemporaryUserpool_is_temp_pool_real(app): - ''' - Test that is_temp_pool properly identifies newly - created temporary userpools. - ''' - def _poolset(userpools: dict) -> set: - return set((pool["Name"] for pool in userpools)) - - existing_pools = _poolset(get_user_pools(app.boto_client)) - with AWSTemporaryUserpool(app): - new_pool = _poolset(get_user_pools(app.boto_client)).difference(existing_pools) - assert len(new_pool) == 1, "More than one temp user pool was created!" - assert AWSTemporaryUserpool.is_temp_pool(new_pool.pop()) - -def test_AWSTemporaryUserpool_count(app): - ''' - Test that the application and/or test suites do not - have a resource leak that is polluting the AWS Cognito - user pools with temporary user pools. We'll set the limit - to 30 userpools. If this number is exceeded then the developer - needs to delete the userpools. - ''' - # This function can be used to cleanup leaked - # temporary tools. Leave it commented out before - # pushing, however, to make sure we can detect - # resource leaks when they occur. - #delete_temporary_userpools(app) - assert tmp_userpool_count(app.boto_client) <= 30, ("AWS Cognito has a large number of temporary " - "userpools. We may have a userpool resource leak. " - "Delete the unused pools and search for a resource leak.") - -def test_AWSMockService(app, is_mocking): - ''' - Test that starting the AWSMocking service properly - activates and deactivates the moto mocking service. - - Ensure that calls to AWSCognito are properly - intercepted. 
- ''' - # Moto uses regex patterns to intercept all cognito public key requests - moto_fake_key_url = "https://cognito-idp.us-west-2.amazonaws.com/somekey/.well-known/jwks.json" - if is_mocking: - import moto - moto_dir = Path(moto.__file__).parent - jwks_file_path = moto_dir / "cognitoidp" / "resources" / "jwks-public.json" - assert jwks_file_path.is_file(), "Moto public key not found. Can't proceed with test" - with open(jwks_file_path, 'r') as file: - moto_jwks = json.load(file) - - actual_jwks = get_json_from_url(moto_fake_key_url) - assert actual_jwks is not None - assert actual_jwks == moto_jwks, ("The mocking service does not appear to have been started correctly " - "moto should intercept calls to AWS cognito jwks.json and return the " - "fake public key stored in the moto resources folder.") - else: - # If mocking is not enabled then our fake url request will fail - with pytest.raises(requests.exceptions.HTTPError): - get_json_from_url(moto_fake_key_url) - -def get_json_from_url(url): - response = requests.get(url) - # Raises an HTTPError if the response was an unsuccessful status code - response.raise_for_status() - return response.json() - -def test_signup_confirmation(client, is_mocking): - ''' - Test that the signup confirmation works with any confirmation - code when authentication mocking is enabled. - - When mocking is disabled a real confirmation code will be - required, so the confirmation should fail. - ''' - email = 'nottaemail@gmail.com' - signup_user(client.application, email, 'Passw0rd!') - - response = client.post( - '/api/auth/confirm', - json = { - 'email': email, - 'code': 'fakeCode' - } - ) - - if is_mocking: - assert response.status_code == 200 - else: - assert response.status_code == 401 - assert "invalid code" in response.json["message"].lower() - -def test_mock_config_includes_test_users(client, is_mocking): - ''' - Test that the mock configuration includes test users. 
-    '''
-    if not is_mocking:
-        pytest.skip("Test only applies to mock configurations")
-    for user in AWSMockService.TEST_USERS:
-        signin_user(client, user["email"], user["password"])
\ No newline at end of file

From 28f2f665de967c1a2dce56a9ac8c3f3aee7c3763 Mon Sep 17 00:00:00 2001
From: Erik
Date: Tue, 17 Sep 2024 17:30:32 -0700
Subject: [PATCH 49/70] Update import

---
 api-v2/app/main.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/api-v2/app/main.py b/api-v2/app/main.py
index b6adf287..169c0821 100644
--- a/api-v2/app/main.py
+++ b/api-v2/app/main.py
@@ -10,11 +10,11 @@ async def lifespan(app: FastAPI):
     settings = config.get_settings()
     engine = db.db_engine(settings)
 
-    import seed
+    import app.seed
 
     db.init_db(engine)
     yield
 
 
 app = FastAPI(lifespan=lifespan)
 
-app.include_router(api_router, prefix="/api")
+app.include_router(api_router, prefix="/api")
\ No newline at end of file

From a3e37419b9096faad072977c103b972c68f6366b Mon Sep 17 00:00:00 2001
From: Erik
Date: Tue, 17 Sep 2024 17:30:55 -0700
Subject: [PATCH 50/70] Update confirm sign up route to work with FastAPI

---
 api-v2/app/modules/access/auth_controller.py | 42 +++++++++++++++++++-
 1 file changed, 40 insertions(+), 2 deletions(-)

diff --git a/api-v2/app/modules/access/auth_controller.py b/api-v2/app/modules/access/auth_controller.py
index 442a8cdc..abc6929a 100644
--- a/api-v2/app/modules/access/auth_controller.py
+++ b/api-v2/app/modules/access/auth_controller.py
@@ -27,6 +27,21 @@ def set_session_cookie(response: Response, auth_response: dict):
     response.set_cookie("refresh_token", refresh_token)
     response.set_cookie("id_token", id_token)
 
+@router.get('/signup/confirm')
+def confirm_sign_up(code: str, username: str, client_id: str, email: str, settings: SettingsDep, cognito_client: CognitoIdpDep, calc_secret_hash: SecretHashFuncDep):
+    secret_hash = calc_secret_hash(email)
+
+    try:
+        cognito_client.confirm_sign_up(
+            ClientId=client_id,
+            SecretHash=secret_hash,
+            Username=email,
+            ConfirmationCode=code
+        )
+
+        return RedirectResponse(f"{settings.ROOT_URL}/email-verification-success")
+    except Exception as e:
+        return RedirectResponse(f"{settings.ROOT_URL}/email-verification-error")
 
 @router.post("/signup")
 def signup(body: UserCreate,
@@ -34,12 +49,17 @@ def signup(body: UserCreate,
            db: DbSessionDep,
            cognito_client: CognitoIdpDep,
            calc_secret_hash: SecretHashFuncDep):
+
+    """Sign-up route. This route is used to sign up a new user.
""" # Create user in database - user = create_user(db, body) + try: + user = create_user(db, body) + except Exception as e: + raise HTTPException(status_code=400, detail="Failed to create user") + if user is None: raise HTTPException(status_code=400, detail="User already exists") @@ -65,7 +85,6 @@ def signup(body: UserCreate, GroupName=role_to_cognito_group_map[body.role], ) except Exception as e: - print(e) raise HTTPException(status_code=400, detail="Failed to confirm user") return response @@ -121,6 +140,25 @@ def signin(body: UserSignInRequest, "token": auth_response["AuthenticationResult"]["AccessToken"], } +# @router.get( +# "/signout", dependencies=[ +# Depends(requires_auth) +# ]) +# def signout(response: Response, cognito_client: CognitoIdpDep): + + + +# # Signout user +# response = cognito_client.global_sign_out( +# AccessToken=access_token +# ) + +# # Remove refresh token cookie +# session.pop('refresh_token', None) + +# # send response +# return response + @router.get( "/secret", From 77837b0c6b765e7e2e036c72c690754506c79f0e Mon Sep 17 00:00:00 2001 From: Erik Date: Tue, 17 Sep 2024 17:31:21 -0700 Subject: [PATCH 51/70] Update sign up form on frontend --- .../components/authentication/SignUpForm.tsx | 29 ++++-------- app/src/services/auth.ts | 32 +++---------- app/src/views/SignUp.tsx | 46 +++++++------------ 3 files changed, 31 insertions(+), 76 deletions(-) diff --git a/app/src/components/authentication/SignUpForm.tsx b/app/src/components/authentication/SignUpForm.tsx index 4efcd2d3..88867ab5 100644 --- a/app/src/components/authentication/SignUpForm.tsx +++ b/app/src/components/authentication/SignUpForm.tsx @@ -8,7 +8,7 @@ import { } from '@mui/material'; import GoogleIcon from '@mui/icons-material/Google'; import {useFormik} from 'formik'; -import {SignUpHostRequest, SignUpCoordinatorRequest} from '../../services/auth'; +import {SignUpRequest} from '../../services/auth'; import {PasswordValidation} from '../common/PasswordValidation'; import {signUpVaildationSchema} from '../../utils/PasswordValidationSchema'; import {PasswordField} from './PasswordField'; @@ -20,20 +20,12 @@ export interface SignUpFormProps { password, firstName, lastName, - }: SignUpHostRequest | SignUpCoordinatorRequest) => Promise; + }: Omit) => Promise; type: string; - getTokenIsLoading: boolean; - signUpHostIsLoading: boolean; - signUpCoordinatorIsLoading: boolean; + isLoading: boolean; } -export const SignUpForm = ({ - onSubmit, - type, - getTokenIsLoading, - signUpHostIsLoading, - signUpCoordinatorIsLoading, -}: SignUpFormProps) => { +export const SignUpForm = ({onSubmit, type, isLoading}: SignUpFormProps) => { const { handleSubmit, handleChange, @@ -119,22 +111,17 @@ export const SignUpForm = ({ variant="contained" size="large" type="submit" - disabled={ - !isValid || - !dirty || - signUpHostIsLoading || - signUpCoordinatorIsLoading - } + disabled={!isValid || !dirty || isLoading} fullWidth > Sign up - {signUpHostIsLoading || signUpCoordinatorIsLoading ? ( + {isLoading ? 
( ) : null} or diff --git a/app/src/services/auth.ts b/app/src/services/auth.ts index f109a293..8e0a7fc1 100644 --- a/app/src/services/auth.ts +++ b/app/src/services/auth.ts @@ -1,27 +1,17 @@ import {api} from './api'; import {User} from './user'; -export interface SignUpHostResponse { +export interface SignUpResponse { user: User; token: string; } -export interface SignUpHostRequest { - firstName: string; - lastName: string; - email: string; - password: string; -} -export interface SignUpCoordinatorResponse { - user: User; - token: string; -} - -export interface SignUpCoordinatorRequest { +export interface SignUpRequest { firstName: string; lastName: string; email: string; password: string; + role: string; } export interface SignInResponse { @@ -81,18 +71,9 @@ export interface ResendConfirmationCodeResponse { const authApi = api.injectEndpoints({ endpoints: build => ({ - signUpHost: build.mutation({ - query: credentials => ({ - url: '/auth/signup/host', - method: 'POST', - withCredentials: true, - body: credentials, - }), - }), - // prettier-ignore - signUpCoordinator: build.mutation({ + signUp: build.mutation({ query: credentials => ({ - url: '/auth/signup/coordinator', + url: '/auth/signup', method: 'POST', withCredentials: true, body: credentials, @@ -197,8 +178,7 @@ const authApi = api.injectEndpoints({ export {authApi}; export const { - useSignUpHostMutation, - useSignUpCoordinatorMutation, + useSignUpMutation, useSignInMutation, useSignOutMutation, useVerificationMutation, diff --git a/app/src/views/SignUp.tsx b/app/src/views/SignUp.tsx index dc632032..d27fb6c2 100644 --- a/app/src/views/SignUp.tsx +++ b/app/src/views/SignUp.tsx @@ -12,11 +12,9 @@ import CloseIcon from '@mui/icons-material/Close'; import {useNavigate, useParams} from 'react-router-dom'; import {SignUpForm} from '../components/authentication/SignUpForm'; import { - SignUpHostRequest, - SignUpCoordinatorRequest, - useSignUpHostMutation, - useSignUpCoordinatorMutation, useGoogleSignUpMutation, + useSignUpMutation, + SignUpRequest, } from '../services/auth'; import {isErrorWithMessage, isFetchBaseQueryError} from '../app/helpers'; import {FormContainer} from '../components/authentication'; @@ -27,10 +25,8 @@ export const SignUp = () => { const {type} = useParams(); const navigate = useNavigate(); - const [signUpHost, {isLoading: signUpHostIsLoading}] = - useSignUpHostMutation(); - const [signUpCoordinator, {isLoading: signUpCoordinatorIsLoading}] = - useSignUpCoordinatorMutation(); + const [signUp, {isLoading: signUpIsLoading}] = useSignUpMutation(); + const [googleSignUp, {isLoading: getTokenIsLoading}] = useGoogleSignUpMutation(); // get type from params @@ -51,25 +47,19 @@ export const SignUp = () => { password, firstName, lastName, - }: SignUpHostRequest | SignUpCoordinatorRequest) => { - try { - if (type === 'host') { - await signUpHost({ - firstName, - lastName, - email, - password, - }).unwrap(); - } + }: Omit) => { + if (!type) { + return; + } - if (type === 'coordinator') { - await signUpCoordinator({ - firstName, - lastName, - email, - password, - }).unwrap(); - } + try { + await signUp({ + email, + password, + firstName, + lastName, + role: type, + }); navigate(`/signup/success?email=${email}`); } catch (err) { @@ -116,9 +106,7 @@ export const SignUp = () => { From 12cfac2ca0ca95abe9d7d06a2a06c86d8c98f863 Mon Sep 17 00:00:00 2001 From: Erik Date: Wed, 18 Sep 2024 11:26:32 -0700 Subject: [PATCH 52/70] Change isLoading variable and Remove Google auth button --- .../components/authentication/SignInForm.tsx | 19 
+++++++------------ .../components/authentication/SignUpForm.tsx | 12 +++++------- app/src/views/SignIn.tsx | 3 +-- 3 files changed, 13 insertions(+), 21 deletions(-) diff --git a/app/src/components/authentication/SignInForm.tsx b/app/src/components/authentication/SignInForm.tsx index 032de992..a081d941 100644 --- a/app/src/components/authentication/SignInForm.tsx +++ b/app/src/components/authentication/SignInForm.tsx @@ -6,7 +6,7 @@ import { TextField, CircularProgress, } from '@mui/material'; -import GoogleIcon from '@mui/icons-material/Google'; +// import GoogleIcon from '@mui/icons-material/Google'; import {useFormik} from 'formik'; import {object, string} from 'yup'; @@ -14,8 +14,7 @@ import {SignInRequest} from '../../services/auth'; import {PasswordField} from './PasswordField'; interface SignInFormProps { - signInIsLoading: boolean; - getTokenIsLoading: boolean; + isLoading: boolean; onSubmit: ({email, password}: SignInRequest) => Promise; } @@ -24,11 +23,7 @@ const validationSchema = object({ password: string().required('password is required'), }); -export const SignInForm = ({ - onSubmit, - signInIsLoading, - getTokenIsLoading, -}: SignInFormProps) => { +export const SignInForm = ({onSubmit, isLoading}: SignInFormProps) => { const { handleSubmit, handleChange, @@ -89,19 +84,19 @@ export const SignInForm = ({ or - + */} ); }; diff --git a/app/src/components/authentication/SignUpForm.tsx b/app/src/components/authentication/SignUpForm.tsx index 88867ab5..8d4a0fbe 100644 --- a/app/src/components/authentication/SignUpForm.tsx +++ b/app/src/components/authentication/SignUpForm.tsx @@ -6,7 +6,7 @@ import { TextField, CircularProgress, } from '@mui/material'; -import GoogleIcon from '@mui/icons-material/Google'; +// import GoogleIcon from '@mui/icons-material/Google'; import {useFormik} from 'formik'; import {SignUpRequest} from '../../services/auth'; import {PasswordValidation} from '../common/PasswordValidation'; @@ -14,18 +14,16 @@ import {signUpVaildationSchema} from '../../utils/PasswordValidationSchema'; import {PasswordField} from './PasswordField'; export interface SignUpFormProps { - // sign up according to host/coordinator + isLoading: boolean; onSubmit: ({ email, password, firstName, lastName, }: Omit) => Promise; - type: string; - isLoading: boolean; } -export const SignUpForm = ({onSubmit, type, isLoading}: SignUpFormProps) => { +export const SignUpForm = ({onSubmit, isLoading}: SignUpFormProps) => { const { handleSubmit, handleChange, @@ -120,7 +118,7 @@ export const SignUpForm = ({onSubmit, type, isLoading}: SignUpFormProps) => { ) : null} or - + */} ); }; diff --git a/app/src/views/SignIn.tsx b/app/src/views/SignIn.tsx index d2cce685..63df4a07 100644 --- a/app/src/views/SignIn.tsx +++ b/app/src/views/SignIn.tsx @@ -102,8 +102,7 @@ export const SignIn = () => { From 4f041ac53f1fa5c1ee4126ad0b49f653650df4e0 Mon Sep 17 00:00:00 2001 From: Erik Date: Wed, 18 Sep 2024 11:29:11 -0700 Subject: [PATCH 53/70] Throw error if no type is provided --- app/src/views/SignUp.tsx | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/app/src/views/SignUp.tsx b/app/src/views/SignUp.tsx index d27fb6c2..91f9712c 100644 --- a/app/src/views/SignUp.tsx +++ b/app/src/views/SignUp.tsx @@ -49,7 +49,7 @@ export const SignUp = () => { lastName, }: Omit) => { if (!type) { - return; + throw new Error('User type is required'); } try { @@ -107,8 +107,6 @@ export const SignUp = () => { From 4b1f8d7305e23bb901a6771007d3e0700e6f6c15 Mon Sep 17 00:00:00 2001 From: Erik Date: Wed, 18 Sep 2024 
12:30:39 -0700 Subject: [PATCH 54/70] Add signout route and remove session cookies --- api-v2/app/modules/access/auth_controller.py | 36 +++++++++++--------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/api-v2/app/modules/access/auth_controller.py b/api-v2/app/modules/access/auth_controller.py index abc6929a..df322b42 100644 --- a/api-v2/app/modules/access/auth_controller.py +++ b/api-v2/app/modules/access/auth_controller.py @@ -3,7 +3,7 @@ import boto3 from fastapi import Depends, APIRouter, HTTPException, Response, Security, Request -from fastapi.responses import RedirectResponse +from fastapi.responses import RedirectResponse, JSONResponse from botocore.exceptions import ClientError from app.modules.access.schemas import ( @@ -24,8 +24,8 @@ def set_session_cookie(response: Response, auth_response: dict): refresh_token = auth_response["AuthenticationResult"]["RefreshToken"] id_token = auth_response["AuthenticationResult"]["IdToken"] - response.set_cookie("refresh_token", refresh_token) - response.set_cookie("id_token", id_token) + response.set_cookie("refresh_token", refresh_token, httponly=True) + response.set_cookie("id_token", id_token, httponly=True) @router.get('/signup/confirm') def confirm_sign_up(code: str, username: str, client_id: str, email: str, settings: SettingsDep, cognito_client: CognitoIdpDep, calc_secret_hash: SecretHashFuncDep): @@ -140,25 +140,27 @@ def signin(body: UserSignInRequest, "token": auth_response["AuthenticationResult"]["AccessToken"], } -# @router.get( -# "/signout", dependencies=[ -# Depends(requires_auth) -# ]) -# def signout(response: Response, cognito_client: CognitoIdpDep): - +@router.post( + "/signout", dependencies=[ + Depends(requires_auth) + ]) +def signout(request: Request, cognito_client: CognitoIdpDep): + access_token = request.headers.get("Authorization").split(" ")[1] -# # Signout user -# response = cognito_client.global_sign_out( -# AccessToken=access_token -# ) + # Signout user + response = cognito_client.global_sign_out( + AccessToken=access_token + ) -# # Remove refresh token cookie -# session.pop('refresh_token', None) + response = JSONResponse(content={"message": "User signed out successfully"}) -# # send response -# return response + # Remove refresh token cookie + response.delete_cookie("refresh_token") + response.delete_cookie("id_token") + # send response + return response @router.get( "/secret", From 8885528f77d772cf46039cfa13a1cbd495c538fc Mon Sep 17 00:00:00 2001 From: Erik Date: Thu, 19 Sep 2024 09:09:04 -0700 Subject: [PATCH 55/70] Comment out divider from auth forms --- app/src/components/authentication/SignInForm.tsx | 5 +++-- app/src/components/authentication/SignUpForm.tsx | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/app/src/components/authentication/SignInForm.tsx b/app/src/components/authentication/SignInForm.tsx index a081d941..3c3f7cac 100644 --- a/app/src/components/authentication/SignInForm.tsx +++ b/app/src/components/authentication/SignInForm.tsx @@ -1,7 +1,7 @@ import { Button, Stack, - Divider, + // Divider, Link, TextField, CircularProgress, @@ -95,7 +95,8 @@ export const SignInForm = ({onSubmit, isLoading}: SignInFormProps) => { ) : null} - or + {/* TODO: ADD THIS BACK ONCE GOOGLE AUTH IS SETUP */} + {/* or */} {/* - or + {/* TODO: ADD THIS BACK ONCE GOOGLE AUTH IS SETUP */} + {/* or */} {/*