From 5e8da47a16965b8478fd5785635a15c69880c728 Mon Sep 17 00:00:00 2001
From: <>
Date: Tue, 12 Nov 2024 23:06:41 +0000
Subject: [PATCH] Deployed b62f2ea with MkDocs version: 1.6.1

[Diffstat omitted: 71 generated site files added (HTML pages, search index, minified JS/CSS assets), 34,580 insertions.]
# Troubleshooting

## ModHeader

ModHeader will break Google Drive if an Authorization Bearer token is set. The Google Drive website pops up a dialog saying:

> "You are not signed in.
> You are signed out. Sign back in, then click 'Retry'.
> Retry"

The solution is to disable the Authorization header in ModHeader, and Google Drive will work as normal.
# Cognito authentication

## What is it

Cognito is a single sign-on system from AWS. It allows multiple apps to accept authentication from the same set of user accounts. It separates the management of users and permissions from the applications that use them.

## Why we use Cognito

We're invested in AWS, so we might as well use this too.

## How we implement it

We're following the implementation from the djangostar tutorial.

These are the steps involved:
1. Backend downloads JWKS from the Cognito User Pool on launch
2. User submits credentials and gets `id_token` and `access_token`
3. User sends request with token
4. Backend verifies token and processes request (see the sketch after this list)
5. User gets response from authenticated API
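To make steps 1 and 4 concrete, here is a minimal sketch of verifying a Cognito token against the pool's JWKS using the PyJWT library. The region, user pool ID, and client ID are placeholders, and this is a generic illustration rather than the project's actual `core/utils/jwt.py`:

```python
# Minimal sketch of Cognito JWT verification with PyJWT; not the project's
# actual core/utils/jwt.py. Pool ID, region, and client ID are placeholders.
import jwt
from jwt import PyJWKClient

REGION = "us-west-2"
USER_POOL_ID = "us-west-2_XXXXXXXXX"  # placeholder
APP_CLIENT_ID = "your-app-client-id"  # placeholder

# Step 1: the JWKS URL published by the user pool; PyJWKClient caches keys.
jwks_url = (
    f"https://cognito-idp.{REGION}.amazonaws.com/{USER_POOL_ID}"
    "/.well-known/jwks.json"
)
jwks_client = PyJWKClient(jwks_url)


def verify_token(token: str) -> dict:
    """Step 4: verify signature and issuer, returning the token claims."""
    signing_key = jwks_client.get_signing_key_from_jwt(token)
    return jwt.decode(
        token,
        signing_key.key,
        algorithms=["RS256"],
        issuer=f"https://cognito-idp.{REGION}.amazonaws.com/{USER_POOL_ID}",
        # Cognito access tokens carry a client_id claim instead of aud, so
        # audience checking is skipped here; an id_token would instead use
        # audience=APP_CLIENT_ID.
        options={"verify_aud": False},
    )
```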
## Current Dev Setup

1. Created app client called "backend" within the vrms-dev user pool, with ALLOW_ADMIN_USER_PASSWORD_AUTH enabled
2. "Domain Name" is already created at https://hackforla-vrms-dev.auth.us-west-2.amazoncognito.com
3. In "App client settings", enabled Implicit grant and openid, Callback URL http://localhost:8000/admin (an example login URL follows this list)
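With these settings, the hosted UI login page for the implicit grant generally takes the form below. The app client ID is a placeholder, and this follows the standard Cognito hosted UI URL format rather than anything project-specific:

```
https://hackforla-vrms-dev.auth.us-west-2.amazoncognito.com/login?response_type=token&client_id=<app_client_id>&redirect_uri=http%3A%2F%2Flocalhost%3A8000%2Fadmin&scope=openid
```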
## How it works now with the dev user pool and local development backend

1. Create a Cognito user and log in from the Hosted UI (from App client settings). Successful login will redirect to localhost:8000/admin with the necessary tokens
2. Take the access_token from the URL and make a GET request to http://localhost:8000/api/v1/me (Headers: key=Authorization, value=Bearer <access_token>) — see the sketch after this list
3. Backend should return the user's profile data
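Step 2 can also be scripted. A minimal sketch using the third-party requests package (the token value is a placeholder):

```python
# Sketch of the manual test in step 2, using the third-party "requests"
# package; the token value is a placeholder taken from the redirect URL.
import requests

access_token = "<access_token from the redirect URL>"
response = requests.get(
    "http://localhost:8000/api/v1/me",
    headers={"Authorization": f"Bearer {access_token}"},
    timeout=10,
)
print(response.status_code)
print(response.json())  # should be the logged-in user's profile data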
## Notes

The tutorial is 2 years old now (from 2020) and there have been some changes made since then.

1. We created an app client in Cognito for the backend to interface with. ALLOW_ADMIN_USER_PASSWORD_AUTH is the new name for the old ADMIN_NO_SRP_AUTH setting. (Reference)
2. In the custom User model step, the `ugettext_lazy` import is `gettext_lazy` for Django 4.0. (Reference)
3. The tutorial steps don't include instructions to test each step, so it's a little bit of following blindly with the help of linters until the last step.
# Github actions

These are the github actions used in the project.

## Files

```
.github/workflows/
└── deploy-docs.yml # (1)!
```
1. Deploy Documentation
    - triggered by commits to main
    - builds and deploys the mkdocs documentation to github pages (a sketch of such a workflow follows this list).
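For orientation, a workflow that does this generally looks like the sketch below. This is not the contents of the project's deploy-docs.yml; the action versions and the install step are assumptions:

```yaml
# Hypothetical sketch of a docs-deploy workflow, not the project's actual
# deploy-docs.yml. Action versions and the install step are assumptions.
name: deploy-docs
on:
  push:
    branches: [main]
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
      # Assumed dependency; the project may pin its docs requirements instead.
      - run: pip install mkdocs-material
      # Builds the site and pushes it to the gh-pages branch.
      - run: mkdocs gh-deploy --force
```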
## Actions page workflows

1. deploy-docs
    - see deploy-docs.yml above
2. pages-build-deployment
    - The github-pages bot runs this automatically for any project that publishes to github pages.
    - It does extra work that we don't need, but there's no way to disable it. See here.
# Project Structure

These are the directories and files in the project. Parts are summarized for clarity.

## Top level

```
/
├── app/     # (1)!
├── docs/    # (2)!
├── scripts/ # (3)!
├── docker-compose.yml # (4)!
└── pyproject.toml # (5)!
```
1. The django project. This is also what goes into the Docker image when it's generated by ./scripts/buildrun.sh. See Django Project below for details.
2. Documentation for the project code. See Documentation below for details.
3. Scripts used in the project. These are run on the command line to do various project tasks. See Convenience Scripts for details.
4. The docker compose file.
5. The pyproject.toml file. This holds settings for project tools right now. We may combine this with app/setup.cfg in the future. We may move this file into app/ if it makes sense.
## Django project

```
app/
├── core/ # (1)!
├── data/ # (2)!
├── peopledepot/ # (3)!
│   ├── asgi.py
│   ├── settings.py
│   ├── urls.py
│   └── wsgi.py
├── scripts/ # (4)!
│   └── convert.py
├── Dockerfile # (5)!
├── entrypoint.sh # (6)!
├── manage.py # (7)!
├── requirements.in # (8)!
├── requirements.txt # (9)!
└── setup.cfg # (10)!
```
1. The core app in django. This app contains the API and models. See Core App below for details.
2. The data app in django. This app contains the initial data migrations. See Data App below for details.
3. The django project configuration.
4. Scripts used in the project. This currently contains the convert.py script, which converts csv files into django initial data code. It's used to generate code for the initial data migrations.
5. Dockerfile used to build the Docker image.
6. Entrypoint script called by the Docker image.
7. Django manage.py script. In nearly all cases, there's no good reason to change this. Just leave it alone.
8. Requirements.in file used by uv pip compile. See the uv tool for details.
9. Requirements.txt file generated by uv pip compile. Do not modify this file. Edit the requirements.in file instead. See the uv tool for details.
10. Config file for development support tools such as flake8 and pytest. flake8 is the only tool that doesn't support pyproject.toml yet, which is why we have this file.
## Core App

```
core/
├── admin.py # (1)!
├── api/
│   ├── permissions.py # (2)!
│   ├── serializers.py # (3)!
│   ├── urls.py # (4)!
│   └── views.py # (5)!
├── apps.py # (6)!
├── initial_data/
│   ├── ...
│   └── Initial data in json or csv format # (7)!
├── migrations/
│   ├── nnnn_migration_name.py # (8)!
│   ├── ...
│   └── max_migration.txt # (9)!
├── models.py # (10)!
├── scripts/ # (11)!
├── tests/
│   ├── conftest.py # (12)!
│   ├── test_api.py # (13)!
│   ├── test_models.py # (14)!
│   └── test_permissions.py # (15)!
└── utils/ # (16)!
    └── jwt.py # (17)!
```
1. Admin site configuration.
2. Permission class definitions.
3. Serializers to control what data is sent to the client (see the sketch after this list).
4. Routes for the API.
5. Views to retrieve data for the API.
6. AppConfig for the core app.
7. Initial data scripts. See Create initial data scripts for how to create these.
8. Migration scripts. These are generated by the makemigrations command.
9. File used by django-linear-migrations. It stores the last migration name for the app for use in git merge conflicts.
10. Models for the core app.
11. Scripts for the core app. We use it to hold temporary scripts in Create initial data migrations, but there's no need to commit them into git.
12. Test fixtures file
13. Tests for the API
14. Tests for the models
15. Tests for the permissions
16. Utility scripts for the core app
17. Utility functions supporting JWT with Cognito
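As a rough sketch of how the api/ pieces relate in Django REST Framework: the SkillExample model and its fields below are hypothetical, not project code.

```python
# Hypothetical serializer/view pair illustrating the api/ layout; the
# SkillExample model and its fields are made up, not project code.
from rest_framework import serializers, viewsets
from rest_framework.permissions import IsAuthenticated

from core.models import SkillExample  # hypothetical model


class SkillExampleSerializer(serializers.ModelSerializer):
    """Controls which model fields are sent to the client."""

    class Meta:
        model = SkillExample
        fields = ("uuid", "name", "description")


class SkillExampleViewSet(viewsets.ModelViewSet):
    """Retrieves data for the API; wired up to a route in api/urls.py."""

    queryset = SkillExample.objects.all()
    serializer_class = SkillExampleSerializer
    permission_classes = [IsAuthenticated]
```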
## Data App

```
data/
└── migrations/
    ├── nnnn_migration_name.py # (1)!
    ├── ...
    └── max_migration.txt # (2)!
```
1. Migration scripts. See Create initial data migrations for how to create these (a generic sketch follows this list).
2. File used by django-linear-migrations. It stores the last migration name for the app for use in git merge conflicts.
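For context, initial data migrations in Django generally follow the RunPython pattern sketched below. The model, row values, and dependency are hypothetical, not the project's generated code:

```python
# Hypothetical shape of an initial data migration (RunPython pattern);
# the model name, row values, and dependency are made up, not project code.
from django.db import migrations


def insert_rows(apps, schema_editor):
    # Use the historical model state, not a direct import of the model.
    SkillExample = apps.get_model("core", "SkillExample")
    SkillExample.objects.create(name="Example skill")


class Migration(migrations.Migration):
    dependencies = [
        ("core", "0001_initial"),  # placeholder dependency
    ]

    operations = [
        migrations.RunPython(insert_rows, migrations.RunPython.noop),
    ]
```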
## Documentation

```
/
├── docs/
│   ├── [topics]/ # (1)!
│   ├── CONTRIBUTING.md # (2)!
│   ├── index.md # (3)!
│   ├── LICENSE # (4)!
│   ├── license.md # (5)!
│   └── _static/
├── CONTRIBUTING.md # (6)!
├── LICENSE # (7)!
├── mkdocs.yml # (8)!
└── README.md # (9)!
```
1. Directories containing markdown files on different topics.
2. Placeholder for the CONTRIBUTING.md file in the project root. MkDocs requires all documentation files to be in the docs/ directory. This file uses a snippet to import the source content (see the example after this list).
3. Home page of the documentation site. This file uses a snippet to import the README.md file from the project root.
4. Placeholder LICENSE file. This file uses a snippet to import the LICENSE file from the project root. This is used for linking from the Documentation Homepage as well as the README.md file in the Github web interface, which knows the file by this name only.
5. Placeholder license.md file. This file uses a snippet to import the LICENSE file from the project root. This is used for the MkDocs nav section, which requires the md file extension.
6. Contributing file for the project. This name is capitalized according to Github conventions.
7. License file for the project. This name is capitalized according to Github conventions.
8. MkDocs config file.
9. README file for the project. This name is capitalized according to Github conventions.
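For example, the placeholder docs LICENSE file consists of a single snippet include line:

```
--8<-- "LICENSE"
```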
# Development Environment

## Pre-requisites

### GitHub account

See here for creating a GitHub account. If you are not familiar with Git, this tutorial is recommended.

### Two-factor authentication

Set up two-factor authentication on your account by following this guide.

### Text editor

VS Code is recommended, but feel free to use a text editor of your choice.

### Install Git

Before cloning your forked repository to your local machine, you must have Git installed. You can find instructions for installing Git for your operating system here.
For Windows users:

- We recommend installing Windows Subsystem for Linux (WSL). WSL provides a Linux-compatible environment that can prevent common errors during script execution.

- After setting up WSL, install Git directly from the Linux terminal. This method can help avoid complications that sometimes arise when using Git Bash on Windows.

- If you prefer Git Bash or encounter errors related to line endings when running scripts, the problem might be due to file conversions in Windows. To address this, configure Git as follows:

    ```
    git config --system core.autocrlf false
    ```

    Feel free to reach out in the Hack for LA Slack channel if you encounter any errors while running scripts on Windows.

Please note that if you have a Mac the page offers several options (see other option, if you need to conserve hard drive space) including:

+
    +
  • an “easiest” option (this version is fine for our use): This option would take just over 4GB.
  • +
  • a “more up to date” option (not required but optional if you want it): This option prompts you to go to install an 8GB package manager called Homebrew.
  • +
  • Other option: If your computer is low on space, you can use this tutorial to install Xcode Command Line Tools and a lighter version of Homebrew, and then install Git using this command: brew install git, which in total only uses about 300MB.
  • +
+
+
+
+

Install Docker

+

Make sure docker and docker-compose are installed on your computer, or install them now. You can verify with:

+
docker -v
+docker-compose -v
+
+
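
If the tools are installed, both commands print a version string; for example (your exact versions will differ):

docker -v
+# Docker version 24.0.6, build ed223bc
+docker-compose -v
+# Docker Compose version v2.21.0
+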

The recommended installation method for your operating system can be found here.

+
+

Feel free to reach out in the Hack for LA Slack channel if you have trouble installing Docker on your system.

+
+

More on using Docker and the concepts of containerization:

+ +

Fork the repository

+

You can fork the hackforla/peopledepot repository by clicking the Fork button. A fork is a copy of the repository that will be placed on your GitHub account.

+
+

It should create a URL that looks like the following -> https://github.com/<your_GitHub_user_name>/peopledepot

+
+

For example -> https://github.com/octocat/peopledepot

+
+
+
+

What you have created is a forked copy in a remote version on GitHub. It is not on your local machine yet.

+
+

Clone a copy on your computer

+

The following steps will clone (create) a local copy of the forked repository on your computer.

+
    +
  1. +

    Create a new folder in your computer that will contain hackforla projects.

    +

    In your command line interface (Terminal, Git Bash, PowerShell), move to where you want your new folder to be placed and create a new folder that will contain hackforla projects. After that, navigate into the folder (directory) you just created.

    +

    For example:

    +
    cd /projects
    +mkdir hackforla
    +cd hackforla
    +
    +
  2. +
  3. +

    From the hackforla directory created in the previous step, run:

    +
    git clone https://github.com/<your_GitHub_user_name>/peopledepot.git
    +
    +

    For example if your GitHub username was octocat:

    +
    git clone https://github.com/octocat/peopledepot.git
    +
    +
    +

    You can also clone using SSH, which is more secure but requires more setup. Because of the additional setup, cloning using HTTPS as shown above is recommended.

    +
    +
  4. +
+

You should now have a new folder in your hackforla folder called peopledepot. Verify this by changing into the new directory:

+
cd peopledepot
+
+

Verify and set up remote references

+

Verify that your local cloned repository is pointing to the correct origin URL (that is, the forked repo on your own GitHub account):

+
git remote -v
+
+

You should see fetch and push URLs with links to your forked repository under your account (i.e. https://github.com/<your_GitHub_user_name>/peopledepot.git). You are all set to make working changes to the project on your local machine.

+

However, we still need a way to keep our local repo up to date with the deployed project. To do so, you must add an upstream remote to incorporate changes made while you are working on your local repo. Run the following to add an upstream remote URL & update your local repo with recent changes to the hackforla version:

+
git remote add upstream https://github.com/hackforla/peopledepot.git
+git fetch upstream
+
+

After adding the upstream remote, you should now see it if you run git remote -v again:

+
origin  https://github.com/<your_GitHub_user_name>/peopledepot.git (fetch)
+origin  https://github.com/<your_GitHub_user_name>/peopledepot.git (push)
+upstream        https://github.com/hackforla/peopledepot.git (fetch)
+upstream        https://github.com/hackforla/peopledepot.git (push)
+
+

Build and run using Docker locally

+
    +
  1. +

    Make sure the Docker service is running

    +
    +
    +
    +
    sudo systemctl status docker
    +
    +

    It will show Active: active (running) if it's running.

    +
    +
    +
      +
    1. Start Docker Desktop
    2. +
    3. Run docker container ls to verify Docker Desktop is running. If it is not running you will get the message: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
    4. +
    +
    +
    +
    +
  2. +
  3. +

    Create an .env.docker file from .env.docker-example

    +
    cp ./app/.env.docker-example ./app/.env.docker
    +
    +
  4. +
  5. +

    Build and run the project via the script (this includes running docker-compose up)

    +
    ./scripts/buildrun.sh
    +
    +
  6. +
  7. +

    Create a super user for logging into the web admin interface

    +
    docker-compose exec web python manage.py createsuperuser --no-input
    +
    +
  8. +
  9. +

    Browse to the web admin interface at http://localhost:8000/admin/ and confirm the admin site is running. Use DJANGO_SUPERUSER_USERNAME and DJANGO_SUPERUSER_PASSWORD from .env.docker for credentials.

    +
  10. +
+

See our documentation for Working with Docker for more useful Docker commands.

+

Install pre-commit

+

This will check your changes for common problems.

+

See the Pre-commit page for installation instructions.

+

For consistency, an automated bot will perform the same checks on the repository side when you open a pull request.
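
Once installed, you can also run the same checks yourself at any time; this is a quick way to catch problems before committing:

pre-commit run --all-files
+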


Documentation

+

We highly encourage contributors to add and update documentation in the same pull request as the code. This will ensure that the docs and features are synchronized.

+

Please see the MkDocs page for how to view documentation changes locally using mkdocs in Docker.


Working with Git

+

Sync Main Changes

+

Your fork of this repository on GitHub, and your local clone of that fork, will get out of sync with the (upstream) repository as others update the repository. (That's what has happened when you see something like "This branch is 1 commit behind peopledepot:main" on your forked repository.)

+

One way to keep your fork up to date with this repository is to follow these instructions: Syncing your fork to the original repository via the browser

+

You can also update your fork via the local clone of your fork, using these instructions. Assuming you have a local clone with remotes upstream (this repo) and origin (your GitHub fork of this repo):

+
    +
  • First, you will need to create a local branch which tracks upstream/main. You will only need to do this once; you do not need to do this every time you want to incorporate upstream changes.
  • +
+

Run the following two commands:

+
git fetch upstream
+git checkout -b upstream-main --track upstream/main
+
+

If you have already created the branch upstream-main, the following commands will incorporate upstream changes:

+
git checkout upstream-main # Move to the branch you want to merge with.
+git pull  # This updates your tracking branch to match the main branch in this repository
+git checkout main  # Move back to your main branch
+git merge upstream-main  # Merge to bring your main current.
+
+

If you do all your work on topic branches (as suggested above) and keep main free of local modifications, this merge should apply cleanly.

+

Then push the merge changes to your GitHub fork:

+
git push
+
+

If you go to your online GitHub repository this should remove the message "This branch is x commit behind peopledepot:main".


Add new model and API endpoints

+

This guide aims to enable developers with little or no django experience to add django models and API endpoints to the project. Most code examples are followed by detailed explanations.

+
+The developer will have exposure to the following in this document +
    +
  • python
  • +
  • django
  • +
  • django rest framework
  • +
  • relational database through the Django ORM (object-relational mapper)
  • +
  • data types
  • +
  • object-oriented concepts (object, inheritance, composition)
  • +
  • unit testing
  • +
  • API design
  • +
  • command line
  • +
+
+

This guide assumes the developer has followed the working with issues guide, and has forked and created a local branch to work on the issue. The development server should already be running in the background and will automatically apply the changes when we save the files.

+

We will choose the recurring_event issue as an example. Our goal is to create a database table and an API that a client can use to work with the data. The work is split into 3 testable components: the model, the admin site, and the API.

+

Let's start!

+

Data model

+
+TDD test +
    +
  1. +

    Write the test

    +

    We would like the model to store this data, and to return the name property from the __str__ function.

    +

    In app/core/tests/test_models.py

    +
    app/core/tests/test_models.py
    def test_recurring_event_model(project):
    +    from datetime import datetime
    +
    +    from core.models import RecurringEvent
    +
    +    payload = {
    +        "name": "test event",
    +        "start_time": datetime(2023, 1, 1, 2, 34),
    +        "duration_in_min": 60,
    +        "video_conference_url": "https://zoom.com/mtg/1234",
    +        "additional_info": "long description",
    +        "project": project,
    +    }
    +    recurring_event = RecurringEvent(**payload)
    +    # recurring_event.save()
    +    assert recurring_event.name == payload["name"]
    +    assert recurring_event.start_time == payload["start_time"]
    +    assert recurring_event.duration_in_min == payload["duration_in_min"]
    +    assert recurring_event.video_conference_url == payload["video_conference_url"]
    +    assert recurring_event.additional_info == payload["additional_info"]
    +    assert recurring_event.project == payload["project"]
    +    assert str(recurring_event) == payload["name"]
    +
    +

    For testing many-to-many relationships, we can add

    +
    app/core/tests/test_models.py
    def test_project_recurring_event_relationship(project):
    +    recurring_event = RecurringEvent.objects.get(name="{Name of Recurring Event}")
    +
    +    project.recurring_events.add(recurring_event)
    +    assert project.recurring_events.count() == 1
    +    assert project.recurring_events.contains(recurring_event)
    +    assert recurring_event.projects.contains(project)
    +
    +    project.recurring_events.remove(recurring_event)
    +    assert project.recurring_events.count() == 0
    +    assert not project.recurring_events.contains(recurring_event)
    +    assert not recurring_event.projects.contains(project)
    +
    +
  2. +
  3. +

    See it fail

    +
    ./scripts/test.sh
    +
    +
  4. +
  5. +

    Run it again after implementing the model to make sure the code satisfies the test

    +
  6. +
+
+

Add the model

+

Add the following to app/core/models.py

+
app/core/models.py
class RecurringEvent(AbstractBaseModel):  # (1)!
+    """
+    Recurring Events
+    """
+
+    name = models.CharField(max_length=255)
+    start_time = models.TimeField("Start", null=True, blank=True)  # (2)!
+    duration_in_min = models.IntegerField(null=True, blank=True)  # (3)!
+    video_conference_url = models.URLField(blank=True)
+    additional_info = models.TextField(blank=True)  # (4)!
+
+    project = models.ForeignKey(Project, on_delete=models.CASCADE)
+    # (5)!
+    # location_id = models.ForeignKey("Location", on_delete=models.DO_NOTHING)
+    # event_type_id = models.ForeignKey("EventType", on_delete=models.DO_NOTHING)
+    # brigade_id = models.ForeignKey("Brigade", on_delete=models.DO_NOTHING)
+    # day_of_week = models.ForeignKey("DayOfWeek", on_delete=models.DO_NOTHING)
+    # must_roles = models.ManyToManyField("Role")
+    # should_roles = models.ManyToManyField("Role")
+    # could_roles = models.ManyToManyField("Role")
+    # frequency_id = models.ForeignKey("Frequency", on_delete=models.DO_NOTHING)
+
+    def __str__(self):  # (6)!
+        return f"{self.name}"
+
+
    +
  1. We inherit all models from AbstractBaseModel, which provides a uuid primary key, created_at, and updated_at timestamps. In the Github issue, these fields might be called id, created, and updated. There's no need to add those.
  2. +
  3. Most fields should not be required. Text fields should be blank=True, data fields should be null=True.
  4. +
  5. The data types in the github issue may be given in database column types such as INTEGER, VARCHAR, but we need to convert them into Django field types when defining the model.
  6. +
  7. VARCHAR can be either CharField or TextField.
      +
    1. CharField has a max_length, which makes it useful for finite-length text data. We're going to default to giving them max_length=255 unless there's a better value, like max_length=2 for a state abbreviation.
    2. +
    3. TextField doesn't have a maximum length, which makes it ideal for large text fields such as description.
    4. +
    +
  8. +
  9. Try to add the relationships to non-existent models, but comment them out. Another developer will complete them when they go to implement those models.
  10. +
  11. Always override the __str__ function to output something more meaningful than the default. It lets us do a quick test of the model by calling str([model]). It's also useful for the admin site model list view.
  12. +
+
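
As an optional sanity check, you can also exercise the __str__ method from a Django shell inside the running container. A quick sketch, assuming the dev containers are up:

docker-compose exec web python manage.py shell
+
+>>> from core.models import RecurringEvent
+>>> str(RecurringEvent(name="Weekly standup"))
+'Weekly standup'
+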
+Updating models.py for many-to-many relationships +

For adding many-to-many relationships with additional fields, such as ended_on, we can add

+
app/core/models.py
class Project(AbstractBaseModel):
+    ...
+    recurring_events = models.ManyToManyField(
+        "RecurringEvent",
+        related_name="projects",
+        blank=True,
+        through="ProjectRecurringEventXref",
+    )
+    ...
+
+
+class ProjectRecurringEventXref(AbstractBaseModel):
+    """
+    Joins a recurring event to a project
+    """
+
+    recurring_event_id = models.ForeignKey(RecurringEvent, on_delete=models.CASCADE)
+    project_id = models.ForeignKey(Project, on_delete=models.CASCADE)
+    ended_on = models.DateField("Ended on", null=True, blank=True)
+
+

For adding many-to-many relationships without additional fields, we can just add

+
app/core/models.py
class Project(AbstractBaseModel):
+    ...
+    recurring_events = models.ManyToManyField(
+        "RecurringEvent",
+        related_name="projects",
+        blank=True,
+    )
+    ...
+
+

which leaves out the "through" option, so the "join table" will be created implicitly.

+
+

Run migrations

+

This generates the database migration files

+
./scripts/migrate.sh
+
+
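
Under the hood, the script is roughly a convenience wrapper around the standard Django migration commands run inside the web container (a sketch; check the script itself for its exact behavior):

docker-compose exec web python manage.py makemigrations
+docker-compose exec web python manage.py migrate
+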
+Test +

Since we overrode the __str__ function, we need to write a test for it.

+
    +
  1. +

    Add a fixture for the model

    +

    Fixtures are reusable code that can be used in multiple tests by declaring them as parameters of the test case. In this example, we show both defining a fixture (recurring_event) and using another fixture (project).

    +

    Note: The conftest file is meant to hold shared test fixtures, among other things. The fixtures have directory scope.

    +

    Add the following to app/core/tests/conftest.py

    +
    app/core/tests/conftest.py
    @pytest.fixture
    +# (1)!
    +def recurring_event(project):  # (2)!
    +    # (3)!
    +    return RecurringEvent.objects.create(name="Test Recurring Event", project=project)
    +
    +
      +
    1. We name the fixture after the model name (recurring_event).
    2. +
    3. This model makes use of the project model as a foreign key relation, so we pass in the project fixture, which creates a project model.
    4. +
    5. We create an object of the new model, passing in at least the required fields. In this case, we passed in enough arguments to use the __str__ method in a test.
    6. +
    +
  2. +
  3. +

    Add a test case

    +

    When creating Django models, there's no need to test the CRUD functionality, since Django itself is well-tested and we can expect it to generate the correct CRUD functionality. Feel free to write some tests for practice. What really needs testing is any custom code that's not part of Django. Sometimes we need to override the default Django behavior, and that should be tested.

    +

    Here's a basic test to see that the model stores its name.

    +

    Add the following to app/core/tests/test_models.py

    +
    app/core/tests/test_models.py
    def test_recurring_event(recurring_event):  # (1)!
    +    # (2)!
    +    assert str(recurring_event) == "Test Recurring Event"  # (3)!
    +
    +
      +
    1. Pass in our fixture so that the model object is created for us.
    2. +
    3. The __str__ method should be tested since it's an override of the default Django method.
    4. +
    5. Write assertion(s) to check that what's passed into the model is what it contains. The simplest thing to check is the __str__ method.
    6. +
    +
  4. +
  5. +

    Run the test script to show it passing

    +
    ./scripts/test.sh
    +
    +
  6. +
+
+
+Check and commit +

This is a good place to pause, check, and commit progress.

+
    +
  1. +

    Run pre-commit checks

    +
    ./scripts/precommit-check.sh
    +
    +
  2. +
  3. +

    Add and commit changes

    +
    git add -A
    +git commit -m "feat: add model: recurring_event"
    +
    +
  4. +
+
+

Admin site

+

Django comes with an admin site interface that allows admin users to view and change the data in the models. It's essentially a database viewer.

+

Register the model

+

In app/core/admin.py

+
    +
  1. +

    Import the new model

    +
    app/core/admin.py
    from .models import RecurringEvent
    +
    +
  2. +
  3. +

    Register the model with the admin site

    +
    app/core/admin.py
    @admin.register(RecurringEvent)  # (2)!
    +class RecurringEventAdmin(admin.ModelAdmin):  # (1)!
    +    list_display = (  # (3)!
    +        "name",
    +        "start_time",
    +        "duration_in_min",
    +    )  # (4)!
    +
    +
      +
    1. We declare a ModelAdmin class so we can customize the fields that we expose to the admin interface.
    2. +
    3. We use the register decorator to register the class with the admin site.
    4. +
    5. list_display controls what's shown in the list view
    6. +
    7. list_filter adds filter controls to declared fields (useful, but not shown in this example).
    8. +
    +
  4. +
+

View the admin site

+

Check that everything's working and there are no issues, which should be the case unless there are custom input fields creating problems.

+
    +
  1. +

    See the development setup guide section on "Build and run using Docker locally" for how to view the admin interface.

    +
  2. +
  3. +

    Example of a custom field (as opposed to the built-in ones)

    +
    # (1)!
    +time_zone = TimeZoneField(blank=True, use_pytz=False, default="America/Los_Angeles")
    +
    +
      +
    1. Having a misconfigured or buggy custom field could cause the admin site to crash and the developer will need to look at the debug message and resolve it.
    2. +
    +
  4. +
+
+Test +
    +
  1. Feel free to write tests for the admin. There's no example for it yet.
  2. +
  3. The reason there are no tests yet is that the admin site is independent of the API functionality, and we're mainly interested in the API part.
  4. +
  5. When the time comes that we depend on the admin interface, we will need to have tests for the needed functionalities.
  6. +
+
+
+Check and commit +

This is a good place to pause, check, and commit progress.

+
    +
  1. +

    Run pre-commit checks

    +
    ./scripts/precommit-check.sh
    +
    +
  2. +
  3. +

    Add and commit changes

    +
    git add -A
    +git commit -m "feat: register admin: recurring_event"
    +
    +
  4. +
+
+

API

+

There are several components to adding API endpoints: Model (already done), Serializer, View, and Route.

+

Add serializer

+

This is code that serializes objects into strings for the API endpoints, and deserializes strings into objects when we receive data from the client.

+

In app/core/api/serializers.py

+
+Updating serializers.py for many-to-many relationships +

Following the many-to-many relationship between project and recurring event from above,

+

Update the existing serializer classes

+
app/core/api/serializers.py
class ProjectSerializer(serializers.ModelSerializer):
+    """Used to retrieve project info"""
+
+    recurring_events = serializers.StringRelatedField(many=True)
+
+    class Meta:
+        model = Project
+        fields = (
+            "uuid",
+            "name",
+            "description",
+            "created_at",
+            "updated_at",
+            "completed_at",
+            "github_org_id",
+            "github_primary_repo_id",
+            "hide",
+            "google_drive_id",
+            "image_logo",
+            "image_hero",
+            "image_icon",
+            "recurring_events",
+        )
+        read_only_fields = (
+            "uuid",
+            "created_at",
+            "updated_at",
+            "completed_at",
+        )
+
+
+class RecurringEventSerializer(serializers.ModelSerializer):
+    """Used to retrieve recurring_event info"""
+
+    projects = serializers.StringRelatedField(many=True)
+
+    class Meta:
+        model = RecurringEvent
+        fields = (
+            "uuid",
+            "name",
+            "start_time",
+            "duration_in_min",
+            "video_conference_url",
+            "additional_info",
+            "project",
+            "projects",
+        )
+        read_only_fields = (
+            "uuid",
+            "created_at",
+            "updated_at",
+        )
+
+
+
    +
  1. +

    Import the new model

    +
    app/core/api/serializers.py
    from core.models import RecurringEvent
    +
    +
  2. +
  3. +

    Add a serializer class

    +
    app/core/api/serializers.py
    class RecurringEventSerializer(serializers.ModelSerializer):  # (1)!
    +    """Used to retrieve recurring_event info"""
    +
    +    class Meta:
    +        model = RecurringEvent  # (2)!
    +        fields = (
    +            "uuid",
    +            "name",
    +            "start_time",
    +            "duration_in_min",
    +            "video_conference_url",
    +            "additional_info",
    +            "project",
    +        )
    +        read_only_fields = (
    +            "uuid",  # (3)!
    +            "created_at",
    +            "updated_at",
    +        )
    +
    +
      +
    1. We inherit from ModelSerializer. It knows how to serialize/deserialize the Django built-in data fields so we don't have to write the code to do it.
    2. +
    3. We do need to pass in the model, the fields we want to expose through the API, and any read_only_fields.
    4. +
    5. uuid, created_at, and updated_at are managed by automations and are always read-only.
    6. +
    +
  4. +
  5. +

    Custom data fields may need extra code in the serializer

    +
    time_zone = TimeZoneSerializerField(use_pytz=False)  # (1)!
    +
    +
      +
    1. This non-built-in model field provides a serializer so we just point to it.
    2. +
    +
  6. +
  7. +

    Custom validators if we need them

    +

    We will need to write custom validators here if we want custom behavior, for example validating URL strings and limiting them to the GitHub user profile pattern using a regular expression.

    +
    Example here when we have one
    +
    +
  8. +
+

Add viewset

+

Viewset defines the set of API endpoints for the model.

+

In app/core/api/views.py

+
    +
  1. +

    Import the model

    +
    app/core/api/views.py
    from ..models import RecurringEvent
    +
    +
  2. +
  3. +

    Import the serializer

    +
    app/core/api/views.py
    from .serializers import RecurringEventSerializer
    +
    +
  4. +
  5. +

    Add the viewset and CRUD API endpoint descriptions

    +
    app/core/api/views.py
    @extend_schema_view(  # (2)!
    +    list=extend_schema(description="Return a list of all the recurring events"),
    +    create=extend_schema(description="Create a new recurring event"),
    +    retrieve=extend_schema(description="Return the details of a recurring event"),
    +    destroy=extend_schema(description="Delete a recurring event"),
    +    update=extend_schema(description="Update a recurring event"),
    +    partial_update=extend_schema(description="Patch a recurring event"),
    +)
    +class RecurringEventViewSet(viewsets.ModelViewSet):  # (1)!
    +    permission_classes = [IsAuthenticated]  # (4)!
    +    queryset = RecurringEvent.objects.all()  # (3)!
    +    serializer_class = RecurringEventSerializer
    +
    +
      +
    1. We inherit from ModelViewSet, which provides a default view implementation of all 6 CRUD actions: create, retrieve, partial_update, update, destroy, list.
    2. +
    3. We use the extend_schema_view decorator to attach the API doc strings to the viewset. They are usually defined as docstrings of the corresponding function definitions inside the viewset. Since we use ModelViewSet, there's nowhere to put the docstrings but above the viewset.
    4. +
    5. The minimum code we need with ModelViewSet are the queryset, and the serializer_class.
    6. +
    7. Permissions
        +
      1. For now use permission_classes = [IsAuthenticated]
      2. +
      3. It doesn't control permissions the way we want, but we will fix it later.
      4. +
      +
    8. +
    +
  6. +
+
+Extended example: Query Params +

This example shows how to add filter params. It's done for the user model as a requirement from VRMS.

+
    +
  1. +

    Here's a more complex API doc example (this example is using the User model's ViewSet)

    +
    app/core/api/views.py
    @extend_schema_view(
    +    list=extend_schema(  # (2)!
    +        summary="Users List",
    +        description="Return a list of all the existing users",
    +        parameters=[
    +            OpenApiParameter(
    +                name="email",
    +                type=str,
    +                description="Filter by email address",
    +                examples=[
    +                    OpenApiExample(
    +                        "Example 1",
    +                        summary="Demo email",
    +                        description="get the demo user",
    +                        value="demo-email@email.com,",
    +                    ),
    +                ],
    +            ),
    +            OpenApiParameter(
    +                name="username",
    +                type=OpenApiTypes.STR,
    +                location=OpenApiParameter.QUERY,
    +                description="Filter by username",
    +                examples=[
    +                    OpenApiExample(
    +                        "Example 1",
    +                        summary="Demo username",
    +                        description="get the demo user",
    +                        value="demo-user",
    +                    ),
    +                ],
    +            ),
    +        ],
    +    ),
    +    create=extend_schema(description="Create a new user"),  # (1)!
    +    retrieve=extend_schema(description="Return the given user"),
    +    destroy=extend_schema(description="Delete the given user"),
    +    update=extend_schema(description="Update the given user"),
    +    partial_update=extend_schema(description="Partially update the given user"),
    +)
    +class UserViewSet(viewsets.ModelViewSet):
    +    pass
    +
    +
      +
    1. Define strings for all 6 actions: create, retrieve, partial_update, update, destroy, list.
    2. +
    3. This one is fancy and provides examples of data to pass into the query params. It's probably more than we need right now.
        +
      1. The examples array can hold multiple examples.
          +
        1. Example ID string has to be unique but is not displayed.
        2. +
        3. summary string appears as an option in the dropdown.
        4. +
        5. description is displayed in the example.
        6. +
        +
      2. +
      +
    4. +
    +
  2. +
  3. +

    Add any query params according to the requirements (this example is using the User model's ViewSet)

    +
    app/core/api/views.py
    class UserViewSet(viewsets.ModelViewSet):
    +    ...
    +
    +    def get_queryset(self):  # (1)!
    +        """
    +        Optionally filter users by an 'email' and/or 'username' query parameter in the URL
    +        """
    +        queryset = get_user_model().objects.all()  # (2)!
    +        email = self.request.query_params.get("email")
    +        if email is not None:
    +            queryset = queryset.filter(email=email)
    +        username = self.request.query_params.get("username")
    +        if username is not None:
    +            queryset = queryset.filter(username=username)
    +        return queryset
    +
    +
      +
    1. +

      Notice the queryset property is now the get_queryset() function, which returns the queryset.

      +

      The get_queryset() function overrides the default and lets us filter the objects returned to the client if they pass in a query param.

      +
    2. +
    3. +

      Start with all the model objects and filter them based on any available query params.

      +
    4. +
    +
  4. +
+
+
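
With the filter in place, a client can narrow the list results by appending query params to the URL. An illustrative request (note that the endpoint requires authentication, so an unauthenticated call like this would be rejected):

curl "http://localhost:8000/api/v1/users/?email=demo-email@email.com"
+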

Register API endpoints

+

In app/core/api/urls.py

+
    +
  1. +

    Import the viewset.

    +
    app/core/api/urls.py
    from .views import RecurringEventViewSet
    +
    +
  2. +
  3. +

    Register the viewset to the router

    +
    app/core/api/urls.py
    router.register(r"recurring-events", RecurringEventViewSet, basename="recurring-event")
    +# (1)!
    +
    +
      +
    1. Params
        +
      1. First param is the URL prefix used in the API routes. It is, by convention, plural.
          +
        • This would show up in the URL like this: http://localhost:8000/api/v1/recurring-events/ and http://localhost:8000/api/v1/recurring-events/<uuid>
        • +
        +
      2. +
      3. Second param is the viewset class which defines the API actions
      4. +
      5. basename is the name used for generating the endpoint names, such as -list, -detail, etc. It's in the singular form. This is automatically generated if the viewset definition contains a queryset attribute, but it's required if the viewset overrides that with the get_queryset function.
          +
        • reverse("recurring-event-list") would return http://localhost:8000/api/v1/recurring-events/
        • +
        +
      6. +
      +
    2. +
    +
  4. +
+
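
Once registered, the endpoints can be exercised with any HTTP client. A hypothetical example, assuming you have a credential accepted by the configured authentication classes (the token scheme below is an assumption, not a statement of how auth is set up in this project):

curl -H "Authorization: Token <your-token>" http://localhost:8000/api/v1/recurring-events/
+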
+Test +

For the CRUD operations, since we're using ModelViewSet where all the actions are provided by rest_framework and well-tested, it's not necessary to have test cases for them. But here's an example of one.

+

In app/core/tests/test_api.py

+
    +
  1. +

    Import API URL

    +
    app/core/tests/test_api.py
    RECURRING_EVENTS_URL = reverse("recurring-event-list")
    +
    +
  2. +
  3. +

    Add test case

    +
    app/core/tests/test_api.py
    def test_create_recurring_event(auth_client, project):
    +    """Test that we can create a recurring event"""
    +
    +    payload = {
    +        "name": "Test Weekly team meeting",
    +        "start_time": "18:00:00",
    +        "duration_in_min": 60,
    +        "video_conference_url": "https://zoom.com/link",
    +        "additional_info": "Test description",
    +        "project": project.uuid,
    +    }
    +    res = auth_client.post(RECURRING_EVENTS_URL, payload)
    +    assert res.status_code == status.HTTP_201_CREATED
    +    assert res.data["name"] == payload["name"]
    +
    +
      +
    1. Given
        +
      1. Pass in the necessary fixtures
      2. +
      3. Construct the payload
      4. +
      +
    2. +
    3. When
        +
      1. Create the object
      2. +
      +
    4. +
    5. Then
        +
      1. Check that it's created via status code
      2. +
      3. Maybe also check the data. A real test should check all the data, but we're kind of relying on django to have already tested this.
      4. +
      +
    6. +
    +
  4. +
  5. +

    Run the test script to show it passing

    +
    ./scripts/test.sh
    +
    +
  6. +
+
+
+Test many-to-many relationships +

In app/core/tests/test_api.py

+
    +
  1. +

    Import API URL

    +
    app/core/tests/test_api.py
    PROJECTS_URL = reverse("project-list")
    +
    +
  2. +
  3. +

    Add test case (following the example above)

    +
    app/core/tests/test_api.py
    def test_project_recurring_event_xref(auth_client, project, recurring_event):
    +    def get_object(objects, target_uuid):
    +        for obj in objects:
    +            if str(obj["uuid"]) == str(target_uuid):
    +                return obj
    +        return None
    +
    +    project.recurring_events.add(recurring_event)
    +    proj_res = auth_client.get(PROJECTS_URL)
    +    test_proj = get_object(proj_res.data, project.uuid)
    +    assert test_proj is not None
    +    assert len(test_proj["recurring_events"]) == 1
    +    assert recurring_event.name in test_proj["recurring_events"]
    +
    +    recurring_event_res = auth_client.get(RECURRING_EVENTS_URL)
    +    test_recurring_event = get_object(recurring_event_res.data, recurring_event.uuid)
    +    assert test_recurring_event is not None
    +    assert len(test_recurring_event["projects"]) == 1
    +    assert project.name in test_recurring_event["projects"]
    +
    +
  4. +
  5. +

    Run the test script to show it passing

    +
    ./scripts/test.sh
    +
    +
  6. +
+
+
+Check and commit +

This is a good place to pause, check, and commit progress.

+
    +
  1. +

    Run pre-commit checks

    +
    ./scripts/precommit-check.sh
    +
    +
  2. +
  3. +

    Add and commit changes

    +
    git add -A
    +git commit -m "feat: add endpoints: recurring_event"
    +
    +
  4. +
+
+
+Push the code and start a PR +

Refer to the Issues page section on "Push to upstream origin" onward.

+

Create initial data scripts

+

Overview

+

The goal is to convert our initial data into scripts that can be loaded into the database when the backend is set up for the first time.

+

These are the steps:

+
    +
  1. Export the data into a csv file
  2. +
  3. Generate a python script from the csv data
  4. +
+

Prerequisites

+

You must have Docker installed

+

The initial data exists in a Google spreadsheet, such as this one for People Depot. There should be individual sheets named after the model names the data correspond to, such as ProgramArea - Data. The sheet name is useful for us to identify the model it corresponds to.

+

The sheet should be formatted like so:

+
    +
  • the first row contains the names of the fields in the model. The names must match the model's field names exactly.
  • +
  • rows 2 to n are the initial data for the model we want to turn into a script.
  • +
+

It is required that there be data in the first column of the sheet.

+

Gather data for preparation

+
    +
  1. +

    Export the data from the Google spreadsheet

    +
      +
    1. Find the sheet in the document containing the data to export. Let's use the ProgramArea - Data data as our example.
    2. +
    3. Go to File -> Download -> Comma Separated Values (.csv). This will download the sheet as a .csv file.
    4. +
    5. Copy the file to the app/core/initial_data directory.
    6. +
    +
  2. +
+

Convert data into Python script

+
    +
  1. +

    Start Docker

    +
  2. +
  3. +

    From project root, run

    +
    ./scripts/buildrun.sh
    +
    +
  4. +
  5. +

    Go to the project root and run this command

    +
    docker-compose exec web python scripts/convert.py "core/initial_data/PD_ Table and field explanations  - ProgramArea - Data.csv"
    +
    +
  6. +
  7. +

    Check that there's a new file called app/core/scripts/programarea_seed.py and that it looks correct (an illustrative sketch is shown after this list)

    +
      +
    1. +

      You can run it to verify, but you will need to remove that data afterward if you care about restoring the database state.

      +
    2. +
    3. +

      Run this command to run the script

      +
    4. +
    +
    docker-compose exec web python manage.py runscript programarea_seed
    +
    +
      +
    1. To remove the data, go into the database and delete all rows from core_programarea
    2. +
    +
    docker-compose exec web python manage.py dbshell
    +
    +# now we have a shell to the db
    +
    +# see if all the seed data got inserted
    +select count(*) from core_programarea;
    +# shows 9 rows
    +
    +delete from core_programarea;
    +# DELETE 9
    +
    +select count(*) from core_programarea;
    +# shows 0 rows
    +
    +# ctrl-d to exit dbshell
    +
    +
  8. +
+
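
For reference, here is a rough, illustrative sketch of what a generated seed script might look like (the field names and row values come from your csv; the names below are made up):

app/core/scripts/programarea_seed.py (illustrative)
from core.models import ProgramArea
+
+
+def run():
+    # example rows; the real script is generated from the csv data
+    items = [
+        "Example Program Area A",
+        "Example Program Area B",
+    ]
+    for name in items:
+        ProgramArea.objects.create(name=name)
+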

Combine Script in Migration

+
    +
  • +

    Look for the name of the last migration file in the core/data/migrations directory

    +
  • +
  • +

    Create a script in the same directory named <number>_<modelname_in_lower_case>_seed.py with the following contents, replacing <modelname_in_lower_case>, ModelNameInPascalCase, and <name of last script> with appropriate values:

    +
    from django.db import migrations
    +
    +from core.models import ModelNameInPascalCase
    +
    +
    +def forward(apps, schema_editor):
    +    # paste everything in seed script's run function here
    +    # remove pass below
    +    pass
    +
    +
    +def reverse(apps, schema_editor):
    +    ModelNameInPascalCase.objects.all().delete()
    +
    +
    +class Migration(migrations.Migration):
    +    dependencies = [("data", "<name of last script, or contents of max_migration.txt>")]
    +
    +    operations = [migrations.RunPython(forward, reverse)]
    +
    +

    For example:

    +
    from django.db import migrations
    +
    +from core.models import BookType
    +
    +
    +def forward(apps, schema_editor):
    +    items = [
    +        (1, "Hard Cover"),
    +        (2, "Soft Cover"),
    +    ]
    +    for uuid, name in items:
    +        BookType.objects.create(uuid=uuid, name=name)
    +
    +
    +def reverse(apps, schema_editor):
    +    BookType.objects.all().delete()
    +
    +
    +class Migration(migrations.Migration):
    +    dependencies = [("data", "0011_author_seed")]
    +
    +    operations = [migrations.RunPython(forward, reverse)]
    +
    +
  • +
+

In this example, 0011_author_seed is the name of the last migration file in core/data/migrations. You will also need to update this to the last python file in core/data/migrations having the format xxxx_<modelname_in_lower_case>_seed.py.


Run backend in venv

+

If you have a requirement to run on your local machine, or you are unable to get it to work on Docker, do the following steps. WARNING: If you run into issues, you will get limited support.

+

Run these commands from the app directory:

+
    +
  1. Copy .env.docker-example to .env.local
  2. +
  3. Inspect .env.local and change values as appropriate. The file includes instructions on how to use local postgres and sqlite for the database. sqlite requires no setup: it uses a file, db.sqlite3, which is created automatically if it does not exist.
  4. +
  5. Mac only: If you have a Mac, the python command may not be found and scripts will fail. Try to run python using the "python" command from the terminal. If you get an error that the python command is not found, type: alias python="python3"
  6. +
  7. Run these commands from the terminal in the project root.
  8. +
+
cd app
+
+# copy the env file
+cp .env.docker-example .env.local
+
+# create a virtual environment
+python -m venv venv
+
+# activate (enter) the virtual environment
+source venv/bin/activate
+# install dependencies
+pip install -r requirements.txt
+
+# start local server
+../scripts/start-local.sh
+# start server with alternate port
+# DJANGO_PORT=8001 ../scripts/start-local.sh
+
+# browse to http://localhost:8000 (or 8001) to see the app
+
+# Ctrl-C to stop the server
+
+# deactivate (exit) the virtual environment
+# to return to the system global environment
+deactivate
+
+

TIP: Look up direnv for a useful method to automatically enter and exit virtual environments based on the current directory.
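
For example, a minimal .envrc for this layout might look like the following (a hypothetical sketch; see the direnv docs for details). With this in place, direnv activates the venv whenever you cd into the app directory:

# .envrc (in the app directory)
+export VIRTUAL_ENV=venv
+PATH_add venv/bin
+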


Contributing

+

Thank you for volunteering your time! The following is a set of guidelines for contributing to the PeopleDepot repository, which is hosted on GitHub.

+

Please make sure you have completed the onboarding process which includes joining the Hack for LA Slack, GitHub, and Google Drive. If you have not been onboarded, see the Getting Started Page. Workshop attendees are granted a temporary exception from this requirement.


Working with Issues

+

Find an issue

+

Find an issue in Prioritized Backlog here

+

If you joined the PeopleDepot repository as described in a previous section:

+
    +
  1. Assign the issue to yourself and move it to "In progress" column.
  2. +
  3. Follow the steps in the issue description to complete the issue.
  4. +
  5. Make sure to comment your ETA and Availability when you first assign yourself.
  6. +
+

If you don't have privileges, add a comment that you are working on the issue.

+

Create a new branch

+

Once you have selected an issue to work on, create a branch for that issue.

+

Verify you are on the main branch.

+
git branch
+
+

You will see a list of all of your branches. There will be a star (*) next to the branch that you are currently in. By default you should start on the main branch.

+

If you are not currently in the main branch, run the following command to return to it:

+
git checkout main
+
+
git pull origin main
+
+

This ensures you have the most recent code, which is important if you previously cloned and it has been more than a day.

+

Create a new branch where you will work on the issue. The branch name should include the issue number. For example, to create a new branch for issue 15 and change into it:

+
git checkout -b <new-branch-name>-15
+
+

Make changes

+

Make changes to fix the issue.

+

Pull to get the most recent code

+

You can probably skip this if you fix the issue on the same day that you pulled the code.

+
git pull
+
+
+

If you are using Visual Studio Code, you can use the Git graphical user interface to stage your changes. For instructions, check out the Git GUI page in the website Wiki.

+
+

Add changed files to staging

+

Make sure you are on your issue branch (instead of main)

+
git branch
+
+

You must add your files to the staging area before you can commit (save them to git).

+

Run this command if you want to add changes from a specific file to your commit record:

+
git add "filename.ext"
+
+

Run this command if you want to add all changes to all file(s) to your commit record:

+
git add .
+
+

Check Git status

+

This command will list the files that have been staged with green text. These are the files that will be committed (saved) when you run the next command, git commit. Please be sure all your staged changes are relevant to the issue you are working on. If you accidentally included unrelated changes, please unstage them before making this commit, and then make a new commit for the unrelated changes. (The commands for unstaging commits are provided in the output of your git status command.)

+
git status
+
+

Remove files that you don't want staged

+

This command will unstage a file that you don't want included in the commit. The specified file will not be committed (saved) when you run the next command, git commit. This only works if the wrong files were added, but they were not yet committed. (See this tutorial for an in-depth discussion.) The file will be removed from the staging area, but not actually deleted:

+
git reset HEAD "filename.ext"
+
+

Commit staged changes

+

This command saves your work, and prepares it to push to your repository. Use the -m flag to quickly add a message to your commit. Your message should be a short description of the changes you made. It will be extremely helpful if other people can understand your message, so try to resist the temptation to be overly cryptic.

+

To commit your changes with a message, run:

+
git commit -m "insert message here"
+
+
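
For example, following the conventional commit style used elsewhere in this guide:

git commit -m "fix: remove duplicate setup step from the README"
+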

Ensure that your local repository is up-to-date with the main site:

+
git pull upstream
+
+

You can also sync your fork directly on GitHub by clicking "Sync Fork" at the right of the screen and then clicking "Update Branch"

+

Push to upstream origin (aka, your fork)

+

Push your local branch to your remote repository:

+
git push --set-upstream origin <your-branch-name>
+
+

Alternatively, you can run

+
git push
+
+

Create a pull request

+

Push all changes in your issue branch

+

Once you are satisfied with your changes, push them to the feature branch you made within your remote repository.

+
git push --set-upstream origin <name-of-branch>
+
+

Complete pull request from GitHub

+
    +
  1. Click the green button to create a Pull Request (PR)
  2. +
  3. Add a short title in the subject line
  4. +
  5. In the body of the comment, add the following, replacing <issue-number> with the issue you worked on:
  6. +
+
fixes #<issue-number>
+
+
    +
  1. Below this, add a brief description of the changes you made
  2. +
  3. Click the green "Create pull request" button
  4. +
  5. Add the PR to the project board
  6. +
+

Creating Issues

+

To create a new issue, please use the blank issue template (available when you click New Issue). If you want to create an issue for other projects to use, please create the issue in your own repository and send a slack message to one of your hack night hosts with the link.


Joining Repository Team

+

This step is optional if this is your first time fixing an issue and you want to try fixing an issue without this step.

+

In the People-depot Slack channel, send an introductory message with your GitHub handle/username asking to be added to the Hack for LA peopledepot GitHub repository and to be given access to the Google Drive and Figma.

+
+

Please do the following once you have accepted the GitHub invite (comes via email or in your GitHub notifications)

+

Make your own Hack for LA GitHub organization membership public by following this guide.

+

Docker

+

Working with Docker

+

Stopping Docker

+

To stop the service container, but not destroy it (often sufficient for day-to-day work):

+
docker-compose stop
+
+

To stop and destroy the service container:

+
docker-compose down
+
+

Add the -v flag to destroy the data volumes as well:

+
docker-compose down -v
+
+

Recycling / Refreshing Database

+

To restore a database to its original state and remove any data manually added, delete the container and image. From the Docker CLI:

+
+
+
+
docker-compose down -v
+
+
+
+
Or from Docker Desktop:
    +
  1. Open Containers section
  2. +
  3. Delete people-db-1 container
  4. +
  5. Open Images Tab
  6. +
  7. Remove djangorestapipostrgresql image
  8. +
+
+
+
+
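
The same cleanup can be done from the command line, using the container and image names from the steps above (stop the container first):

docker-compose stop
+docker container rm people-db-1
+docker image rm djangorestapipostrgresql
+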

Cache mount

+

This helps speed up subsequent docker builds by caching intermediate files and reusing them across builds. It's available with docker buildkit. The key here is to disable anything that could delete the cache, because we want to preserve it. The cache mount is not going to end up in the docker image being built, so there's no concern about disk space usage.

+

Put this flag between RUN and the command

+
RUN \
+  --mount=type=cache,target=/root/.cache \
+  pip install -r requirements.txt
+
+

For pip, the files are by default stored in /root/.cache/pip. Pip caching docs

+

For apk, the cache directory is /var/cache/apk/. APK wiki on local cache

+

For apt, the cache directory is /var/cache/apt/.

+
+

Alpine vs Debian based images

+

We're choosing to use an Alpine-based image for its smaller size and faster builds and downloads. However, a Debian-based image has the advantage of a large ecosystem of available packages; Alpine's more limited selection is something we may run up against in the future.

+

Switching to Debian

+

Here is how we can switch to a Debian-based image if we need to:

+
    +
  1. +

    Edit Dockerfile to look something like this

    +
    app/Dockerfile
    # pull official base image
    # (1)!
    +FROM python:3.10-bullseye
    +
    +# set work directory
    +WORKDIR /usr/src/app
    +
    +# set environment variables
    +ENV PYTHONDONTWRITEBYTECODE=1
    +ENV PYTHONUNBUFFERED=1
    +ENV PYTHONPYCACHEPREFIX=/root/.cache/pycache/
    +ENV PIP_CACHE_DIR=/var/cache/buildkit/pip
    +
    +RUN mkdir -p $PIP_CACHE_DIR
    +
    +# prevent cache deletion
    +# (2)!
    +RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
    +  echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
    +
    +# install system dependencies
    +# (3)!
    +RUN \
    +  --mount=type=cache,target=/var/cache/apt,sharing=locked \
    +  --mount=type=cache,target=/var/lib/apt,sharing=locked \
    +  apt-get update \
    +  && apt-get install --no-install-recommends -yqq \
    +  netcat=1.10-46 \
    +  gcc=4:10.2.1-1 \
    +  postgresql=13+225+deb11u1 \
    +  graphviz=2.42.2-5
    +
    +# install font for graphviz
    +COPY Roboto-Regular.ttf /root/.fonts/
    +RUN fc-cache -f
    +
    +# install dependencies
    +COPY ./requirements.txt .
    +# hadolint ignore=DL3042
    +# (4)!
    +RUN \
    +  --mount=type=cache,target=/root/.cache \
    +  pip install uv==0.1.15 \
    +  && uv pip install --system -r requirements.txt
    +
    +# copy entrypoint.sh
    +COPY ./entrypoint.sh .
    +RUN sed -i 's/\r$//g' /usr/src/app/entrypoint.sh \
    +  && chmod +x /usr/src/app/entrypoint.sh
    +
    +# copy project
    +COPY . .
    +
    +# run entrypoint.sh
    +ENTRYPOINT ["/usr/src/app/entrypoint.sh"]
    +
    +
    1. define base image
    2. prevent cache deletion
    3. install system dependencies
         • define cache mounts for apt and lib
         • install netcat for the db wait script, which is used in entrypoint.sh
         • install gcc for python local compiling, which shouldn't be needed
         • install postgresql for the dbshell management command
         • install graphviz for generating the ERD in erd.sh
    4. install uv for faster dependency resolution, which may or may not be wanted
  2. Use the dive tool to check the image layers for extra files that shouldn't be there, as in the sketch below.
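A minimal sketch of that check (the image tag here is hypothetical; use whatever tag your build actually produces):

docker build -t peopledepot-web ./app   # build the image
dive peopledepot-web                    # browse each layer for unexpected files
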
diff --git a/contributing/tools/index.html b/contributing/tools/index.html
diff --git a/contributing/tools/mkdocs/index.html b/contributing/tools/mkdocs/index.html

MkDocs

+

We are using MkDocs to generate our documentation. See Docker-mkdocs repo for information about MkDocs and the image we're using.

+

Work on docs locally

+
+

The first time starting the container may take longer due to downloading the ~40MB docker image

+
+
  1. Run the mkdocs container.

     docker-compose up mkdocs # (1)!

       1. Optionally use the -d flag to run the container in the background

  2. Open a browser to http://localhost:8005/ to view the documentation locally.

  3. Modify the files in the docs directory. The site will auto-update when the files are saved.

  4. Ctrl+C to quit the local server and stop the container.
+

Auto-generated docs

+

We have a GitHub Action set up to generate and host the documentation on a GitHub Pages site.

+
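The workflow file is the authoritative source for how this works; as a rough sketch, the manual equivalent of what such an action automates is MkDocs' built-in deploy command, which builds the site and pushes it to the gh-pages branch:

mkdocs gh-deploy --force
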

MkDocs syntax

+

We're using Material for MkDocs. Aside from standard markdown syntax, there is some MkDocs- and Material-specific syntax that can help make documentation more effective. See the Material reference docs for the complete set of syntax.

+

Here's a list of commonly used MkDocs syntax for quick reference.

+

Code Blocks

+
+
+
+
Code Block
@admin.register(RecurringEvent)
+class RecurringEventAdmin(admin.ModelAdmin):
+    list_display = (
+        "name",
+        "start_time",
+        "duration_in_min",
+    )
+
+
Numbered Lines
1 @admin.register(RecurringEvent)
2 class RecurringEventAdmin(admin.ModelAdmin):
3     list_display = (
4         "name",
5         "start_time",
6         "duration_in_min",
7     )
+
+
Highlighted Lines
@admin.register(RecurringEvent)
+class RecurringEventAdmin(admin.ModelAdmin):
+    list_display = (
+        "name",
+        "start_time",
+        "duration_in_min",
+    )
+
+
+
+
```python title="Code Block"
+@admin.register(RecurringEvent)
+class RecurringEventAdmin(admin.ModelAdmin):
+    list_display = (
+        "name",
+        "start_time",
+        "duration_in_min",
+    )
+```
+
+```python title="Numbered Lines" linenums="1"
+@admin.register(RecurringEvent)
+class RecurringEventAdmin(admin.ModelAdmin):
+    list_display = (
+        "name",
+        "start_time",
+        "duration_in_min",
+    )
+```
+
+```python title="Highlighted Lines" hl_lines="1 3 5"
+@admin.register(RecurringEvent)
+class RecurringEventAdmin(admin.ModelAdmin):
+    list_display = (
+        "name",
+        "start_time",
+        "duration_in_min",
+    )
+```
+
+
+
+
+

Code Annotations

+
+
+
+
Click the plus sign --> # (1)!
+
+
  1. This is an explanation text
+
+
+
``` bash
+Click the plus sign --> # (1)!
+```
+
+1. This is an explanation text
+
+
+
+
+

Text blocks

+
+
+
+
+

Simple Block

+
+
+

Example

+

Content Block Text

+
+
+Expandable Block +

Content

+
+
+Opened Expandable Block +

Content

+
+
+
+
!!! example "Simple Block"
+
+!!! example
+    Content Block Text
+
+??? example "Expandable Block"
+    Content
+
+???+ example "Opened Expandable Block"
+    Content
+
+
+
+
+

Tabbed content

+
+
+
+
+
+
+

linux-specific content

+
+
+

mac-specific content

+
+
+
+
+
+
=== "Linux"
+
+    linux-specific content
+
+=== "Mac"
+
+    mac-specific content
+
+
+
+
+

Buttons

+
+
+
+
  1. Ctrl+C to quit the local server and stop the container
+
+
+
1. ++ctrl+c++ to quit the local server and stop the container
+
+
+
+
diff --git a/contributing/tools/pre-commit/index.html b/contributing/tools/pre-commit/index.html

Pre-commit

+

The hooks run during normal git commit and git push commands. It's recommended to run these from the command line so you can see the output. If you commit or push from a GUI application, the interface may appear to hang for some time.

+

The pre-commit checks should be fast, while the pre-push hooks will take longer since they do a full rebuild.

+

Installation

+

It's recommended to install "global" tools via pipx, which installs packages in an isolated environment rather than the global python environment.

+
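A minimal sketch of getting pipx itself for step 1 below, assuming a working Python on the host (the pipx docs cover platform-specific alternatives):

python3 -m pip install --user pipx
python3 -m pipx ensurepath
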
  1. Install pipx (see the sketch above if it's not already on your system)

  2. Install pre-commit

     pipx install pre-commit

  3. Add the hook to git

     pre-commit install

     Pre-commit is now set up to check your files whenever you commit or push code.

  4. Test by adding an empty commit

     git commit --allow-empty -m "Test"

     You should see a list of checks that are all skipped, because there are no changes in the commit to test.

Extra information

+
  • To skip the checks temporarily, you can do one of these

    # skip all the hooks
    git commit --no-verify

    # skip specific hooks
    SKIP=black,flake8 git commit

  • Manually run the hooks (this runs them against all files rather than only changed files)

    pre-commit run --all-files

  • More commands to run the hooks

    # run the hooks for the push stage
    pre-commit run --all-files --hook-stage push

    # run the hooks for the commit stage
    pre-commit run --all-files --hook-stage commit

    # run only the test hook for the push stage
    pre-commit run test --all-files --hook-stage push

  • Update pre-commit and plugins to the latest version

    pre-commit autoupdate
diff --git a/contributing/tools/scripts/index.html b/contributing/tools/scripts/index.html

Convenience Scripts

+

These scripts are designed to make everyday project tasks easier. They aim to be transparent by exposing the underlying commands they execute, so users can see what's happening and learn the commands themselves if they wish.

+
scripts/
+├── buildrun.sh
+├── check-migrations.sh
+├── createsuperuser.sh
+├── db.sh
+├── erd.sh
+├── lint.sh
+├── loadenv.sh
+├── logs.sh
+├── migrate.sh
+├── precommit-check.sh
+├── run.sh
+├── start-local.sh
+├── test.sh
+└── update-dependencies.sh
+
+

These scripts assume you are using bash.

+
  1. buildrun.sh - clean, build, and run containers in background mode

       • Pass in -v to remove the data volume, which resets the local database (see the usage sketch after this list).
       • See the script file for more options.

  2. check-migrations.sh - check if migrations are up to date

  3. createsuperuser.sh - create a default superuser

       • This assumes that DJANGO_SUPERUSER_USERNAME and DJANGO_SUPERUSER_PASSWORD are set in .env.dev

  4. db.sh - connect to the database in the db container

       • This is a different route than manage.py dbshell, which requires the psql executable in the web container

  5. erd.sh - generate an ER diagram

       • The image is saved to app/erd.png
       • This script depends on the graphviz package

  6. lint.sh - lint and auto-format code

  7. loadenv.sh - load environment variables from .env.dev into the shell environment

  8. logs.sh - view/tail container logs

  9. migrate.sh - run database migrations inside the container

       • Add <app> <migration_number> to migrate to that database state. Ex: migrate.sh core 0010

  10. precommit-check.sh - sanity checks before committing code

       • Calls buildrun.sh, lint.sh, and test.sh

  11. run.sh - start the development server in Docker, with some options

       • Pass in -h to show usage

  12. start-local.sh - start the development server natively

  13. test.sh - run tests and generate a test coverage report

       • Use the -k flag to filter tests. For example, test.sh -k program_area will select only tests with "program_area" in the name.
       • Pass in --no-cov to disable the coverage report; with a filtered run, the coverage report would otherwise show many missing lines of coverage.

  14. update-dependencies.sh - update python dependencies to the latest versions
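A few illustrative invocations from the repository root, using the flags described above:

./scripts/buildrun.sh -v                     # rebuild and reset the local database
./scripts/test.sh -k program_area --no-cov   # run matching tests, skip coverage
./scripts/migrate.sh core 0010               # migrate the core app to state 0010
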
diff --git a/contributing/tools/uv/index.html b/contributing/tools/uv/index.html

uv

+

We're using uv as a faster replacement for pip and pip-tools. See the official documentation on getting started.

+

How we use it

+

We're using uv to compile and install python dependencies, which replaces the functionality of pip and pip-tools. uv can also create and maintain a virtual environment, but we're not using that for now. In fact, we're suppressing it with the --system option during uv pip install.

+

uv is already part of the docker image, so there's no need to install it on the host. Commands do need the docker-compose prefix to run inside the container, for example: docker-compose exec web uv pip compile requirements.in -o requirements.txt. We'll omit the docker-compose exec web prefix from here on in this document.

+
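For reference, the same command in both forms:

# full form, run from the host
docker-compose exec web uv pip compile requirements.in -o requirements.txt

# abbreviated form used in the rest of this page
uv pip compile requirements.in -o requirements.txt
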

requirements.in is the requirements file, and uv pip compile generates requirements.txt with pinned versions, similar to lock files in other languages.

+

Usage

+

Upgrade dependencies

+

We shouldn't run this on every build, but we should do this manually every month/quarter or so.

+
# docker-compose exec web
+uv pip compile requirements.in -o requirements.txt --no-header --upgrade
+
+

Or run the script

+
./scripts/update-dependencies.sh
+
+

pip compile options

+
+
--no-header
    Disable the header in the generated file. This avoids the unnecessary code churn caused by changing headers.

--upgrade
    Upgrade all dependencies.

--generate-hashes
    Generate pip-8-style hashes. Hashes improve security but are not verified by uv at the moment (verification is planned). Switch back to pip for installation if we need to verify hashes.

--no-annotate
    Disable annotations of where dependencies come from. This makes the generated file shorter but less informative.
+
+

See pip-compile docs for more options and explanation

+
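Putting the options together, an illustrative invocation (run inside the web container, as noted above):

uv pip compile requirements.in -o requirements.txt --no-header --no-annotate --upgrade
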

Install dependencies

+

This is used in the Dockerfile to install python dependencies.

+
uv pip install --system -r requirements.txt
+
+

pip install options

+
+
--system
    Install to the global environment, bypassing the virtual environment requirement.
+
+

See pip install docs for more options and explanation

+

Explanations

+

Global install

+

We're using the --system option in the Dockerfile to bypass the virtual environment requirement for uv. This is because the docker image is already a virtual environment separate from the host.

+

Version pinning

+

We're leaving most dependencies unpinned in requirements.in so that pip compile will pin the newest compatible versions in requirements.txt. The only manually pinned dependency is django~=4.2.0. Django's x.2 releases have long-term support, and we're on 4.2 since it's the latest LTS available.
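As an illustrative sketch of that policy (only django is pinned; the other names are examples, not the project's actual full list), requirements.in might look like:

django~=4.2.0
djangorestframework
psycopg2-binary
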

diff --git a/howto/authenticate_cognito/index.html b/howto/authenticate_cognito/index.html

Cognito authentication workflow (pre deployment)

+

This is a temporary solution until we can deploy a dev environment for PeopleDepot.

+

There are a few manual steps, and the login is good for only an hour at a time.

+

Prerequisites:

+ +

Steps:

+
  1. Login (or register first, then login) to a cognito account here. Do not worry if you see error messages - you will be using the URL.

  2. Copy the URL when it redirects. Note: Instead of the screen below, the screen may display an error message. You can ignore any error messages.

  3. Extract the access_token using the online tool.

       1. Clear the top box and paste the URL text into it. The box should show there's 1 match.
       2. The bottom box's content is the extracted access_token.

  4. Open ModHeader. If the icon is hidden, click on the Puzzle icon in the upper right of the browser to see it.

  5. Type the word Bearer and paste the token after it into the ModHeader Authorization header: Bearer <access_token>

  6. Go to a page in api/v1/ to see that it allows access. (A command-line equivalent is sketched after this list.)

  7. Explore APIs using Swagger.

  8. Some fields have hints on how to retrieve the values.

  9. A redoc ui is also available.
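For reference, a command-line equivalent of steps 5 and 6 (the host and port assume the local dev server; adjust as needed):

# TOKEN holds the access_token extracted in step 3
TOKEN="paste-access-token-here"
curl -H "Authorization: Bearer $TOKEN" http://localhost:8000/api/v1/
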
diff --git a/index.html b/index.html


PeopleDepot

+

PeopleDepot is a project of Hack for LA/Civic Tech Structure Inc. 501(c)(3). PeopleDepot aims to provide a single source of truth as the backend infrastructure and data store for Hack for LA projects, including data about people, program areas, and projects. PeopleDepot uses PostgreSQL for its database and Django as the backend data model framework with Django REST Framework for the API layer. PeopleDepot's goal is to serve as a repository of information for other infrastructure projects (e.g., VRMS, Hack for LA Website, Civic Tech Index, Tables, etc).

+

Project context

+

The hardest part about running a large organization using only free or open source tools and technologies is how to manage the flow of information and provide relevant info to all the people and projects that need it. Managing multiple databases inefficiently can end up taking more time than the projects themselves. This project seeks to create a maintainable database infrastructure that is synchronized.

+

In the process, it should allow for further automation and do away with manual storage of duplicate information across projects, which includes:

+
    +
  • Recruiting members (Website: Project info and meeting times)
  • Onboarding members to resources (e.g., GitHub, Google Calendar, Google Drive, Google Docs, Google Sheets, etc.)
  • Helping members find roles (Civic Tech Jobs: roles and project info)
  • Managing team permissions (VRMS: GitHub, Google Calendar, Google Drives, etc.)
+

Technology used

  • Docker
  • Django
  • DjangoRestFramework
  • PostgreSQL

How to contribute

+
  1. Join our organization by going through Hack for LA Onboarding. It's free to join!
  2. Read the onboarding section of our Wiki.
  3. Read our Contributing Guidelines and follow the instructions there.
+

Contact info

+

Contact us in the #people-depot channel on Slack.

+

Licensing

+

This repository uses the GNU General Public License (v2.0).

+ + + + + + + +
diff --git a/license/index.html b/license/index.html

License

+ +
                GNU GENERAL PUBLIC LICENSE
+                   Version 2, June 1991
+
+

Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed.

+
                        Preamble
+
+

The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too.

+

When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things.

+

To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it.

+

For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights.

+

We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software.

+

Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations.

+

Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all.

+

The precise terms and conditions for copying, distribution and +modification follow.

+
                GNU GENERAL PUBLIC LICENSE
+
+

TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

+
    +
  0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you".
+

Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does.

+
    +
  1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.
+

You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee.

+
    +
  2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:

     a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.

     b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.

     c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)
+

These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it.

+

Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program.

+

In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License.

+
    +
  3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:

     a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,

     b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,

     c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)
+

The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable.

+

If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code.

+
    +
  4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.

  5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.

  6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.

  7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.
+

If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances.

+

It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice.

+

This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License.

+
    +
  8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.

  9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+

Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation.

+
    +
  10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.

                        NO WARRANTY

  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

                 END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs
+

If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms.

+

To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found.

+
<one line to give the program's name and a brief idea of what it does.>
+Copyright (C) <year>  <name of author>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along
+with this program; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+

Also add information on how to contact you by electronic and paper mail.

+

If the program is interactive, make it output a short notice like this +when it starts in an interactive mode:

+
Gnomovision version 69, Copyright (C) year name of author
+Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+This is free software, and you are welcome to redistribute it
+under certain conditions; type `show c' for details.
+
+

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.

+

You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names:

+

Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker.

+

<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice

+

This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License.

diff --git a/ref/api_endpoints/index.html b/ref/api_endpoints/index.html

Api endpoints

+ +

We're using OpenAPI (swagger) for API documentation. We won't have a public URL for it until it's deployed. A ReDoc interface is also available.

+

These are the URLs in the local dev environment

+
  • http://localhost:8000/api/schema/swagger-ui/
  • http://localhost:8000/api/schema/redoc/
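Both UIs are served from /api/schema/ paths, so the raw OpenAPI schema itself is presumably available at the parent path; a hedged sketch of fetching it:

curl http://localhost:8000/api/schema/ -o schema.yaml
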
+ + + + + + +
+
+ + +
+ +
+ + + +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/search/search_index.json b/search/search_index.json new file mode 100644 index 00000000..d1acf113 --- /dev/null +++ b/search/search_index.json @@ -0,0 +1 @@ +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":""},{"location":"#peopledepot","title":"PeopleDepot","text":"

PeopleDepot is a project of Hack for LA/Civic Tech Structure Inc. 501(c)(3). PeopleDepot aims to provide a single source of truth as the backend infrastructure and data store for Hack for LA projects, including data about people, program areas, and projects. PeopleDepot uses PostgreSQL for its database and Django as the backend data model framework with Django REST Framework for the API layer. PeopleDepot's goal is to serve as a repository of information for other infrastructure projects (e.g., VRMS, Hack for LA Website, Civic Tech Index, Tables, etc).

"},{"location":"#project-context","title":"Project context","text":"

The hardest part about running a large organization using only free or open source tools and technologies is how to manage the flow of information and provide relevant info to all the people and projects that need it. Managing multiple databases inefficiently can end up taking more time than the projects themselves. This project seeks to create a maintainable database infrastructure that is synchronized.

In the process, it should allow for further automation and do away with manual storage of duplicate information across projects, which includes:

  • Recruiting members (Website: Project info and meeting times)
  • Onboarding members to resources (e.g., GitHub, Google Calendar, Google Drive, Google Docs, Google Sheets, etc.)
  • Helping members find roles (Civic Tech Jobs: roles and project info)
  • Managing team permissions (VRMS: GitHub, Google Calendar, Google Drives, etc.)
"},{"location":"#technology-used","title":"Technology used","text":"
  • Docker
  • Django
  • DjangoRestFramework
  • PostgreSQL
"},{"location":"#how-to-contribute","title":"How to contribute","text":"
  1. Join our organization by going through Hack for LA Onboarding. It's free to join!
  2. Read the onboarding section of our Wiki.
  3. Read our Contributing Guidelines and follow the instructions there.
"},{"location":"#contact-info","title":"Contact info","text":"

Contact us in the #people-depot channel on Slack.

"},{"location":"#licensing","title":"Licensing","text":"

This repository uses the GNU General Public License (v2.0).

"},{"location":"license/","title":"License","text":"
                GNU GENERAL PUBLIC LICENSE\n                   Version 2, June 1991\n

Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

                        Preamble\n

The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too.

When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things.

To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it.

For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.

We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software.

Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations.

Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all.

The precise terms and conditions for copying, distribution and modification follow.

                GNU GENERAL PUBLIC LICENSE\n

TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

  1. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The \"Program\", below, refers to any such program or work, and a \"work based on the Program\" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term \"modification\".) Each licensee is addressed as \"you\".

Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does.

  1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program.

You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee.

  1. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions:

    a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change.

    b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License.

    c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.)

These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it.

Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program.

In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License.

  1. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following:

    a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,

    b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or,

    c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.)

The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable.

If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code.

  1. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance.

  2. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it.

  3. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License.

  4. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program.

If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances.

It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice.

This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License.

  1. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License.

  2. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and \"any later version\", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation.

  1. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally.

                        NO WARRANTY\n
  2. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  3. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

                 END OF TERMS AND CONDITIONS

    How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the \"copyright\" line and a pointer to where the full notice is found.

<one line to give the program's name and a brief idea of what it does.>\nCopyright (C) <year>  <name of author>\n\nThis program is free software; you can redistribute it and/or modify\nit under the terms of the GNU General Public License as published by\nthe Free Software Foundation; either version 2 of the License, or\n(at your option) any later version.\n\nThis program is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\nGNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License along\nwith this program; if not, write to the Free Software Foundation, Inc.,\n51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n

Also add information on how to contact you by electronic and paper mail.

If the program is interactive, make it output a short notice like this when it starts in an interactive mode:

Gnomovision version 69, Copyright (C) year name of author\nGnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\nThis is free software, and you are welcome to redistribute it\nunder certain conditions; type `show c' for details.\n

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program.

You should also get your employer (if you work as a programmer) or your school, if any, to sign a \"copyright disclaimer\" for the program, if necessary. Here is a sample; alter the names:

Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker.

<signature of Ty Coon>, 1 April 1989\nTy Coon, President of Vice

This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License.

"},{"location":"architecture/Notes/","title":"Troubleshooting","text":""},{"location":"architecture/Notes/#modheader","title":"ModHeader","text":"
  • ModHeader will break Google Drive if an Authorization Bearer token is set. The Google Drive website pops up a dialog saying:

    \"You are not signed in.\nYou are signed out. Sign back in, then click 'Retry'.\nRetry\"\n

    The solution is to disable the Authorization header in ModHeader, and Google Drive will work as normal.

"},{"location":"architecture/authentication/","title":"Cognito authentication","text":""},{"location":"architecture/authentication/#what-is-it","title":"What is it","text":"

Cognito is a single sign-on system from AWS. It allows multiple apps to accept authentication from the same set of user accounts. It separates the management of users and permissions from the applications that use them.

"},{"location":"architecture/authentication/#why-we-use-cognito","title":"Why we use cognito","text":"

We are already invested in AWS infrastructure, so Cognito is the natural choice for single sign-on.

"},{"location":"architecture/authentication/#how-we-implement-it","title":"How we implement it","text":"

We're following the implementation from the djangostar tutorial.

These are the steps involved:

  1. Backend downloads JWKS from Cognito User Pool on launch
  2. User submits credentials and gets id_token and access_token
  3. User sends request with token
  4. Backend verifies token and processes request
  5. User gets response from authenticated API
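
To make step 4 concrete, here is a minimal sketch of verifying a Cognito token against the downloaded JWKS using PyJWT. This is illustrative only, not the project's actual implementation (the real helpers live in core/utils/jwt.py); the region and user pool id in the URL are placeholders.

# Hedged sketch: verify a Cognito-issued JWT with PyJWT's JWKS client.
# The region and <user-pool-id> below are placeholders, not project values.
import jwt
from jwt import PyJWKClient

JWKS_URL = (
    "https://cognito-idp.us-west-2.amazonaws.com/"
    "<user-pool-id>/.well-known/jwks.json"
)

def verify_cognito_token(token: str) -> dict:
    """Return the token's claims if the signature and expiry are valid."""
    signing_key = PyJWKClient(JWKS_URL).get_signing_key_from_jwt(token)
    return jwt.decode(
        token,
        signing_key.key,
        algorithms=["RS256"],
        # Cognito access tokens carry client_id rather than an aud claim.
        options={"verify_aud": False},
    )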
"},{"location":"architecture/authentication/#current-dev-setup","title":"Current Dev Setup","text":"
  1. Created app client called \"backend\" within the vrms-dev user pool, with ALLOW_ADMIN_USER_PASSWORD_AUTH enabled
  2. \"Domain Name\" is already created at https://hackforla-vrms-dev.auth.us-west-2.amazoncognito.com
  3. In \"App client settings\", enabled Implicit grant and openid, Callback URL http://localhost:8000/admin
"},{"location":"architecture/authentication/#how-it-works-now-with-the-dev-user-pool-and-local-development-backend","title":"How it works now with the dev user pool and local development backend","text":"
  1. Create a cognito user and login from the Hosted UI (from App client settings). Successful login will redirect to localhost:8000/admin with the necessary tokens
  2. Take the access_token from the URL and make a GET request to http://localhost:8000/api/v1/me (Headers key=Authorization, value=Bearer <access_token>)
  3. Backend should return the user's profile data
  4. "},{"location":"architecture/authentication/#notes","title":"Notes","text":"

    The tutorial is from 2020, and there have been some changes made since then.

    1. We created an app client in Cognito for the backend to interface with. ALLOW_ADMIN_USER_PASSWORD_AUTH is the new name for the old ADMIN_NO_SRP_AUTH setting. Reference
    2. In the custom User model step, the ugettext-lazy package is gettext-lazy for Django 4.0 Reference
    3. The tutorial steps don't include instructions to test each step, so it's a bit of following along blindly, with help from linters, until the last step.
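
    For note 2 above, the import change in the custom User model step is a one-liner (shown for reference; gettext_lazy is the Django 4.0 name for the removed ugettext_lazy):

      from django.utils.translation import gettext_lazy as _  # was: ugettext_lazy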
    "},{"location":"architecture/github_actions/","title":"Github actions","text":"

    These are the github actions used in the project.

    "},{"location":"architecture/github_actions/#files","title":"Files","text":"
    .github/workflows/\n\u2514\u2500\u2500 deploy-docs.yml # (1)!\n
    1. Deploy Documentation
      • triggered by commits to main
      • builds and deploys the mkdocs documentation to github pages.
    "},{"location":"architecture/github_actions/#actions-page-workflows","title":"Actions page workflows","text":"
    1. deploy-docs
      • see deploy-docs.yml above
    2. pages-build-deployment
      • The github-pages bot runs this automatically for any project that publishes to github pages.
      • It does extra work that we don't need, but there's no way to disable it. See here.
    "},{"location":"architecture/project_structure/","title":"Project Structure","text":"

    These are the directories and files in the project. Parts are summarized for clarity.

    "},{"location":"architecture/project_structure/#top-level","title":"Top level","text":"
    /\n\u251c\u2500\u2500 app/     # (1)!\n\u251c\u2500\u2500 docs/    # (2)!\n\u251c\u2500\u2500 scripts/ # (3)!\n\u251c\u2500\u2500 docker-compose.yml # (4)!\n\u2514\u2500\u2500 pyproject.toml # (5)!\n
    1. The django project. This is also what goes into the Docker image when it's generated by ./scripts/buildrun.sh. See Django Project below for details.
    2. Documentation for the project code. See Documentation below for details.
    3. Scripts used in the project. These are run in the commandline to do various project tasks. See Convenience Scripts for details.
    4. The docker compose file.
    5. The pyproject.toml file. This holds settings for project tools right now. We may combine this with app/setup.cfg in the future. We may move this file into app/ if it makes sense.
    "},{"location":"architecture/project_structure/#django-project","title":"Django project","text":"
    app/\n\u251c\u2500\u2500 core/ # (1)!\n\u251c\u2500\u2500 data/ # (2)!\n\u251c\u2500\u2500 peopledepot/ # (3)!\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 asgi.py\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 settings.py\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 urls.py\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 wsgi.py\n\u251c\u2500\u2500 scripts/ # (4)!\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 convert.py\n\u251c\u2500\u2500 Dockerfile # (5)!\n\u251c\u2500\u2500 entrypoint.sh # (6)!\n\u251c\u2500\u2500 manage.py # (7)!\n\u251c\u2500\u2500 requirements.in # (8)!\n\u251c\u2500\u2500 requirements.txt # (9)!\n\u2514\u2500\u2500 setup.cfg # (10)!\n
    1. The core app in django. This app contains the API and models. See Core App below for details.
    2. The data app in django. This app contains the initial data migrations. See Data App below for details.
    3. The django project configuration.
    4. Scripts used in the project. This currently contains the convert.py script, which converts csv files into django initial data code. It's used to generate code for the initial data migrations.
    5. Dockerfile used to build the Docker image.
    6. Entrypoint script called by the Docker image.
    7. Django manage.py script. In nearly all cases, there's no good reason to change this. Just leave it alone.
    8. Requirements.in file used by uv pip compile. See the uv tool for details.
    9. Requirements.txt file generated by uv pip compile. Do not modify this file directly; edit the requirements.in file instead. See the uv tool for details.
    10. Config file for development support tools such as flake8 and pytest. flake8 is the only tool that doesn't support pyproject.toml yet, which is why we have this file.
    "},{"location":"architecture/project_structure/#core-app","title":"Core App","text":"
    core/\n\u251c\u2500\u2500 admin.py # (1)!\n\u251c\u2500\u2500 api/\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 permissions.py # (2)!\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 serializers.py # (3)!\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 urls.py # (4)!\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 views.py # (5)!\n\u251c\u2500\u2500 apps.py # (6)!\n\u251c\u2500\u2500 initial_data/\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 ...\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 Initial data in json or csv format # (7)!\n\u251c\u2500\u2500 migrations/\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 nnnn_migration_name.py # (8)!\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 ...\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 max_migration.txt # (9)!\n\u251c\u2500\u2500 models.py # (10)!\n\u251c\u2500\u2500 scripts/ # (11)!\n\u251c\u2500\u2500 tests/\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 conftest.py # (12)!\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 test_api.py # (13)!\n\u2502\u00a0\u00a0 \u251c\u2500\u2500 test_models.py # (14)!\n\u2502\u00a0\u00a0 \u2514\u2500\u2500 test_permissions.py # (15)!\n\u2514\u2500\u2500 utils/ # (16)!\n.   \u2514\u2500\u2500 jwt.py # (17)!\n
    1. Admin site configuration.
    2. Permission classes definitions.
    3. Serializers to control what data is sent to the client.
    4. Routes for the API.
    5. Views to retrieve data for the API.
    6. AppConfig for the core app.
    7. Initial data scripts. See Create initial data scripts for how to create these.
    8. Migration scripts. These are generated by the makemigrations command.
    9. File used by django-linear-migrations. It stores the last migration name for the app for use in git merge conflicts.
    10. Models for the core app.
    11. Scripts for the core app. We use it to hold temporary scripts in Create initial data migrations, but there's no need to commit them into git.
    12. Test fixtures file
    13. Test for the API
    14. Test for the models
    15. Test for the permissions
    16. Utility scripts for the core app
    17. Utility functions supporting JWT with Cognito
    "},{"location":"architecture/project_structure/#data-app","title":"Data App","text":"
    data/\n\u2514\u2500\u2500 migrations/\n.\u00a0\u00a0 \u251c\u2500\u2500 nnnn_migration_name.py # (1)!\n.\u00a0\u00a0 \u251c\u2500\u2500 ...\n.   \u2514\u2500\u2500 max_migration.txt # (2)!\n
    1. Migration scripts. See Create initial data migrations for how to create these.
    2. File used by django-linear-migrations. It stores the last migration name for the app for use in git merge conflicts.
    "},{"location":"architecture/project_structure/#documentation","title":"Documentation","text":"
    /\n\u251c\u2500\u2500 docs/\n\u2502   \u251c\u2500\u2500 [topics]/ # (1)!\n\u2502   \u251c\u2500\u2500 CONTRIBUTING.md # (2)!\n\u2502   \u251c\u2500\u2500 index.md # (3)!\n\u2502   \u251c\u2500\u2500 LICENSE # (4)!\n\u2502   \u251c\u2500\u2500 license.md # (5)!\n\u2502   \u2514\u2500\u2500 _static/\n\u251c\u2500\u2500 CONTRIBUTING.md # (6)!\n\u251c\u2500\u2500 LICENSE # (7)!\n\u251c\u2500\u2500 mkdocs.yml # (8)!\n\u2514\u2500\u2500 README.md # (9)!\n
    1. Directories containing markdown files on different topics.
    2. Placeholder for the CONTRIBUTING.md file in the project root. MkDocs requires all documentation files to be in the docs/ directory. This file uses a snippet to import the source content.
    3. Home page of the documentation site. This file uses a snippet to import the README.md file from the project root.
    4. Placeholder LICENSE file. This file uses a snippet to import the LICENSE file from the project root. This is used for linking from the Documentation Homepage as well as the README.md file in the Github web interface, which knows the file by this name only.
    5. Placeholder license.md file. This file uses a snippet to import the LICENSE file from the project root. This is used for the MkDocs nav section, which requires the md file extension.
    6. Contributing file for the project. This name is capitalized according to Github conventions.
    7. License file for the project. This name is capitalized according to Github conventions.
    8. MkDocs config file.
    9. README file for the project. This name is capitalized according to Github conventions.
    "},{"location":"contributing/","title":"Contributing","text":"

    Thank you for volunteering your time! The following is a set of guidelines for contributing to the PeopleDepot repository, which is hosted on GitHub.

    Please make sure you have completed the onboarding process which includes joining the Hack for LA Slack, GitHub, and Google Drive. If you have not been onboarded, see the Getting Started Page. Workshop attendees are granted a temporary exception from this requirement.

    • Joining the team

    • Setting up the Development Environment

      • Pre-requisites
      • Fork the repository
      • Build and run locally
    • Working with Issues

    • Working with Git

    • Documentation

    • How-to Guides

    • Tools

    "},{"location":"contributing/dev_environment/","title":"Development Environment","text":""},{"location":"contributing/dev_environment/#pre-requisites","title":"Pre-requisites","text":""},{"location":"contributing/dev_environment/#github-account","title":"GitHub account","text":"

    See here for creating a GitHub account. If you are not familiar with Git, this tutorial is recommended.

    "},{"location":"contributing/dev_environment/#two-factor-authentication","title":"Two-factor authentication","text":"

    Set up two-factor authentication on your account by following this guide.

    "},{"location":"contributing/dev_environment/#text-editor","title":"Text editor","text":"

    VS Code is recommended, but feel free to use a text editor of your choice.

    "},{"location":"contributing/dev_environment/#install-git","title":"Install Git","text":"

    Before cloning your forked repository to your local machine, you must have Git installed. You can find instructions for installing Git for your operating system here.

    Windows
    • We recommend installing Windows Subsystem for Linux (WSL). WSL provides a Linux-compatible environment that can prevent common errors during script execution.

    • After setting up WSL, install Git directly from the Linux terminal. This method can help avoid complications that sometimes arise when using Git Bash on Windows.

    • If you prefer Git Bash or encounter errors related to line endings when running scripts, the problem might be due to file conversions in Windows. To address this, configure Git as follows:

      git config --system core.autocrlf false\n

      Feel free to reach out in the Hack for LA Slack channel if you encounter any errors while running scripts on Windows.

    Mac

    Please note that if you have a Mac, the page offers several options (see the other option below if you need to conserve hard drive space), including:

    • an \u201ceasiest\u201d option (this version is fine for our use): This option would take just over 4GB.
    • a \u201cmore up to date\u201d option (not required but optional if you want it): This option prompts you to install an 8GB package manager called Homebrew.
    • Other option: If your computer is low on space, you can use this tutorial to install XCode Command Tools and a lighter version of Homebrew, and then install Git using the command brew install git, which in total only uses about 300MB.
    "},{"location":"contributing/dev_environment/#install-docker","title":"Install Docker","text":"

    Make sure docker and docker-compose are installed on your computer:

    docker -v\ndocker-compose -v\n

    The recommended installation method for your operating system can be found here.

    Feel free to reach out in the Hack for LA Slack channel if you have trouble installing docker on your system.

    More on using Docker and the concepts of containerization:

    • Get started with Docker
    "},{"location":"contributing/dev_environment/#fork-the-repository","title":"Fork the repository","text":"

    You can fork the hackforla/peopledepot repository by clicking Fork. A fork is a copy of the repository that will be placed on your GitHub account.

    It should create a URL that looks like the following -> https://github.com/<your_GitHub_user_name>/peopledepot

    For example -> https://github.com/octocat/peopledepot

    What you have created is a forked copy in a remote version on GitHub. It is not on your local machine yet.

    "},{"location":"contributing/dev_environment/#clone-a-copy-on-your-computer","title":"Clone a copy on your computer","text":"

    The following steps will clone (create) a local copy of the forked repository on your computer.

    1. Create a new folder in your computer that will contain hackforla projects.

      In your command line interface (Terminal, Git Bash, PowerShell), move to where you want your new folder to be placed and create it. After that, navigate into the folder (directory) you just created.

      For example:

      cd /projects\nmkdir hackforla\ncd hackforla\n
    2. From the hackforla directory created in the previous step, run:

      git clone https://github.com/<your_GitHub_user_name>/peopledepot.git\n

      For example if your GitHub username was octocat:

      git clone https://github.com/octocat/peopledepot.git\n

      You can also clone using SSH, which is more secure but requires more setup. Because of the additional setup, cloning using HTTPS as shown above is recommended.

    You should now have a new folder in your hackforla folder called peopledepot. Verify this by changing into the new directory:

    cd peopledepot\n
    "},{"location":"contributing/dev_environment/#verify-and-set-up-remote-references","title":"Verify and set up remote references","text":"

    Verify that your local cloned repository is pointing to the correct origin URL (that is, the forked repo on your own GitHub account):

    git remote -v\n

    You should see fetch and push URLs with links to your forked repository under your account (i.e. https://github.com/<your_GitHub_user_name>/peopledepot.git). You are all set to make working changes to the project on your local machine.

    However, we still need a way to keep our local repo up to date with the deployed project. To do so, you must add an upstream remote to incorporate changes made while you are working on your local repo. Run the following to add an upstream remote URL & update your local repo with recent changes to the hackforla version:

    git remote add upstream https://github.com/hackforla/peopledepot.git\ngit fetch upstream\n

    After adding the upstream remote, you should see it if you run git remote -v again:

    origin  https://github.com/<your_GitHub_user_name>/peopledepot.git (fetch)\norigin  https://github.com/<your_GitHub_user_name>/peopledepot.git (push)\nupstream        https://github.com/hackforla/peopledepot.git (fetch)\nupstream        https://github.com/hackforla/peopledepot.git (push)\n
    "},{"location":"contributing/dev_environment/#build-and-run-using-docker-locally","title":"Build and run using Docker locally","text":"
    1. Make sure the Docker service is running

      Docker (Engine)Docker Desktop
      sudo systemctl status docker\n

      It will show Active: active (running) if it's running.

      1. Start Docker Desktop
      2. Run docker container ls to verify Docker Desktop is running. If it is not running you will get the message: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
    2. Create an .env.docker file from .env.docker-example

      cp ./app/.env.docker-example ./app/.env.docker\n
    3. Build and run the project via the script (this includes running docker-compose up)

      ./scripts/buildrun.sh\n
    4. Create a super user for logging into the web admin interface

      docker-compose exec web python manage.py createsuperuser --no-input\n
    5. Browse to the web admin interface at http://localhost:8000/admin/ and confirm the admin site is running. Use DJANGO_SUPERUSER_USERNAME and DJANGO_SUPERUSER_PASSWORD from .env.docker for credentials.

    See our documentation for Working with Docker for more useful Docker commands.

    "},{"location":"contributing/dev_environment/#install-pre-commit","title":"Install pre-commit","text":"

    This will check your changes for common problems.

    See the Pre-commit page for installation instructions.

    For consistency, an automated bot will perform the same checks on the repository side when you open a pull request.

    "},{"location":"contributing/documentation/","title":"Documentation","text":"

    We highly encourage contributors to add and update documentation in the same pull request as the code. This will ensure that the docs and features are synchronized.

    Please see the MkDocs page for how to view documentation changes locally using MkDocs in Docker.

    "},{"location":"contributing/git/","title":"Working with Git","text":""},{"location":"contributing/git/#sync-main-changes","title":"Sync Main Changes","text":"

    Your fork of this repository on GitHub, and your local clone of that fork, will get out of sync with the (upstream) repository as others update the repository. (That's what has happened when you see something like \"This branch is 1 commit behind peopledepot:main\" on your forked repository.)

    One way to keep your fork up to date with this repository is to follow these instructions: Syncing your fork to the original repository via the browser

    You can also update your fork via the local clone of your fork, using these instructions. Assuming you have a local clone with remotes upstream (this repo) and origin (your GitHub fork of this repo):

    • First, you will need to create a local branch which tracks upstream/main. You will only need to do this once; you do not need to do this every time you want to incorporate upstream changes.

    Run the following two commands:

    git fetch upstream\ngit checkout -b upstream-main --track upstream/main\n

    If you have already created the branch upstream-main, the following commands will incorporate upstream changes:

    git checkout upstream-main # Move to the branch you want to merge with.\ngit pull  # This updates your tracking branch to match the main branch in this repository\ngit checkout main  # Move back to your main branch\ngit merge upstream-main  # Merge to bring your main current.\n

    If you do all your work on topic branches (as suggested above) and keep main free of local modifications, this merge should apply cleanly.

    Then push the merge changes to your GitHub fork:

    git push\n

    If you go to your online GitHub repository this should remove the message \"This branch is x commit behind peopledepot:main\".

    "},{"location":"contributing/issues/","title":"Working with Issues","text":""},{"location":"contributing/issues/#find-an-issue","title":"Find an issue","text":"

    Find an issue in Prioritized Backlog here

    If you joined the PeopleDepot repository as described in a previous section:

    1. Assign the issue to yourself and move it to the \"In progress\" column.
    2. Follow the steps in the issue description to complete the issue.
    3. Make sure to comment your ETA and Availability when you first assign yourself.

    If you don't have privileges, add a comment that you are working on the issue.

    "},{"location":"contributing/issues/#create-a-new-branch","title":"Create a new branch","text":"

    Once you have selected an issue to work on, create a branch for that issue.

    Verify you are on the main branch.

    git branch\n

    You will see a list of all of your branches. There will be a star (*) next to the branch that you are currently in. By default you should start on the main branch.

    If you are not currently in the main branch, run the following command to return to it:

    git checkout main\n
    git pull origin main\n

    This ensures you have the most recent code, which is important if you previously cloned and it has been more than a day.

    Create a new branch where you will work on the issue. The branch name should include the issue number. For example, to create a new branch for issue 15 and change into it:

    git checkout -b <new-branch-name>-15\n
    "},{"location":"contributing/issues/#make-changes","title":"Make changes","text":"

    Make changes to fix the issue.

    "},{"location":"contributing/issues/#pull-to-get-the-most-recent-code","title":"Pull to get the most recent code","text":"

    You can probably skip this if you fix the issue on the same day that you pulled the code.

    git pull\n

    If you are using Visual Studio Code, you can use its Git graphical user interface to stage your changes. For instructions, check out the Git GUI page in the website Wiki.

    "},{"location":"contributing/issues/#add-changed-files-to-staging","title":"Add changed files to staging","text":"

    Make sure you are on your issue branch (instead of main)

    git branch\n

    You must add your files to the staging area before you can commit (save them to git).

    Run this command if you want to add changes from a specific file to your commit record:

    git add "filename.ext"\n

    Run this command if you want to add all changes in all files to your commit record:

    git add .\n
    "},{"location":"contributing/issues/#check-git-status","title":"Check Git status","text":"

    This command will list the files that have been staged with green text. These are the files that will be committed (saved) when you run the next command, git commit. Please be sure all your staged changes are relevant to the issue you are working on. If you accidentally included unrelated changes, please unstage them before making this commit, and then make a new commit for the unrelated changes. (The commands for unstaging commits are provided in the output of your git status command.)

    git status\n
    "},{"location":"contributing/issues/#remove-files-that-you-dont-want-staged","title":"Remove files that you don't want staged","text":"

    This command will unstage a file that you don't want included in the commit. The specified file will not be committed (saved) when you run the next command, git commit. This only works if the wrong files were added, but they were not yet committed. (See this tutorial for an in-depth discussion.) The file will be removed from the staging area, but not actually deleted:

    git reset HEAD "filename.ext"\n
    "},{"location":"contributing/issues/#commit-staged-changes","title":"Commit staged changes","text":"

    This command saves your work, and prepares it to push to your repository. Use the -m flag to quickly add a message to your commit. Your message should be a short description of the changes you made. It will be extremely helpful if other people can understand your message, so try to resist the temptation to be overly cryptic.

    To commit your changes with a message, run:

    git commit -m "insert message here"\n

    Ensure that your local repository is up-to-date with the main site:

    git pull upstream\n

    You can also sync your fork directly on GitHub by clicking \"Sync Fork\" at the right of the screen and then clicking \"Update Branch\"

    "},{"location":"contributing/issues/#push-to-upstream-origin-aka-your-fork","title":"Push to upstream origin (aka, your fork)","text":"

    Push your local branch to your remote repository:

    git push --set-upstream origin <your-branch-name>\n

    Alternatively, you can run

    git push\n
    "},{"location":"contributing/issues/#create-a-pull-request","title":"Create a pull request","text":""},{"location":"contributing/issues/#push-all-changes-in-your-issue-branch","title":"Push all changes in your issue branch","text":"

    Once you are satisfied with your changes, push them to the feature branch you made within your remote repository.

    git push --set-upstream origin <name-of-branch>\n
    "},{"location":"contributing/issues/#complete-pull-request-from-github","title":"Complete pull request from GitHub","text":"
    1. Click the green button to create a Pull Request (PR)
    2. Add a short title in the subject line
    3. In the body of the comment, add the following, replacing <issue-number> with the issue you worked on:
    fixes #<issue-number>\n
    4. Below this, add a brief description of the changes you made
    5. Click the green \"Create pull request\" button
    6. Add the PR to the project board
    "},{"location":"contributing/issues/#creating-issues","title":"Creating Issues","text":"

    To create a new issue, please use the blank issue template (available when you click New Issue). If you want to create an issue for other projects to use, please create the issue in your own repository and send a slack message to one of your hack night hosts with the link.

    "},{"location":"contributing/team/","title":"Joining Repository Team","text":"

    This step is optional: if this is your first time contributing, you may want to try fixing an issue before joining the team.

    In the People-depot Slack channel, send an introductory message with your GitHub handle/username, asking to be added to the Hack for LA peopledepot GitHub repository and to be given access to the Google Drive and Figma.

    Please do the following once you have accepted the GitHub invite (it comes via email or in your GitHub notifications):

    Make your own Hack for LA GitHub organization membership public by following this guide.

    "},{"location":"contributing/howto/","title":"How-to Guides","text":"

    These are the developer guides for how to do specific things with the project.

    1. Add model and API endpoints
    2. Create initial data migrations
    3. Run the project in a virtual environment
    "},{"location":"contributing/howto/add-model-and-api-endpoints/","title":"Add new model and API endpoints","text":"

    This guide aims to enable developers with little or no django experience to add django models and API endpoints to the project. Most code examples are followed by detailed explanations.

    The developer will have exposure to the following in this document:
    • python
    • django
    • django rest framework
    • relational database through the Django ORM (object-relational mapper)
    • data types
    • object-oriented concepts (object, inheritance, composition)
    • unit testing
    • API design
    • command line

    This guide assumes the developer has followed the working with issues guide, and has forked the repository and created a local branch to work on. The development server should already be running in the background and will automatically apply the changes when we save files.

    We will choose the recurring_event issue as an example. Our goal is to create a database table and an API that a client can use to work with the data. The work is split into 3 testable components: the model, the admin site, and the API.

    Let's start!

    "},{"location":"contributing/howto/add-model-and-api-endpoints/#data-model","title":"Data model","text":"TDD test
    1. Write the test

      We would like the model to store this data and to return the name property from its __str__ method.

      In app/core/tests/test_models.py

      app/core/tests/test_models.py
      def test_recurring_event_model(project):\nfrom datetime import datetime\npayload = {\n\"name\": \"test event\",\n\"start_time\": datetime(2023, 1, 1, 2, 34),\n\"duration_in_min\": 60,\n\"video_conference_url\": \"https://zoom.com/mtg/1234\",\n\"additional_info\": \"long description\",\n\"project\": project,\n}\nrecurring_event = RecurringEvent(**payload)\n# recurring_event.save()\nassert recurring_event.name == payload[\"name\"]\nassert recurring_event.start_time == payload[\"start_time\"]\nassert recurring_event.duration_in_min == payload[\"duration_in_min\"]\nassert recurring_event.video_conference_url == payload[\"video_conference_url\"]\nassert recurring_event.additional_info == payload[\"additional_info\"]\nassert recurring_event.project == payload[\"project\"]\nassert str(recurring_event) == payload[\"name\"]\n

      For testing many-to-many relationships, we can add

      app/core/tests/test_models.py
      def test_project_recurring_event_relationship(project):\nrecurring_event = RecurringEvent.objects.get(name=\"{Name of Recurring Event}\")\nproject.recurring_events.add(recurring_event)\nassert project.recurring_events.count() == 1\nassert project.recurring_events.contains(recurring_event)\nassert recurring_event.projects.contains(project)\nproject.recurring_events.remove(recurring_event)\nassert project.recurring_events.count() == 0\nassert not project.recurring_events.contains(recurring_event)\nassert not recurring_event.projects.contains(project)\n
    2. See it fail

      ./scripts/test.sh\n
    3. Run it again after implementing the model to make sure the code satisfies the test

    "},{"location":"contributing/howto/add-model-and-api-endpoints/#add-the-model","title":"Add the model","text":"

    Add the following to app/core/models.py

    app/core/models.py
    class RecurringEvent(AbstractBaseModel):  # (1)!\n\"\"\"\n    Recurring Events\n    \"\"\"\nname = models.CharField(max_length=255)\nstart_time = models.TimeField(\"Start\", null=True, blank=True)  # (2)!\nduration_in_min = models.IntegerField(null=True, blank=True)  # (3)!\nvideo_conference_url = models.URLField(blank=True)\nadditional_info = models.TextField(blank=True)  # (4)!\nproject = models.ForeignKey(Project, on_delete=models.CASCADE)\n# (5)!\n# location_id = models.ForeignKey(\"Location\", on_delete=models.DO_NOTHING)\n# event_type_id = models.ForeignKey(\"EventType\", on_delete=models.DO_NOTHING)\n# brigade_id = models.ForeignKey(\"Brigade\", on_delete=models.DO_NOTHING)\n# day_of_week = models.ForeignKey(\"DayOfWeek\", on_delete=models.DO_NOTHING)\n# must_roles = models.ManyToManyField(\"Role\")\n# should_roles = models.ManyToManyField(\"Role\")\n# could_roles = models.ManyToManyField(\"Role\")\n# frequency_id = models.ForeignKey(\"Frequency\", on_delete=models.DO_NOTHING)\ndef __str__(self):  # (6)!\nreturn f\"{self.name}\"\n
    1. We inherit all models from AbstractBaseModel, which provides a uuid primary key, created_at, and updated_at timestamps. In the Github issue, these fields might be called id, created, and updated. There's no need to add those.
    2. Most fields should not be required. Text fields should be blank=True; other data fields should be null=True.
    3. The data types in the github issue may be given in database column types such as INTEGER, VARCHAR, but we need to convert them into Django field types when defining the model.
    4. VARCHAR can be either CharField or TextField.
      1. CharField has a max_length, which makes it useful for finite-length text data. We're going to default to giving them max_length=255 unless there's a better value, like max_length=2 for state abbreviations.
      2. TextField doesn't have a maximum length, which makes it ideal for large text fields such as description.
    5. Try to add the relationships to non-existent models, but comment them out. Another developer will complete them when they go to implement those models.
    6. Always override the __str__ function to output something more meaningful than the default. It lets us do a quick test of the model by calling str([model]). It's also useful for the admin site model list view.
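
    For context, note 1's AbstractBaseModel provides roughly the following fields. This is a sketch inferred from the description above, not the literal class; the real definition lives in app/core/models.py.

    import uuid
    from django.db import models

    class AbstractBaseModel(models.Model):
        # uuid primary key plus audit timestamps, inherited by all models
        uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
        created_at = models.DateTimeField(auto_now_add=True)
        updated_at = models.DateTimeField(auto_now=True)

        class Meta:
            abstract = True  # no table of its own; fields are inherited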
    Updating models.py for many-to-many relationships

    For adding many-to-many relationships with additional fields, such as ended_on, we can add

    app/core/models.py
    class Project(AbstractBaseModel):\n...\nrecurring_events = models.ManyToManyField(\n\"RecurringEvent\",\nrelated_name=\"projects\",\nblank=True,\nthrough=\"ProjectRecurringEventXref\",\n)\n...\nclass ProjectRecurringEventXref(AbstractBaseModel):\n\"\"\"\n    Joins a recurring event to a project\n    \"\"\"\nrecurring_event_id = models.ForeignKey(RecurringEvent, on_delete=models.CASCADE)\nproject_id = models.ForeignKey(Project, on_delete=models.CASCADE)\nended_on = models.DateField(\"Ended on\", null=True, blank=True)\n

    For adding many-to-many relationships without additional fields, we can just add

    app/core/models.py
    class Project(AbstractBaseModel):\n...\nrecurring_events = models.ManyToManyField(\n\"RecurringEvent\",\nrelated_name=\"projects\",\nblank=True,\n)\n...\n

    which leaves out the \"through\" argument, so the \"join table\" will be created implicitly.

    "},{"location":"contributing/howto/add-model-and-api-endpoints/#run-migrations","title":"Run migrations","text":"

    This generates the database migration files

    ./scripts/migrate.sh\n
    Test

    Since we overrode the __str__ function, we need to write a test for it.

    1. Add a fixture for the model

      Fixtures are reusable code that can be used in multiple tests by declaring them as parameters of the test case. In this example, we show both defining a fixture (recurring_event) and using another fixture (project).

      Note: The conftest file is meant to hold shared test fixtures, among other things. The fixtures have directory scope.

      Add the following to app/core/tests/conftest.py

      app/core/tests/conftest.py
      @pytest.fixture\n# (1)!\ndef recurring_event(project):  # (2)!\n# (3)!\nreturn RecurringEvent.objects.create(name=\"Test Recurring Event\", project=project)\n
      1. We name the fixture after the model name (recurring_event).
      2. This model makes use of the project model as a foreign key relation, so we pass in the project fixture, which creates a project model (a hedged sketch of such a fixture appears after this list).
      3. We create an object of the new model, passing in at least the required fields. In this case, we passed in enough arguments to use the __str__ method in a test.
    2. Add a test case

      When creating Django models, there's no need to test the CRUD functionality since Django itself is well-tested and we can expect it to generate the correct CRUD functionality. Feel free to write some tests for practice. What really needs testing is any custom code that's not part of Django. Sometimes we need to override the default Django behavior, and that should be tested.

      Here's a basic test to see that the model stores its name.

      Add the following to app/core/tests/test_models.py

      app/core/tests/test_models.py
      def test_recurring_event(recurring_event):  # (1)!\n# (2)!\nassert str(recurring_event) == \"Test Recurring Event\"  # (3)!\n
      1. Pass in our fixture so that the model object is created for us.
      2. The __str__ method should be tested since it's an override of the default Django method.
      3. Write assertion(s) to check that what's passed into the model is what it contains. The simplest thing to check is the __str__ method.
    3. Run the test script to show it passing

      ./scripts/test.sh\n
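
    For reference, here is a hedged sketch of what the project fixture used in step 1 might look like. The actual fixture lives in app/core/tests/conftest.py, and the real Project model's required fields may differ.

    import pytest
    from core.models import Project

    @pytest.fixture
    def project():
        # Assumed minimal required field; check the real Project model.
        return Project.objects.create(name="Test Project")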
    Check and commit

    This is a good place to pause, check, and commit progress.

    1. Run pre-commit checks

      ./scripts/precommit-check.sh\n
    2. Add and commit changes

      git add -A\ngit commit -m \"feat: add model: recurring_event\"\n
    "},{"location":"contributing/howto/add-model-and-api-endpoints/#admin-site","title":"Admin site","text":"

    Django comes with an admin site interface that allows admin users to view and change the data in the models. It's essentially a database viewer.

    "},{"location":"contributing/howto/add-model-and-api-endpoints/#register-the-model","title":"Register the model","text":"

    In app/core/admin.py

    1. Import the new model

      app/core/admin.py
      from .models import RecurringEvent\n
    2. Register the model with the admin site

      app/core/admin.py
      @admin.register(RecurringEvent)  # (2)!\nclass RecurringEventAdmin(admin.ModelAdmin):  # (1)!\nlist_display = (  # (3)!\n\"name\",\n\"start_time\",\n\"duration_in_min\",\n)  # (4)!\n
      1. We declare a ModelAdmin class so we can customize the fields that we expose to the admin interface.
      2. We use the register decorator to register the class with the admin site.
      3. list_display controls what's shown in the list view
      4. list_filter adds filter controls to declared fields (useful, but not used here; a hypothetical example follows this list).
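
      A hypothetical use of list_filter for this admin class (the filter field is an assumption, not project code):

      from django.contrib import admin
      from core.models import RecurringEvent

      @admin.register(RecurringEvent)
      class RecurringEventAdmin(admin.ModelAdmin):
          list_display = ("name", "start_time", "duration_in_min")
          list_filter = ("project",)  # adds a sidebar filter by project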
    "},{"location":"contributing/howto/add-model-and-api-endpoints/#view-the-admin-site","title":"View the admin site","text":"

    Check that everything's working and there are no issues, which should be the case unless there are custom input fields creating problems.

    1. See the development setup guide section on \"Build and run using Docker locally\" for how to view the admin interface.

    2. Example of a custom field (as opposed to the built-in ones)

      # (1)!\ntime_zone = TimeZoneField(blank=True, use_pytz=False, default=\"America/Los_Angeles\")\n
      1. Having a misconfigured or buggy custom field could cause the admin site to crash and the developer will need to look at the debug message and resolve it.
    Test
    1. Feel free to write tests for the admin. There's no example for it yet.
    2. The reason there are no tests is that the admin site is independent of the API functionality, and we're mainly interested in the API part.
    3. When the time comes that we depend on the admin interface, we will need to have tests for the needed functionalities.
    Check and commit

    This is a good place to pause, check, and commit progress.

    1. Run pre-commit checks

      ./scripts/precommit-check.sh\n
    2. Add and commit changes

      git add -A\ngit commit -m \"feat: register admin: recurring_event\"\n
    "},{"location":"contributing/howto/add-model-and-api-endpoints/#api","title":"API","text":"

    There are several components to adding API endpoints: the model (already done), the serializer, the view, and the route.

    "},{"location":"contributing/howto/add-model-and-api-endpoints/#add-serializer","title":"Add serializer","text":"

    This is code that serializes objects into strings for the API endpoints, and deserializes strings into objects when we receive data from the client.

    In app/core/api/serializers.py

    Updating serializers.py for many-to-many relationships

    Following the many-to-many relationship between project and recurring event from above,

    Update the existing serializer classes

    app/core/api/serializers.py
    class ProjectSerializer(serializers.ModelSerializer):\n\"\"\"Used to retrieve project info\"\"\"\nrecurring_events = serializers.StringRelatedField(many=True)\nclass Meta:\nmodel = Project\nfields = (\n\"uuid\",\n\"name\",\n\"description\",\n\"created_at\",\n\"updated_at\",\n\"completed_at\",\n\"github_org_id\",\n\"github_primary_repo_id\",\n\"hide\",\n\"google_drive_id\",\n\"image_logo\",\n\"image_hero\",\n\"image_icon\",\n\"recurring_events\",\n)\nread_only_fields = (\n\"uuid\",\n\"created_at\",\n\"updated_at\",\n\"completed_at\",\n)\nclass RecurringEventSerializer(serializers.ModelSerializer):\n\"\"\"Used to retrieve recurring_event info\"\"\"\nprojects = serializers.StringRelatedField(many=True)\nclass Meta:\nmodel = RecurringEvent\nfields = (\n\"uuid\",\n\"name\",\n\"start_time\",\n\"duration_in_min\",\n\"video_conference_url\",\n\"additional_info\",\n\"project\",\n\"projects\",\n)\nread_only_fields = (\n\"uuid\",\n\"created_at\",\n\"updated_at\",\n)\n
    1. Import the new model

      app/core/api/serializers.py
      from core.models import RecurringEvent\n
    2. Add a serializer class

      app/core/api/serializers.py
      class RecurringEventSerializer(serializers.ModelSerializer):  # (1)!\n\"\"\"Used to retrieve recurring_event info\"\"\"\nclass Meta:\nmodel = RecurringEvent  # (2)!\nfields = (\n\"uuid\",\n\"name\",\n\"start_time\",\n\"duration_in_min\",\n\"video_conference_url\",\n\"additional_info\",\n\"project\",\n)\nread_only_fields = (\n\"uuid\",  # (3)!\n\"created_at\",\n\"updated_at\",\n)\n
      1. We inherit from ModelSerializer. It knows how to serialize/deserialize the Django built-in data fields so we don't have to write the code to do it.
      2. We do need to pass in the model, the fields we want to expose through the API, and any read_only_fields.
      3. uuid, created_at, and updated_at are managed by automations and are always read-only.
    3. Custom data fields may need extra code in the serializer

      time_zone = TimeZoneSerializerField(use_pytz=False)  # (1)!\n
      1. This non-built-in model field provides a serializer so we just point to it.
    4. Custom validators if we need them

      We will need to write custom validators here if we want custom behavior, such as validating URL strings and limiting them to the GitHub user profile pattern using a regular expression.

      Example here when we have one\n
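
      Until the project has one, here is a hedged sketch of such a validator. The regular expression and the field it would attach to are assumptions, not project code.

      import re
      from rest_framework import serializers

      GITHUB_PROFILE_RE = re.compile(r"^https://github\.com/[A-Za-z0-9-]+/?$")

      def validate_github_profile_url(value):
          """Reject values that don't look like a GitHub user profile URL."""
          if not GITHUB_PROFILE_RE.match(value):
              raise serializers.ValidationError("Expected a GitHub user profile URL.")
          return value

      # Hypothetical usage on a serializer field:
      # github_url = serializers.URLField(validators=[validate_github_profile_url])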
    "},{"location":"contributing/howto/add-model-and-api-endpoints/#add-viewset","title":"Add viewset","text":"

    Viewset defines the set of API endpoints for the model.

    In app/core/api/views.py

    1. Import the model

      app/core/api/views.py
      from ..models import RecurringEvent\n
    2. Import the serializer

      app/core/api/views.py
      from .serializers import RecurringEventSerializer\n
    3. Add the viewset and CRUD API endpoint descriptions

      app/core/api/views.py
      @extend_schema_view(  # (2)!\nlist=extend_schema(description=\"Return a list of all the recurring events\"),\ncreate=extend_schema(description=\"Create a new recurring event\"),\nretrieve=extend_schema(description=\"Return the details of a recurring event\"),\ndestroy=extend_schema(description=\"Delete a recurring event\"),\nupdate=extend_schema(description=\"Update a recurring event\"),\npartial_update=extend_schema(description=\"Patch a recurring event\"),\n)\nclass RecurringEventViewSet(viewsets.ModelViewSet):  # (1)!\npermission_classes = [IsAuthenticated]  # (4)!\nqueryset = RecurringEvent.objects.all()  # (3)!\nserializer_class = RecurringEventSerializer\n
      1. We inherit from ModelViewSet, which provides a default view implementation of all 6 CRUD actions: create, retrieve, partial_update, update, destroy, list.
      2. We use the extend_schema_view decorator to attach the API doc strings to the viewset. They are usually defined as docstrings of the corresponding function definitions inside the viewset. Since we use ModelViewSet, there's nowhere to put the docstrings but above the viewset.
      3. The minimum code we need with ModelViewSet are the queryset, and the serializer_class.
      4. Permissions
        1. For now use permission_classes = [IsAuthenticated]
        2. It doesn't control permissions the way we want, but we will fix it later.
    Extended example: Query Params

    This example shows how to add filter params. It's done for the User model as a requirement from VRMS.

    1. Here's a more complex API doc example (this example is using the User model's ViewSet)

      app/core/api/views.py
      @extend_schema_view(\nlist=extend_schema(  # (2)!\nsummary=\"Users List\",\ndescription=\"Return a list of all the existing users\",\nparameters=[\nOpenApiParameter(\nname=\"email\",\ntype=str,\ndescription=\"Filter by email address\",\nexamples=[\nOpenApiExample(\n\"Example 1\",\nsummary=\"Demo email\",\ndescription=\"get the demo user\",\nvalue=\"demo-email@email.com,\",\n),\n],\n),\nOpenApiParameter(\nname=\"username\",\ntype=OpenApiTypes.STR,\nlocation=OpenApiParameter.QUERY,\ndescription=\"Filter by username\",\nexamples=[\nOpenApiExample(\n\"Example 1\",\nsummary=\"Demo username\",\ndescription=\"get the demo user\",\nvalue=\"demo-user\",\n),\n],\n),\n],\n),\ncreate=extend_schema(description=\"Create a new user\"),  # (1)!\nretrieve=extend_schema(description=\"Return the given user\"),\ndestroy=extend_schema(description=\"Delete the given user\"),\nupdate=extend_schema(description=\"Update the given user\"),\npartial_update=extend_schema(description=\"Partially update the given user\"),\n)\nclass UserViewSet(viewsets.ModelViewSet):\npass\n
      1. Define strings for all 6 actions: create, retrieve, partial_update, update, destroy, list.
      2. This one is fancy and provides examples of data to pass into the query params. It's probably more than we need right now.
        1. The examples array can hold multiple examples.
          1. Example ID string has to be unique but is not displayed.
          2. summary string appears as an option in the dropdown.
          3. description is displayed in the example.
    2. Add any query params according to the requirements (this example is using the User model's ViewSet)

      app/core/api/views.py
      class UserViewSet(viewsets.ModelViewSet):\n...\ndef get_queryset(self):  # (1)!\n\"\"\"\n        Optionally filter users by an 'email' and/or 'username' query parameter in the URL\n        \"\"\"\nqueryset = get_user_model().objects.all()  # (2)!\nemail = self.request.query_params.get(\"email\")\nif email is not None:\nqueryset = queryset.filter(email=email)\nusername = self.request.query_params.get(\"username\")\nif username is not None:\nqueryset = queryset.filter(username=username)\nreturn queryset\n
      1. Notice the queryset property is now the get_queryset() function, which returns the queryset.

        The get_queryset() function overrides the default and lets us filter the objects returned to the client if they pass in a query param.

      2. Start with all the model objects and filter them based on any available query params.

    "},{"location":"contributing/howto/add-model-and-api-endpoints/#register-api-endpoints","title":"Register API endpoints","text":"

    In app/core/api/urls.py

    1. Import the viewset.

      app/core/api/urls.py
      from .views import RecurringEventViewSet\n
    2. Register the viewset to the router

      app/core/api/urls.py
      router.register(r\"recurring-events\", RecurringEventViewSet, basename=\"recurring-event\")\n# (1)!\n
      1. Params
        1. First param is the URL prefix used in the API routes. It is, by convention, plural
          • This would show up in the URL like this: http://localhost:8000/api/v1/recurring-events/ and http://localhost:8000/api/v1/recurring-events/<uuid>
        2. Second param is the viewset class which defines the API actions
        3. basename is the name used for generating the endpoint names, such as -list, -detail, etc. It's in the singular form. This is automatically generated if the viewset definition contains a queryset attribute, but it's required if the viewset overrides that with the get_queryset function
          • reverse(\"recurring-event-list\") would return http://localhost:8000/api/v1/recuring-events/
          Test

          For the CRUD operations, since we're using ModelViewSet where all the actions are provided by rest_framework and well-tested, it's not necessary to have test cases for them. But here's an example of one.

          In app/core/tests/test_api.py

          1. Import API URL

            app/core/tests/test_api.py
            RECURRING_EVENTS_URL = reverse(\"recurring-event-list\")\n
          2. Add test case

            app/core/tests/test_api.py
            def test_create_recurring_event(auth_client, project):\n\"\"\"Test that we can create a recurring event\"\"\"\npayload = {\n\"name\": \"Test Weekly team meeting\",\n\"start_time\": \"18:00:00\",\n\"duration_in_min\": 60,\n\"video_conference_url\": \"https://zoom.com/link\",\n\"additional_info\": \"Test description\",\n\"project\": project.uuid,\n}\nres = auth_client.post(RECURRING_EVENTS_URL, payload)\nassert res.status_code == status.HTTP_201_CREATED\nassert res.data[\"name\"] == payload[\"name\"]\n
            1. Given
              1. Pass in the necessary fixtures
              2. Construct the payload
            2. When
              1. Create the object
            3. Then
              1. Check that it's created via status code
              2. Maybe also check the data. A real test should check all the data, but we're kind of relying on django to have already tested this.
          3. Run the test script to show it passing

            ./scripts/test.sh\n
          Test many-to-many relationships

          In app/core/tests/test_api.py

          1. Import API URL

            app/core/tests/test_api.py
            PROJECTS_URL = reverse(\"project-list\")\n
          2. Add test case (following the example above)

            app/core/tests/test_api.py
            def test_project_recurring_event_xref(auth_client, project, recurring_event):\ndef get_object(objects, target_uuid):\nfor obj in objects:\nif str(obj[\"uuid\"]) == str(target_uuid):\nreturn obj\nreturn None\nproject.recurring_events.add(recurring_event)\nproj_res = auth_client.get(PROJECTS_URL)\ntest_proj = get_object(proj_res.data, project.uuid)\nassert test_proj is not None\nassert len(test_proj[\"recurring_events\"]) == 1\nassert recurring_event.name in test_proj[\"recurring_events\"]\nrecurring_event_res = auth_client.get(RECURRING_EVENTS_URL)\ntest_recurring_event = get_object(recurring_event_res.data, recurring_event.uuid)\nassert test_recurring_event is not None\nassert len(test_recurring_event[\"projects\"]) == 1\nassert project.name in test_recurring_event[\"projects\"]\n
          3. Run the test script to show it passing

            ./scripts/test.sh\n
          Check and commit

          This is a good place to pause, check, and commit progress.

          1. Run pre-commit checks

            ./scripts/precommit-check.sh\n
          2. Add and commit changes

            git add -A\ngit commit -m \"feat: add endpoints: recurring_event\"\n
          Push the code and start a PR

          Refer to the Issues page section on \"Push to upstream origin\" onward.

          "},{"location":"contributing/howto/create-initial-data-migrations/","title":"Create initial data scripts","text":""},{"location":"contributing/howto/create-initial-data-migrations/#overview","title":"Overview","text":"

          The goal is to convert our initial data into scripts that can be loaded into the database when the backend is set up for the first time.

          These are the steps:

          1. Export the data into a csv file
          2. Generate a python script from the csv data
          "},{"location":"contributing/howto/create-initial-data-migrations/#prerequisites","title":"Prerequisites","text":"

          You must have Docker installed

          The initial data exists in a Google spreadsheet, such as this one for People Depot. There should be individual sheets named after the model names the data correspond to, such as ProgramArea - Data. The sheet name is useful for us to identify the model it corresponds to.

          The sheet should be formatted like so:

          • the first row contains the field names from the model. The names must match the model's field names exactly
          • rows 2 to n are the initial data for the model we want to turn into a script.

          It is required that there be data in the first column of the sheet.

          "},{"location":"contributing/howto/create-initial-data-migrations/#gather-data-for-preparation","title":"Gather data for preparation","text":"
          1. Export the data from the Google spreadsheet

  1. Find the sheet in the document containing the data to export. Let's use the ProgramArea - Data sheet as our example.
            2. Go to File -> Download -> Comma Separated Values (.csv). This will download the sheet as a .csv file.
            3. Copy the file to the app/core/initial_data directory.
          "},{"location":"contributing/howto/create-initial-data-migrations/#convert-data-into-python-script","title":"Convert data into Python script","text":"
          1. Start Docker

          2. From project root, run

./scripts/buildrun.sh
          3. Go to the project root and run this command

docker-compose exec web python scripts/convert.py "core/initial_data/PD_ Table and field explanations  - ProgramArea - Data.csv"
          4. Check that there's a new file called app/core/scripts/programarea_seed.py and that it looks correct

  1. You can run it to verify it works, but you will need to remove the inserted data afterward if you care about restoring the database state

            2. Run this command to run the script

docker-compose exec web python manage.py runscript programarea_seed
  3. To remove the data, go into the database and delete all rows from core_programarea
docker-compose exec web python manage.py dbshell

# now we have a shell to the db
# see if all the seed data got inserted
select count(*) from core_programarea;
# shows 9 rows
delete from core_programarea;
# DELETE 9
select count(*) from core_programarea;
# shows 0 rows
# ctrl-d to exit dbshell
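For reference, the generated seed script is a plain runscript module with a run function, which matters for the next section. A minimal sketch of what app/core/scripts/programarea_seed.py might contain (the data values are hypothetical; convert.py's real output may differ):

from core.models import ProgramArea


def run():
    # one tuple per csv row: (uuid, name); values here are illustrative
    items = [
        (1, "Citizen Engagement"),
        (2, "Environment"),
    ]
    for uuid, name in items:
        ProgramArea.objects.create(uuid=uuid, name=name)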
          "},{"location":"contributing/howto/create-initial-data-migrations/#combine-script-in-migration","title":"Combine Script in Migration","text":"
• Look for the name of the last migration file in the core/data/migrations directory

          • Create a script in the same directory named <number>_<modelname_in_lower_case>_seed.py with the following contents and replace <modelname_in_lower_case>, ModelNameInPascalCase, and <name of last script> with appropriate values:

from django.db import migrations

from core.models import ModelNameInPascalCase


def forward(apps, schema_editor):
    # paste everything in the seed script's run function here
    # remove the pass below
    pass


def reverse(apps, schema_editor):
    ModelNameInPascalCase.objects.all().delete()


class Migration(migrations.Migration):
    dependencies = [("data", "<name of last script, or contents of max_migration.txt>")]
    operations = [migrations.RunPython(forward, reverse)]

            For example:

from django.db import migrations

from core.models import BookType


def forward(apps, schema_editor):
    items = [
        (1, "Hard Cover"),
        (2, "Soft Cover"),
    ]
    for uuid, name in items:
        BookType.objects.create(uuid=uuid, name=name)


def reverse(apps, schema_editor):
    BookType.objects.all().delete()


class Migration(migrations.Migration):
    dependencies = [("data", "0011_author_seed")]
    operations = [migrations.RunPython(forward, reverse)]

In this example, 0011_author_seed is the name of the last migration file in core/data/migrations. You will need to update this to the latest python file in core/data/migrations matching the format xxxx_<modelname_in_lower_case>_seed.py.

          "},{"location":"contributing/howto/run-local/","title":"Run backend in venv","text":"

If you need to run the backend directly on your local machine, or you cannot get it working under Docker, follow these steps. WARNING: if you run into issues, expect limited support.

          Run these commands from the app directory:

          1. Copy .env.docker-example to .env.local
2. Inspect .env.local and change values as appropriate. The file includes instructions for using either local postgres or sqlite as the database. sqlite needs no setup: it uses the file db.sqlite3 and creates it automatically if it doesn't exist.
3. Mac only: the python command may not be found, causing scripts to fail. Try running "python" from the terminal; if you get an error that the command is not found, type: alias python="python3"
          4. Run these commands from the terminal in the project root.
cd app

# copy the env file
cp .env.docker-example .env.local

# create a virtual environment
python -m venv venv

# activate (enter) the virtual environment
source venv/bin/activate

# install dependencies
pip install -r requirements.txt

# start local server
../scripts/start-local.sh

# start server with alternate port
# DJANGO_PORT=8001 ../scripts/start-local.sh

# browse to http://localhost:8000 (or 8001) to see the app
# Ctrl-C to stop the server

# deactivate (exit) the virtual environment
# to return to the system global environment
deactivate

          TIP: Look up direnv for a useful method to automatically enter and exit virtual environments based on the current directory.
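For example, a minimal .envrc at the project root could look like this (a sketch, assuming direnv is installed and you approve it once with direnv allow):

# .envrc: direnv creates and activates a virtualenv when you cd into the project
layout python python3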

          "},{"location":"contributing/tools/","title":"Tools","text":"

          These are the tools we use in the PeopleDepot project with notes on how we use them.

          • Convenience scripts
          • Docker for containerization
          • MkDocs for documentation
          • Pre-commit for linting
          • Uv for fast dependency resolution
          "},{"location":"contributing/tools/docker/","title":"Docker","text":""},{"location":"contributing/tools/docker/#working-with-docker","title":"Working with Docker","text":""},{"location":"contributing/tools/docker/#stopping-docker","title":"Stopping Docker","text":"

To stop the service containers without destroying them (often sufficient for day-to-day work):

docker-compose stop

          To stop and destroy the service container:

docker-compose down

          Add the -v flag to destroy the data volumes as well:

docker-compose down -v
          "},{"location":"contributing/tools/docker/#recycling-refreshing-database","title":"Recycling / Refreshing Database","text":"

To restore the database to its original state and remove any manually added data, delete the container and image, using either the terminal or Docker Desktop:

Terminal

docker-compose down -v

Docker Desktop

1. Open the Containers section
2. Delete the people-db-1 container
3. Open the Images tab
4. Remove the djangorestapipostrgresql image
          "},{"location":"contributing/tools/docker/#cache-mount","title":"Cache mount","text":"

          This helps speed up subsequent docker builds by caching intermediate files and reusing them across builds. It's available with docker buildkit. The key here is to disable anything that could delete the cache, because we want to preserve it. The cache mount is not going to end up in the docker image being built, so there's no concern about disk space usage.

          Put this flag between RUN and the command

RUN \
    --mount=type=cache,target=/root/.cache \
    pip install -r requirements.txt

          For pip, the files are by default stored in /root/.cache/pip. Pip caching docs

          For apk, the cache directory is /var/cache/apk/. APK wiki on local cache

          For apt, the cache directory is /var/cache/apt/.
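For example, a Debian-based install step can mount both apt directories like this (a sketch; the package chosen is just for illustration, and a fuller version appears in the Dockerfile example below):

RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update \
    && apt-get install --no-install-recommends -y graphviz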

          References
          • buildkit mount the cache
          • proper usage of mount cache
          • mount cache reference
          • buildkit dockerfile reference
          "},{"location":"contributing/tools/docker/#alpine-vs-debian-based-images","title":"Alpine vs Debian based images","text":"

          We're choosing to use an Alpine-based image for the smaller size and faster builds and downloads. However, a Debian-based image has the advantage of a large ecosystem of available packages, a limitation of Alpine that we may run up against in the future.

          "},{"location":"contributing/tools/docker/#switching-to-debian","title":"Switching to Debian","text":"

Here is how we can switch to a Debian-based image if we need to:

          1. Edit Dockerfile to look something like this

            app/Dockerfile
# pull official base image # (1)!
FROM python:3.10-bullseye

# set work directory
WORKDIR /usr/src/app

# set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV PYTHONPYCACHEPREFIX=/root/.cache/pycache/
ENV PIP_CACHE_DIR=/var/cache/buildkit/pip
RUN mkdir -p $PIP_CACHE_DIR

# prevent cache deletion # (2)!
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
    echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

# install system dependencies # (3)!
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update \
    && apt-get install --no-install-recommends -yqq \
    netcat=1.10-46 \
    gcc=4:10.2.1-1 \
    postgresql=13+225+deb11u1 \
    graphviz=2.42.2-5

# install font for graphviz
COPY Roboto-Regular.ttf /root/.fonts/
RUN fc-cache -f

# install dependencies # (4)!
COPY ./requirements.txt .
# hadolint ignore=DL3042
RUN \
    --mount=type=cache,target=/root/.cache \
    pip install uv==0.1.15 \
    && uv pip install --system -r requirements.txt

# copy entrypoint.sh
COPY ./entrypoint.sh .
RUN sed -i 's/\r$//g' /usr/src/app/entrypoint.sh \
    && chmod +x /usr/src/app/entrypoint.sh

# copy project
COPY . .

# run entrypoint.sh
ENTRYPOINT ["/usr/src/app/entrypoint.sh"]

  1. define base image (python:3.10-bullseye replaces the previous python:3.10-alpine)
  2. prevent cache deletion
  3. define cache mounts for apt and lib, then install system dependencies:
     • netcat for the db wait script, which is used in entrypoint.sh
     • gcc for python local compiling, which shouldn't be needed
     • postgresql for the dbshell management command
     • graphviz for generating the ERD in erd.sh
  4. install uv for faster dependency resolution, which may or may not be wanted

2. Use the dive tool to check the image layers for extra files that shouldn't be there.
          3. "},{"location":"contributing/tools/mkdocs/","title":"MkDocs","text":"

            We are using MkDocs to generate our documentation. See Docker-mkdocs repo for information about MkDocs and the image we're using.

            "},{"location":"contributing/tools/mkdocs/#work-on-docs-locally","title":"Work on docs locally","text":"

The first start of the container may take longer while the ~40MB docker image downloads.

            1. Run the mkdocs container.

docker-compose up mkdocs # (1)!
              1. Optionally use the -d flag to run the container in the background
            2. Open a browser to http://localhost:8005/ to view the documentation locally.

            3. Modify the files in the docs directory. The site will auto-update when the files are saved.

            4. Ctrl+C to quit the local server and stop the container

            "},{"location":"contributing/tools/mkdocs/#auto-generated-docs","title":"Auto-generated docs","text":"

We have a GitHub Action set up to generate and host the documentation on a GitHub Pages site.

            "},{"location":"contributing/tools/mkdocs/#mkdocs-syntax","title":"MkDocs syntax","text":"

We're using Material for MkDocs. Aside from standard markdown syntax, there is some MkDocs and Material-specific syntax that helps write more effective documentation. See the Material reference docs for the complete set of syntax.

            Here's a list of commonly used MkDocs syntax for quick reference.

            "},{"location":"contributing/tools/mkdocs/#code-blocks","title":"Code Blocks","text":"ExampleCode Code Block
@admin.register(RecurringEvent)
class RecurringEventAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "start_time",
        "duration_in_min",
    )
            Numbered Lines
@admin.register(RecurringEvent)
class RecurringEventAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "start_time",
        "duration_in_min",
    )
            Highlighted Lines
@admin.register(RecurringEvent)
class RecurringEventAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "start_time",
        "duration_in_min",
    )
Code

```python title="Code Block"
@admin.register(RecurringEvent)
class RecurringEventAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "start_time",
        "duration_in_min",
    )
```

```python title="Numbered Lines" linenums="1"
@admin.register(RecurringEvent)
class RecurringEventAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "start_time",
        "duration_in_min",
    )
```

```python title="Highlighted Lines" hl_lines="1 3 5"
@admin.register(RecurringEvent)
class RecurringEventAdmin(admin.ModelAdmin):
    list_display = (
        "name",
        "start_time",
        "duration_in_min",
    )
```
            "},{"location":"contributing/tools/mkdocs/#code-annotations","title":"Code Annotations","text":"ExampleCode
Click the plus sign --> # (1)!
            1. This is an explanation text
Code

``` bash
Click the plus sign --> # (1)!
```

1. This is an explanation text
            "},{"location":"contributing/tools/mkdocs/#text-blocks","title":"Text blocks","text":"ExampleCode

            Simple Block

            Example

            Content Block Text

            Expandable Block

            Content

            Opened Expandable Block

            Content

Code

!!! example "Simple Block"

!!! example
    Content Block Text

??? example "Expandable Block"
    Content

???+ example "Opened Expandable Block"
    Content
            "},{"location":"contributing/tools/mkdocs/#tabbed-content","title":"Tabbed content","text":"ExampleCode LinuxMac

            linux-specific content

            mac-specific content

            === \"Linux\"\n\n    linux-specific content\n\n=== \"Mac\"\n\n    mac-specific content\n
            "},{"location":"contributing/tools/mkdocs/#buttons","title":"Buttons","text":"ExampleCode
            1. Ctrl+C to quit the local server and stop the container
Code

1. ++ctrl+c++ to quit the local server and stop the container
            "},{"location":"contributing/tools/pre-commit/","title":"Pre-commit","text":"

The hooks run during normal git commit and git push commands. It's recommended to run these from the command line so you can see the output; from a GUI application, the interface may appear to hang while the hooks run.

The pre-commit checks should be fast, while the pre-push hooks will take longer since they do a full rebuild.

            "},{"location":"contributing/tools/pre-commit/#installation","title":"Installation","text":"

It's recommended to install "global" tools via pipx, which installs packages in an isolated environment rather than the global python environment.

            1. Install pipx

            2. Install pre-commit

pipx install pre-commit
            3. Add the hook to git

pre-commit install

              Pre-commit is now set up to check your files whenever you commit or push code.

            4. Test by adding an empty commit

git commit --allow-empty -m "Test"

You should see a list of hooks that are all skipped, because there are no changes in the commit to test.

            "},{"location":"contributing/tools/pre-commit/#extra-information","title":"Extra information","text":"
            • To skip the checks temporarily, you can do one of these

# skip all the hooks
git commit --no-verify

# skip specific hooks
SKIP=black,flake8 git commit
• Manually run the hooks (this runs them against all files rather than only changed files)

pre-commit run --all-files
            • More commands to run the hooks

# run the hooks for the push stage
pre-commit run --all-files --hook-stage push

# run the hooks for the commit stage
pre-commit run --all-files --hook-stage commit

# run a specific hook (test) for the push stage
pre-commit run test --all-files --hook-stage push
            • Update pre-commit and plugins to the latest version

pre-commit autoupdate
            "},{"location":"contributing/tools/scripts/","title":"Convenience Scripts","text":"

            These are designed to make it easier to perform various everyday tasks in the project. They try to be transparent by exposing the underlying commands they execute so that users can have an idea of what's happening and try to learn the commands if they wish.

scripts/
├── buildrun.sh
├── check-migrations.sh
├── createsuperuser.sh
├── db.sh
├── erd.sh
├── lint.sh
├── loadenv.sh
├── logs.sh
├── migrate.sh
├── precommit-check.sh
├── run.sh
├── start-local.sh
├── test.sh
└── update-dependencies.sh

            These scripts assume you are using bash.

            1. buildrun.sh - clean, build, and run containers in background mode

              1. Pass in -v to remove data volume, which resets the local database.
              2. See the script file for more options.
            2. check-migrations.sh - check if migrations are up to date

            3. createsuperuser.sh - create a default superuser

              1. This assumes that DJANGO_SUPERUSER_USERNAME and DJANGO_SUPERUSER_PASSWORD are set in .env.dev
            4. db.sh - connect to the database in the db container

              1. This is a different route than manage.py dbshell, which requires the psql executable in the web container
            5. erd.sh - generate ER diagram

              • The image is saved to app/erd.png
              • This script is dependent on the graphviz package
6. lint.sh - lint and auto-format code

            7. loadenv.sh - load environment variables from .env.dev into shell environment

            8. logs.sh - view/tail container logs

            9. migrate.sh - run database migrations inside container

              1. Add <app> <migration_number> to migrate to that database state. Ex: migrate.sh core 0010
            10. precommit-check.sh - sanity checks before committing code

              1. Call buildrun.sh, lint.sh, and test.sh
            11. run.sh - start the development server in Docker, with some options

              1. Pass in -h to show usage
            12. start-local.sh - start the development server natively

            13. test.sh - run tests and generate test coverage report

  1. Use the -k flag to filter tests. For example, test.sh -k program_area will select only tests with "program_area" in the name.
  2. Pass in --no-cov to disable the coverage report. This is useful when filtering tests, since a partial run would otherwise report many missing lines of coverage. See the example after this list.
            14. update-dependencies.sh - update python dependencies to the latest versions
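For example, combining the two test.sh options above to run only the program_area tests without a coverage report:

./scripts/test.sh -k program_area --no-cov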

            "},{"location":"contributing/tools/uv/","title":"uv","text":"

We're using uv as a faster replacement for pip and pip-tools. See the official documentation on getting started.

            "},{"location":"contributing/tools/uv/#how-we-use-it","title":"How we use it","text":"

We're using uv to compile and install python dependencies, which replaces the functionality of pip and pip-tools. uv can also create and maintain a virtual environment, but we're not using that for now; in fact, we're suppressing it with the --system option during uv pip install.

            uv is already part of the docker image, so there's no need to install it on the host. It does require prepending the docker-compose information to run, for example: docker-compose exec web uv pip compile requirements.in -o requirements.txt. We'll omit the docker-compose exec web portion from now on in this document.

requirements.in is the requirements file, and uv pip compile generates requirements.txt with pinned versions, similar to lock files in other languages.

            "},{"location":"contributing/tools/uv/#usage","title":"Usage","text":""},{"location":"contributing/tools/uv/#upgrade-depencencies","title":"Upgrade depencencies","text":"

            We shouldn't run this on every build, but we should do this manually every month/quarter or so.

# docker-compose exec web
uv pip compile requirements.in -o requirements.txt --no-header --upgrade

            Or run the script

./scripts/update-dependencies.sh
            "},{"location":"contributing/tools/uv/#pip-compile-options","title":"pip compile options","text":"Disable header in the generated file --no-header This solves the problem unnecessary code churn caused by changing headers Upgrade all dependencies --upgrade Generate pip-8 style hashes --generate-hashes Hashes improve security but are not verified by uv at the moment. It is planned. Switch back to pip for installation if we need to verify hashes. Disable annotation of where dependencies come from --no-annotate This makes the generated file shorter but less informative

            See pip-compile docs for more options and explanation

            "},{"location":"contributing/tools/uv/#install-dependencies","title":"Install dependencies","text":"

            This is used in the Dockerfile to install python dependencies.

uv pip install --system -r requirements.txt
            "},{"location":"contributing/tools/uv/#pip-install-options","title":"pip install options","text":"Install to global --system bypass the virtual environment requirement

            See pip install docs for more options and explanation

            "},{"location":"contributing/tools/uv/#explanations","title":"Explanations","text":""},{"location":"contributing/tools/uv/#global-install","title":"Global install","text":"

            We're using the --system option in the Dockerfile to bypass the virtual environment requirement for uv. This is because the docker image is already a virtual environment separate from the host.

            "},{"location":"contributing/tools/uv/#version-pinning","title":"Version pinning","text":"

We're leaving most dependencies unpinned in requirements.in so that pip compile will pin the newest compatible versions in requirements.txt. The only manually pinned dependency is django~=4.2.0: the x.2 releases have long-term support, and 4.2 is the latest LTS available.
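As an illustration, requirements.in might contain entries like these (the django pin is the real one; the other package name is a hypothetical example), which uv pip compile resolves into exact pins in requirements.txt:

# requirements.in
django~=4.2.0        # manually pinned to the LTS line
djangorestframework  # unpinned; compile picks the newest compatible version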

            "},{"location":"howto/authenticate_cognito/","title":"Cognito authentication workflow (pre deployment)","text":"

            This is a temporary solution until we can deploy a dev environment for PeopleDepot.

There are a few manual steps, and the login is good for only an hour at a time.

            Prerequisites:

            • ModHeader browser extension

            Steps:

1. Log in to a cognito account here (or register first, then log in). Do not worry if you see error messages - you will only be using the URL.

2. Copy the URL after it redirects. Note: the page may display an error message instead of a success screen; you can ignore it.


            3. Extract the access_token using the online tool.

  1. Clear the top box and paste the URL text into it. The box should show there is 1 match.
              2. The bottom box's content is the extracted access_token

            4. Open ModHeader. If the icon is hidden, click on the Puzzle icon in the upper right of the browser to see it.

5. In ModHeader, set the Authorization header: type the word Bearer, then a space, then paste the token, so the value reads Bearer <access_token>

            6. Go to a page in api/v1/ to see that it allows access

            7. Explore APIs using Swagger

            8. Some fields have hints on how to retrieve the values.

9. A ReDoc UI is also available
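Once you have the token, you can also check an endpoint from the command line instead of the browser (a sketch: <access_token> is a placeholder, and the URL assumes the local dev server):

curl -H "Authorization: Bearer <access_token>" http://localhost:8000/api/v1/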

            10. "},{"location":"ref/api_endpoints/","title":"Api endpoints","text":"

              We're using OpenAPI (swagger) for API documentation. We won't have a public URL for it until it's deployed. A ReDoc interface is also available.

              These are the URLs in the local dev environment

              • http://localhost:8000/api/schema/swagger-ui/
              • http://localhost:8000/api/schema/redoc/
              "}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 00000000..0f8724ef --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 0000000000000000000000000000000000000000..18c6e92170c831ca88964ae7b1d2d1c9d1421052 GIT binary patch literal 127 zcmV-_0D%7=iwFpSnlfhs|8r?{Wo=<_E_iKh04<9_3V)_WXo8&M?ytk3HC}0~zlG)Vu