diff --git a/.github/workflows/apply-ruff.yml b/.github/workflows/apply-ruff.yml
new file mode 100644
index 0000000..0cd9136
--- /dev/null
+++ b/.github/workflows/apply-ruff.yml
@@ -0,0 +1,37 @@
+name: Apply ruff format, isort, and fixes
+
+on:
+  workflow_call:
+    inputs:
+      ruff-select:
+        description: 'ruff select'
+        default: I,D20,D21,UP00,UP032,UP034
+        type: string
+      ruff-ignore:
+        description: 'ruff ignore'
+        default: D212
+        type: string
+      ruff-version-file:
+        description: The requirements.txt file that contains the Ruff version (ruff==x.x.x)
+        required: true
+        type: string
+
+jobs:
+  apply-ruff:
+    name: Apply ruff
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install ruff
+        run: |
+          pip3 install -r <(grep '^ruff==' "${{ inputs.ruff-version-file }}")
+      - name: Run ruff and push
+        run: |
+          # The step stops on any ruff failure
+          ruff check --select=${{ inputs.ruff-select }} --ignore=${{ inputs.ruff-ignore }} --fix --unsafe-fixes .
+          ruff format .
+          git config user.name github-actions[bot]
+          git config user.email github-actions[bot]@users.noreply.github.com
+          git add .
+          git commit -m "style: ruff format, isort, fixes [skip ci]"
+          git push
diff --git a/.github/workflows/check-cargo.yml b/.github/workflows/check-cargo.yml
new file mode 100644
index 0000000..29752cd
--- /dev/null
+++ b/.github/workflows/check-cargo.yml
@@ -0,0 +1,46 @@
+name: Cargo style checking
+
+on:
+  workflow_call:
+    inputs:
+      check-type:
+        description: fmt or clippy
+        default: clippy
+        type: string
+      working-directory:
+        description: Rust project root
+        default: rust
+        type: string
+
+jobs:
+  check-cargo:
+    name: check-cargo
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Run clippy or fmt
+        run: |
+          set +e  # Do not exit shell on cargo failure
+
+          cd ${{ inputs.working-directory }}
+
+          if [[ "${{ inputs.check-type }}" == "clippy" ]]; then
+            out=$(cargo clippy --all-targets --all-features -- -D warnings 2> app_stderr.txt)
+          elif [[ "${{ inputs.check-type }}" == "fmt" ]]; then
+            out=$(cargo fmt --all -- --check 2> app_stderr.txt)
+          else
+            echo "Invalid check-type: ${{ inputs.check-type }}"
+            exit 1
+          fi
+          exit_code=$?
+          err=$(<app_stderr.txt)
+
+          # Display the raw output in the step
+          echo "${out}"
+          echo "${err}"
+
+          # Display the Markdown output in the job summary
+          { echo "\`\`\`"; echo "${out}"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+
+          # Exit with the exit-code returned by cargo
+          exit ${exit_code}
diff --git a/.github/workflows/check-ruff-only-changed.yml b/.github/workflows/check-ruff-only-changed.yml
new file mode 100644
index 0000000..157ac03
--- /dev/null
+++ b/.github/workflows/check-ruff-only-changed.yml
@@ -0,0 +1,101 @@
+name: Style check on changed files
+
+on:
+  workflow_call:
+    inputs:
+      check-type:
+        description: format, isort or lint
+        default: lint
+        type: string
+      ruff-version-file:
+        description: The requirements.txt file that contains the Ruff version (ruff==x.x.x)
+        required: true
+        type: string
+
+jobs:
+  # ------------------------------------------------------------------------------------------------------------------------------------------------
+  # Event `pull_request`: Compare the last commit of the main branch or last remote commit of the PR branch -> to the current commit of a PR branch.
+  # ------------------------------------------------------------------------------------------------------------------------------------------------
+  format:
+    name: ruff-format-changed
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0  # OR "2" -> To retrieve the preceding commit.
+
+      - name: Get all changed python files
+        id: changed-python-files
+        uses: tj-actions/changed-files@v44
+        with:
+          # Avoid using single or double quotes for multiline patterns
+          files: |
+            **.py
+      - name: Install ruff
+        if: steps.changed-python-files.outputs.any_changed == 'true'
+        run: |
+          pip3 install -r <(grep '^ruff==' ${{ inputs.ruff-version-file }})
+      - name: Run ruff
+        if: steps.changed-python-files.outputs.any_changed == 'true'
+        run: |
+          set +e  # Do not exit shell on app failure
+
+          if [[ ${{ inputs.check-type }} == 'lint' ]]; then
+            nonzero_exit=0
+            for file in ${{ steps.changed-python-files.outputs.all_changed_files }}; do
+              out=$(ruff check --force-exclude "$file" 2> app_stderr.txt)
+              exit_code=$?
+              err=$(<app_stderr.txt)
+              if [[ $exit_code -ne 0 && $nonzero_exit -eq 0 ]]; then
+                nonzero_exit=$exit_code
+              fi
+
+              if [[ -n "$out" ]]; then
+                # Display the raw output in the step
+                echo "${out}"
+                # Display the Markdown output in the job summary
+                { echo "\`\`\`python"; echo "${out}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+              if [[ -n "$err" ]]; then
+                echo "${err}"
+                { echo "\`\`\`python"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+
+              out=$(ruff check --diff --force-exclude "$file" 2> ruff_stderr.txt)
+              err=$(<ruff_stderr.txt)
+              if [[ -n "$out" ]]; then
+                echo "${out}"
+                { echo "\`\`\`"; echo "${out}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+              if [[ -n "$err" ]]; then
+                echo "${err}"
+                { echo "\`\`\`python"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+            done
+
+            # Exit with the first non-zero exit-code returned by ruff
+            # or just zero if all passed
+            exit ${nonzero_exit}
+          else
+            if [[ ${{ inputs.check-type }} == 'format' ]]; then
+              out=$(ruff format --check --diff ${{ steps.changed-python-files.outputs.all_changed_files }} 2> app_stderr.txt)
+            elif [[ ${{ inputs.check-type }} == 'isort' ]]; then
+              out=$(ruff check --select I --diff ${{ steps.changed-python-files.outputs.all_changed_files }} 2> app_stderr.txt)
+            fi
+            exit_code=$?
+            err=$(<app_stderr.txt)
+
+            # Display the raw output in the step
+            echo "${out}"
+            echo "${err}"
+
+            # Display the Markdown output in the job summary
+            { echo "\`\`\`"; echo "${out}"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+
+            # Exit with the exit-code returned by the app
+            exit ${exit_code}
+          fi
diff --git a/.github/workflows/check-ruff.yml b/.github/workflows/check-ruff.yml
new file mode 100644
index 0000000..a03eda5
--- /dev/null
+++ b/.github/workflows/check-ruff.yml
@@ -0,0 +1,96 @@
+name: Ruff style checking
+
+on:
+  workflow_call:
+    inputs:
+      check-type:
+        description: format, isort or lint
+        default: lint
+        type: string
+      ruff-version-file:
+        description: The requirements.txt file that contains the Ruff version (ruff==x.x.x)
+        required: true
+        type: string
+
+jobs:
+  ruff:
+    name: ruff
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install ruff and requirements
+        run: |
+          pip3 install -r <(grep '^ruff==' "${{ inputs.ruff-version-file }}")
+      - name: Run ruff
+        run: |
+          set +e  # Do not exit shell on ruff failure
+
+          if [[ "${{ inputs.check-type }}" == "lint" ]]; then
+            # code annotation
+            ruff check --output-format=github
+
+            # summary
+            nonzero_exit=0
+            files=$(find . -type f -name "*.py" | sort)
+            while read -r file; do
+              out=$(ruff check --force-exclude "$file" 2> ruff_stderr.txt)
+              exit_code=$?
+              err=$(<ruff_stderr.txt)
+              if [[ $exit_code -ne 0 && $nonzero_exit -eq 0 ]]; then
+                nonzero_exit=$exit_code
+              fi
+
+              if [[ -n "$out" ]]; then
+                # Display the raw output in the step
+                echo "${out}"
+                # Display the Markdown output in the job summary
+                { echo "\`\`\`python"; echo "${out}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+              if [[ -n "$err" ]]; then
+                echo "${err}"
+                { echo "\`\`\`python"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+
+              out=$(ruff check --diff --force-exclude "$file" 2> ruff_stderr.txt)
+              err=$(<ruff_stderr.txt)
+              if [[ -n "$out" ]]; then
+                echo "${out}"
+                { echo "\`\`\`"; echo "${out}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+              if [[ -n "$err" ]]; then
+                echo "${err}"
+                { echo "\`\`\`python"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+              fi
+            done <<< "$files"
+
+            # Exit with the first non-zero exit-code returned by ruff
+            # or just zero if all passed
+            exit ${nonzero_exit}
+          else
+
+            if [[ "${{ inputs.check-type }}" == "format" ]]; then
+              out=$(ruff format --check --diff . 2> app_stderr.txt)
+            elif [[ "${{ inputs.check-type }}" == "isort" ]]; then
+              out=$(ruff check --select I --diff . 2> app_stderr.txt)
+            else
+              echo "Invalid check-type: ${{ inputs.check-type }}"
+              exit 1
+            fi
+
+            exit_code=$?
+            err=$(<app_stderr.txt)
+
+            # Display the raw output in the step
+            echo "${out}"
+            echo "${err}"
+
+            # Display the Markdown output in the job summary
+            { echo "\`\`\`"; echo "${out}"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+
+            # Exit with the exit-code returned by ruff
+            exit ${exit_code}
+          fi
diff --git a/.github/workflows/commit-changelog-and-release.yml b/.github/workflows/commit-changelog-and-release.yml
new file mode 100644
index 0000000..ef2de60
--- /dev/null
+++ b/.github/workflows/commit-changelog-and-release.yml
@@ -0,0 +1,98 @@
+name: Commit CHANGELOG.md and create a Release
+
+on:
+  workflow_call:
+    inputs:
+      version-tag:
+        description: New version tag that starts with 'v' (e.g. v0.1.0)
+        required: true
+        type: string
+      dry-run:
+        description: Show a sample changelog in Summary without committing or creating a release
+        type: boolean
+        default: false
+      changelog-path:
+        description: Path to the CHANGELOG.md file
+        type: string
+        default: docs/CHANGELOG.md
+      exclude-types:
+        description: Comma-separated list of commit types to exclude from the changelog
+        type: string
+        default: build,docs,style,other
+
+jobs:
+  dry-run:
+    if: ${{ inputs.dry-run == true }}
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Push new version tag temporarily for changelog generation
+        run: |
+          git config user.name github-actions[bot]
+          git config user.email github-actions[bot]@users.noreply.github.com
+          git tag -a ${{ inputs.version-tag }} -m ${{ inputs.version-tag }}
+          git push --tags
+
+      - name: Get CHANGELOG
+        id: changelog-dry-run
+        uses: requarks/changelog-action@v1.10.2
+        with:
+          includeInvalidCommits: true
+          excludeTypes: ${{ inputs.exclude-types }}
+          token: ${{ github.token }}
+          tag: ${{ inputs.version-tag }}
+
+      - name: Display CHANGELOG
+        run: |
+          echo '${{ steps.changelog-dry-run.outputs.changes }}'
+          echo '${{ steps.changelog-dry-run.outputs.changes }}' > "$GITHUB_STEP_SUMMARY"
+
+      - name: Remove temporary version tag
+        run: |
+          git tag -d ${{ inputs.version-tag }}
+          git push origin --delete ${{ inputs.version-tag }}
+
+  deploy:
+    if: ${{ inputs.dry-run == false }}
+    runs-on: ubuntu-latest
+    environment: mkdocs
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Push new version tag temporarily for changelog generation
+        run: |
+          git config user.name github-actions[bot]
+          git config user.email github-actions[bot]@users.noreply.github.com
+          git tag -a ${{ inputs.version-tag }} -m ${{ inputs.version-tag }}
+          git push --tags
+
+      - name: Update CHANGELOG
+        id: changelog
+        uses: requarks/changelog-action@v1.10.2
+        with:
+          includeInvalidCommits: true
+          excludeTypes: ${{ inputs.exclude-types }}
+          token: ${{ github.token }}
+          tag: ${{ inputs.version-tag }}
+          changelogFilePath: ${{ inputs.changelog-path }}
+
+      - name: Commit ${{ inputs.changelog-path }} and update tag
+        run: |
+          git tag -d ${{ inputs.version-tag }}
+          git push origin --delete ${{ inputs.version-tag }}
+          git add ${{ inputs.changelog-path }}
+          git commit -m "docs: update ${{ inputs.changelog-path }} for ${{ inputs.version-tag }} [skip ci]"
+          git tag -a ${{ inputs.version-tag }} -m ${{ inputs.version-tag }}
+          git push
+          git push --tags
+
+      - name: Create Release
+        uses: ncipollo/release-action@v1.14.0
+        with:
+          allowUpdates: true
+          draft: false
+          makeLatest: true
+          name: ${{ inputs.version-tag }}
+          tag: ${{ inputs.version-tag }}
+          body: ${{ steps.changelog.outputs.changes }}
diff --git a/.github/workflows/deploy-mkdocs.yml b/.github/workflows/deploy-mkdocs.yml
new file mode 100644
index 0000000..6ff6bc0
--- /dev/null
+++ b/.github/workflows/deploy-mkdocs.yml
@@ -0,0 +1,79 @@
+name: Deploy docs
+
+on:
+  workflow_call:
+    inputs:
+      requirements-file:
+        description: The requirements_docs.txt file that installs mkdocs and its plugins
+        required: true
+        type: string
+      gitlab-project:
+        description: The GitLab document project name (e.g. "deargen-ai/my-project-docs")
+        required: true
+        type: string
+      gitlab-branch:
+        description: The GitLab branch or tag to deploy to
+        default: gl-pages
+        type: string
+      version-tag:
+        description: e.g. "v0.1.0". deploy-type must be "tag" and the tag must exist already.
+        type: string
+      deploy-type:
+        description: The type of deployment (e.g. "latest" or "tag")
+        required: true
+        type: string
+    secrets:
+      GITLAB_TOKEN:
+        description: The GitLab token to authenticate with the GitLab API
+        required: true
+
+jobs:
+  mkdocs:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+        if: inputs.deploy-type == 'latest'
+      - uses: actions/checkout@v4
+        if: inputs.deploy-type == 'tag'
+        with:
+          ref: ${{ inputs.version-tag }}
+      - name: Run mkdocs
+        run: |
+          # python -m pip install --upgrade pip
+          pip3 install uv
+          uv venv
+          source .venv/bin/activate
+          uv pip install -r ${{ inputs.requirements-file }}
+
+          set +e  # Do not exit shell on failure
+          export HTTPS_REMOTE="https://gitlab-ci-token:${{ secrets.GITLAB_TOKEN }}@gitlab.com/${{ inputs.gitlab-project }}.git"
+
+          git config user.name github-actions[bot]
+          git config user.email github-actions[bot]@users.noreply.github.com
+          git remote add gitlab "$HTTPS_REMOTE"
+          git pull gitlab ${{ inputs.gitlab-branch }}:${{ inputs.gitlab-branch }}
+
+          if [ "${{ inputs.deploy-type }}" == "latest" ]; then
+            out=$(mike deploy --deploy-prefix public -r "$HTTPS_REMOTE" -p -b ${{ inputs.gitlab-branch }} -u latest 2> stderr.txt)
+          elif [ "${{ inputs.deploy-type }}" == "tag" ]; then
+            # Delete the latest page because we're going to make it an alias to the latest version.
+            mike delete --deploy-prefix public -r "$HTTPS_REMOTE" -b ${{ inputs.gitlab-branch }} latest
+            out=$(mike deploy --deploy-prefix public -r "$HTTPS_REMOTE" -p -b ${{ inputs.gitlab-branch }} -u ${{ inputs.version-tag }} latest 2> stderr.txt)
+          else
+            echo "Invalid deploy type: ${{ inputs.deploy-type }}"
+            exit 1
+          fi
+          exit_code=$?
+          err=$(<stderr.txt)
+
+          # Display the raw output in the step
+          echo "${out}"
+          echo "${err}"
+
+          # Display the Markdown output in the job summary
+          { echo "\`\`\`"; echo "${out}"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+
+          exit ${exit_code}
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 0000000..5ae93e9
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,22 @@
+name: (not reusable) Commit CHANGELOG.md, create a Release of this repo
+
+on:
+  workflow_dispatch:
+    inputs:
+      version-tag:
+        description: Version tag
+        required: true
+        default: v0.1.0
+      dry-run:
+        description: Dry run
+        type: boolean
+        default: false
+
+jobs:
+  commit-changelog-and-release:
+    uses: ./.github/workflows/commit-changelog-and-release.yml
+    with:
+      version-tag: ${{ github.event.inputs.version-tag }}
+      dry-run: ${{ github.event.inputs.dry-run == 'true' }}
+      changelog-path: docs/CHANGELOG.md
+      exclude-types: build,docs,style,other
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0ce4534
--- /dev/null
+++ b/README.md
@@ -0,0 +1,303 @@
+# Reusable workflows
+
+This repository contains reusable and reference GitHub Actions workflows.
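+
+Workflows under `.github/workflows/` are reusable workflows and are referenced at the job level, while the composite actions under `actions/` are referenced as a step inside your own job. A rough sketch of the two calling styles (see the full examples below for the exact inputs each one needs):
+
+```yaml
+jobs:
+  # Reusable workflow: the whole job delegates to this repository.
+  ruff-lint:
+    uses: deargen/workflows/.github/workflows/check-ruff.yml@master
+    with:
+      check-type: lint
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+
+  # Composite action: used as a step inside a job you define yourself.
+  pytest:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: deargen/workflows/actions/setup-python-and-uv@master
+      # Install your package and pytest here (see the Testing example below).
+      - uses: deargen/workflows/actions/run-pytest@master
+```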
+
+## Style checking and applying fixes
+
+### Ruff lint and style checking
+
+```yaml
+name: Style checking
+
+on:
+  [push, pull_request]
+
+concurrency:
+  group: ${{github.workflow}}-${{github.ref}}
+  cancel-in-progress: true
+
+jobs:
+  ruff-format:
+    uses: deargen/workflows/.github/workflows/check-ruff.yml@master
+    with:
+      check-type: format
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+  ruff-isort:
+    uses: deargen/workflows/.github/workflows/check-ruff.yml@master
+    with:
+      check-type: isort
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+  ruff-lint:
+    uses: deargen/workflows/.github/workflows/check-ruff.yml@master
+    with:
+      check-type: lint
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+```
+
+### Style checking for changed files only
+
+```yaml
+name: Style check on changed files
+
+on: pull_request
+
+concurrency:
+  group: ${{github.workflow}}-${{github.ref}}
+  cancel-in-progress: true
+
+jobs:
+  ruff-format-on-changes:
+    uses: deargen/workflows/.github/workflows/check-ruff-only-changed.yml@master
+    with:
+      check-type: format
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+  ruff-isort-on-changes:
+    uses: deargen/workflows/.github/workflows/check-ruff-only-changed.yml@master
+    with:
+      check-type: isort
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+  ruff-lint-on-changes:
+    uses: deargen/workflows/.github/workflows/check-ruff-only-changed.yml@master
+    with:
+      check-type: lint
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+```
+
+### Apply ruff format, isort and fixes
+
+```yaml
+name: Apply ruff format, isort, and fixes
+
+on:
+  workflow_dispatch:
+    inputs:
+      ruff-select:
+        description: 'ruff select'
+        default: I,D20,D21,UP00,UP032,UP034
+      ruff-ignore:
+        description: 'ruff ignore'
+        default: D212
+
+jobs:
+  ruff-format:
+    uses: deargen/workflows/.github/workflows/apply-ruff.yml@master
+    with:
+      ruff-select: ${{ github.event.inputs.ruff-select }}
+      ruff-ignore: ${{ github.event.inputs.ruff-ignore }}
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+```
+
+## Cargo clippy and fmt checking for Rust projects
+
+```yaml
+name: Style checking
+
+on:
+  [push, pull_request]
+
+concurrency:
+  group: ${{github.workflow}}-${{github.ref}}
+  cancel-in-progress: true
+
+jobs:
+  check-rustfmt:
+    uses: deargen/workflows/.github/workflows/check-cargo.yml@master
+    with:
+      check-type: fmt
+      working-directory: rust
+  check-clippy:
+    uses: deargen/workflows/.github/workflows/check-cargo.yml@master
+    with:
+      check-type: clippy
+      working-directory: rust
+```
+
+## Compiling requirements.txt (generate locked versions)
+
+### Check uv pip compile
+
+```yaml
+name: Check pip compile sync
+
+on: [push, pull_request]
+
+concurrency:
+  group: ${{github.workflow}}-${{github.ref}}
+  cancel-in-progress: true
+
+jobs:
+  check-pip-compile:
+    name: Check pip compile
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: deargen/workflows/actions/check-pip-compile@master
+        with:
+          pyproject-toml-file: pyproject.toml
+          requirements-in-dir: deps
+          requirements-out-dir: deps/lock
+          python-platforms: x86_64-unknown-linux-gnu,aarch64-apple-darwin,x86_64-apple-darwin,x86_64-pc-windows-msvc
+```
+
+### Apply uv pip compile
+
+```yaml
+name: Apply pip compile (generate lockfiles)
+
+on: workflow_dispatch
+
+jobs:
+  apply-pip-compile:
+    name: Apply pip compile
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: deargen/workflows/actions/apply-pip-compile@master
+        with:
+          pyproject-toml-file: pyproject.toml
+          requirements-in-dir: deps
+          requirements-out-dir: deps/lock
+          python-platforms: x86_64-unknown-linux-gnu,aarch64-apple-darwin,x86_64-apple-darwin,x86_64-pc-windows-msvc
+```
+
+## Generating __init__.py files
+
+Without `__init__.py` files, mkdocs will not be able to generate the documentation for the package.
+
+```yaml
+name: Generate __init__.py files
+
+on:
+  workflow_dispatch:
+    inputs:
+      src-dir:
+        description: src directory
+        required: true
+        default: src
+
+jobs:
+  generate-init-py:
+    name: Generate __init__.py files
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: deargen/workflows/actions/gen-init-py@master
+        with:
+          src-dir: ${{ github.event.inputs.src-dir }}
+```
+
+## Testing
+
+### Setup python, uv and run pytest and doctest
+
+```yaml
+name: Tests
+
+on:
+  - push
+  - pull_request
+
+concurrency:
+  group: ${{github.workflow}}-${{github.ref}}
+  cancel-in-progress: true
+
+jobs:
+  pytest:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: deargen/workflows/actions/setup-python-and-uv@master
+      - name: Install dependencies
+        run: |
+          uv venv
+          source .venv/bin/activate
+          uv pip install -r deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+          bash scripts/install.sh
+          python3 scripts/hf_download.py
+      - name: Run pytest
+        uses: deargen/workflows/actions/run-pytest@master
+
+  doctest:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: deargen/workflows/actions/setup-python-and-uv@master
+      - name: Install dependencies
+        run: |
+          uv venv
+          source .venv/bin/activate
+          uv pip install -r deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+          bash scripts/install.sh
+          python3 scripts/hf_download.py
+      - name: Run doctest
+        uses: deargen/workflows/actions/run-doctest@master
+```
+
+## Deploying a new version
+
+### Commit CHANGELOG.md, create a Release and deploy MkDocs
+
+```yaml
+name: Commit CHANGELOG.md, create a Release and deploy MkDocs
+
+on:
+  workflow_dispatch:
+    inputs:
+      version-tag:
+        description: Version tag
+        required: true
+        default: v0.1.0
+      dry-run:
+        description: Dry run
+        type: boolean
+        default: false
+
+jobs:
+  commit-changelog-and-release:
+    uses: deargen/workflows/.github/workflows/commit-changelog-and-release.yml@master
+    with:
+      version-tag: ${{ github.event.inputs.version-tag }}
+      dry-run: ${{ github.event.inputs.dry-run == 'true' }}
+      changelog-path: docs/CHANGELOG.md
+      exclude-types: build,docs,style,other
+
+  deploy-mkdocs:
+    needs: commit-changelog-and-release
+    uses: deargen/workflows/.github/workflows/deploy-mkdocs.yml@master
+    with:
+      requirements-file: deps/lock/x86_64-unknown-linux-gnu/requirements_docs.txt
+      gitlab-project: deargen-ai/my-project-docs
+      gitlab-branch: gl-pages
+      version-tag: ${{ github.event.inputs.version-tag }}
+      deploy-type: tag
+    secrets:
+      GITLAB_TOKEN: ${{ secrets.GITLAB_TOKEN }}
+```
+
+### Deploy MkDocs on latest commit
+
+```yaml
+name: Deploy MkDocs on latest commit
+
+on:
+  push:
+    branches:
+      - main
+      - master
+
+jobs:
+  deploy-mkdocs:
+    uses: deargen/workflows/.github/workflows/deploy-mkdocs.yml@master
+    with:
+      deploy-type: latest
+      requirements-file: deps/lock/x86_64-unknown-linux-gnu/requirements_docs.txt
+      gitlab-project: deargen-ai/my-project-docs
+      gitlab-branch: gl-pages
+    secrets:
+      GITLAB_TOKEN: ${{ secrets.GITLAB_TOKEN }}
+```
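+
+## Note on permissions
+
+The workflows and actions that push commits back to the repository (apply-ruff, apply-pip-compile, gen-init-py) need a token that is allowed to write repository contents. Depending on the repository or organization settings, the calling job may have to grant this explicitly; a minimal sketch:
+
+```yaml
+jobs:
+  ruff-format:
+    permissions:
+      contents: write
+    uses: deargen/workflows/.github/workflows/apply-ruff.yml@master
+    with:
+      ruff-version-file: deps/lock/x86_64-unknown-linux-gnu/requirements_dev.txt
+```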
+
+## Reference
+
+This repository was inspired by [treesitter/workflows](https://github.com/treesitter/workflows).
diff --git a/actions/apply-pip-compile/action.yml b/actions/apply-pip-compile/action.yml
new file mode 100644
index 0000000..29c0805
--- /dev/null
+++ b/actions/apply-pip-compile/action.yml
@@ -0,0 +1,48 @@
+name: Apply pip compile
+description: Freeze dependencies using uv pip compile
+author: Kiyoon Kim
+
+inputs:
+  pyproject-toml-file:
+    description: Path to pyproject.toml file for getting the minimum python version
+    required: true
+    default: pyproject.toml
+  requirements-in-dir:
+    description: Directory to search all requirements*.in files
+    required: true
+    default: deps
+  requirements-out-dir:
+    description: Directory to save compiled requirements*.txt files
+    required: true
+    default: deps/lock
+  python-platforms:
+    description: Platforms to support (comma-separated)
+    required: true
+    default: x86_64-unknown-linux-gnu,aarch64-apple-darwin,x86_64-apple-darwin,x86_64-pc-windows-msvc
+
+runs:
+  using: 'composite'
+  steps:
+    - shell: bash
+      run: |
+        echo "min_python_version=$(python3 ${{ github.action_path }}/../../scripts/get_min_python_version.py "${{ inputs.pyproject-toml-file }}")" >> "$GITHUB_OUTPUT"
+        pip3 install uv
+      id: get-python-version
+    - name: Set up Python
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{ steps.get-python-version.outputs.min_python_version }}
+    - shell: bash
+      name: Run uv pip compile and push
+      run: |
+        # set +e  # Do not exit shell on failure
+        bash ${{ github.action_path }}/../../scripts/compile_requirements.sh \
+          "${{ inputs.requirements-in-dir }}" \
+          "${{ inputs.requirements-out-dir }}" \
+          "${{ steps.get-python-version.outputs.min_python_version }}" \
+          "${{ inputs.python-platforms }}"
+        git config user.name github-actions[bot]
+        git config user.email github-actions[bot]@users.noreply.github.com
+        git add .
+        git commit -m "build: update requirements using uv pip compile [skip ci]"
+        git push
diff --git a/actions/check-pip-compile/action.yml b/actions/check-pip-compile/action.yml
new file mode 100644
index 0000000..1730642
--- /dev/null
+++ b/actions/check-pip-compile/action.yml
@@ -0,0 +1,67 @@
+name: Check pip compile
+description: Detect changes in requirements*.in files that are not compiled yet
+author: Kiyoon Kim
+
+inputs:
+  pyproject-toml-file:
+    description: Path to pyproject.toml file for getting the minimum python version
+    required: true
+    default: pyproject.toml
+  requirements-in-dir:
+    description: Directory to search all requirements*.in files
+    required: true
+    default: deps
+  requirements-out-dir:
+    description: Directory to save compiled requirements*.txt files
+    required: true
+    default: deps/lock
+  python-platforms:
+    description: Platforms to support (comma-separated)
+    required: true
+    default: x86_64-unknown-linux-gnu,aarch64-apple-darwin,x86_64-apple-darwin,x86_64-pc-windows-msvc
+
+runs:
+  using: 'composite'
+  steps:
+    - shell: bash
+      run: |
+        echo "min_python_version=$(python3 ${{ github.action_path }}/../../scripts/get_min_python_version.py "${{ inputs.pyproject-toml-file }}")" >> "$GITHUB_OUTPUT"
+        pip3 install uv
+      id: get-python-version
+    - name: Set up Python
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{ steps.get-python-version.outputs.min_python_version }}
+    - shell: bash
+      name: Generate lockfile and print diff
+      run: |
+        set +e  # Do not exit shell on failure
+
+        out=$(bash ${{ github.action_path }}/../../scripts/compile_requirements.sh \
+          "${{ inputs.requirements-in-dir }}" \
+          "${{ inputs.requirements-out-dir }}" \
+          "${{ steps.get-python-version.outputs.min_python_version }}" \
+          "${{ inputs.python-platforms }}" 2> _stderr.txt)
+        exit_code=$?
+        err=$(<_stderr.txt)
+
+        if [[ -n "$out" ]]; then
+          # Display the raw output in the step
+          echo "${out}"
+          # Display the Markdown output in the job summary
+          { echo "\`\`\`"; echo "${out}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+        fi
+        if [[ -n "$err" ]]; then
+          echo "${err}"
+          { echo "\`\`\`"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+        fi
+
+        if [[ $exit_code -eq 0 ]]; then
+          # Exit code 0 means the script recompiled something, i.e. the lockfiles were not in sync.
+          # Print the suggested changes and fail the check.
+          { echo "\`\`\`diff"; git diff; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+          exit 1
+        fi
+
+        # Otherwise (exit code 2), everything was already compiled and up to date.
+        exit 0
diff --git a/actions/gen-init-py/action.yml b/actions/gen-init-py/action.yml
new file mode 100644
index 0000000..461a4bc
--- /dev/null
+++ b/actions/gen-init-py/action.yml
@@ -0,0 +1,21 @@
+name: Generate __init__.py
+description: Generate __init__.py files for all directories
+author: Kiyoon Kim
+
+inputs:
+  src-dir:
+    description: Source directory to search for directories
+    required: true
+    default: src
+
+runs:
+  using: 'composite'
+  steps:
+    - shell: bash
+      run: |
+        python3 ${{ github.action_path }}/../../scripts/gen_init_py.py "${{ inputs.src-dir }}"
+        git config user.name github-actions[bot]
+        git config user.email github-actions[bot]@users.noreply.github.com
+        git add .
+        git commit -m "build: generate __init__.py [skip ci]"
+        git push
diff --git a/actions/run-doctest/action.yml b/actions/run-doctest/action.yml
new file mode 100644
index 0000000..cf39756
--- /dev/null
+++ b/actions/run-doctest/action.yml
@@ -0,0 +1,30 @@
+name: Run doctest
+description: Just run doctest and output to Summary. Make sure the package and its dependencies are installed.
+author: Kiyoon Kim
+
+inputs:
+  src-dir:
+    description: Directory to search for all *.py files to run doctest.
+    required: true
+    default: src
+
+runs:
+  using: 'composite'
+  steps:
+    - shell: bash -el {0}  # setup-miniconda needs this
+      name: Run doctest
+      run: |
+        set +e  # Do not exit shell on doctest failure
+        out=$(python ${{ github.action_path }}/../../scripts/run_doctest.py ${{ inputs.src-dir }} 2> stderr.txt)
+        exit_code=$?
+        err=$(<stderr.txt)
+
+        # Display the raw output in the step
+        echo "${out}"
+        echo "${err}"
+
+        # Display the Markdown output in the job summary
+        { echo "\`\`\`"; echo "${out}"; echo "${err}"; echo "\`\`\`"; } >> "$GITHUB_STEP_SUMMARY"
+
+        # Exit with the exit-code returned by doctest
+        exit ${exit_code}
diff --git a/actions/run-pytest/action.yml b/actions/run-pytest/action.yml
new file mode 100644
index 0000000..5da21c7
--- /dev/null
+++ b/actions/run-pytest/action.yml
@@ -0,0 +1,34 @@
+name: Run pytest
+description: Just run pytest and output to Summary. Make sure pytest, the package and its dependencies are installed.
+author: Kiyoon Kim
+
+runs:
+  using: 'composite'
+  steps:
+    - shell: bash -el {0}  # setup-miniconda needs this
+      name: Run pytest
+      run: |
+        set +e  # Do not exit shell on pytest failure
+        source .venv/bin/activate  # if using venv, activate it. If it doesn't exist, the error is ignored.
+
+        out=$(pytest 2> stderr.txt)
+        exit_code=$?
+        err=$(<stderr.txt)
+
+        # Display the raw output in the step
+        echo "${out}"
+        echo "${err}"
+
+        # Display the Markdown output in the job summary (the fence is closed below)
+        { echo "\`\`\`"; echo "${out}"; echo "${err}"; } >> "$GITHUB_STEP_SUMMARY"
+        if [[ $exit_code -eq 5 ]]
+        then
+          echo
+          echo 'WARNING: No tests were run and it is considered as success' >> "$GITHUB_STEP_SUMMARY"
+          echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY"
+          exit 0
+        else
+          echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY"
+          # Exit with the exit-code returned by pytest
+          exit ${exit_code}
+        fi
diff --git a/actions/setup-python-and-uv/action.yml b/actions/setup-python-and-uv/action.yml
new file mode 100644
index 0000000..873bc2e
--- /dev/null
+++ b/actions/setup-python-and-uv/action.yml
@@ -0,0 +1,21 @@
+name: Setup python with the minimum version, and install uv
+description: Use with projects without conda.
+author: Kiyoon Kim
+
+inputs:
+  pyproject-toml-file:
+    description: Path to pyproject.toml to infer the minimum python version
+    required: true
+    default: pyproject.toml
+
+runs:
+  using: 'composite'
+  steps:
+    - shell: bash
+      run: |
+        echo "python_version=$(python3 ${{ github.action_path }}/../../scripts/get_min_python_version.py ${{ inputs.pyproject-toml-file }})" >> "$GITHUB_OUTPUT"
+        pip3 install --user uv
+      id: get-python-version
+    - uses: actions/setup-python@v5
+      with:
+        python-version: ${{ steps.get-python-version.outputs.python_version }}
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
new file mode 100644
index 0000000..d6637e0
--- /dev/null
+++ b/docs/CHANGELOG.md
@@ -0,0 +1,5 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
diff --git a/scripts/compile_requirements.sh b/scripts/compile_requirements.sh
new file mode 100644
index 0000000..2082043
--- /dev/null
+++ b/scripts/compile_requirements.sh
@@ -0,0 +1,147 @@
+#!/usr/bin/env bash
+
+# This script compiles all requirements.in files to requirements.txt files.
+# This means that all dependencies are locked to a specific version.
+# Plus, it checks whether each requirements.in file has changed since the last time it was compiled.
+# If not, it skips the file rather than recompiling it (which may change versions unnecessarily often).
+
+if [[ $# -lt 3 ]]; then
+    echo "Usage: $0 <requirements_in_dir> <requirements_out_dir> <python_version> [<comma_separated_target_platforms>]" >&2
+    exit 1
+fi
+
+if ! command -v uv &> /dev/null; then
+    echo "uv is not installed. Please run 'pip3 install --user uv'" >&2
+    exit 1
+fi
+
+if ! command -v sha256sum &> /dev/null; then
+    echo "sha256sum is not installed." >&2
+    echo "If you're on Mac, run 'brew install coreutils'" >&2
+    exit 1
+fi
+
+if [[ $# -lt 4 ]]; then
+    TARGET_PLATFORMS=(x86_64-unknown-linux-gnu aarch64-apple-darwin x86_64-apple-darwin x86_64-pc-windows-msvc)
+else
+    IFS=',' read -r -a TARGET_PLATFORMS <<< "$4"
+fi
+
+REQUIREMENTS_IN_DIR="$1"
+# realpath alternative
+# shellcheck disable=SC2164
+REQUIREMENTS_OUT_DIR="$(cd "$(dirname -- "$2")" >/dev/null; pwd -P)/$(basename -- "$2")"
+PYTHON_VERSION="$3"
+
+# SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+# REQUIREMENTS_IN_DIR="$SCRIPT_DIR/../deps"
+# REQUIREMENTS_OUT_DIR="$SCRIPT_DIR/../deps"
+
+# NOTE: sha256sum will put the file path in the hash file.
+# To simplify the directory (using relative paths), we change the working directory.
+cd "$REQUIREMENTS_IN_DIR" || { echo "Failure"; exit 1; }
+
+for platform in "${TARGET_PLATFORMS[@]}"; do
+    mkdir -p "$REQUIREMENTS_OUT_DIR/$platform"
+done
+
+shopt -s globstar
+
+function get_shafile() {
+    local file=$1
+    local target_platform=$2
+    # .requirements.in.sha256
+    echo "$REQUIREMENTS_OUT_DIR/$target_platform/.$file.sha256"
+}
+
+function get_lockfile() {
+    local file=$1
+    local target_platform=$2
+    # requirements.txt
+    echo "$REQUIREMENTS_OUT_DIR/$target_platform/${file%.in}.txt"
+}
+
+function file_content_changed() {
+    # Check if the file has changed since the last time it was compiled, using the hash file.
+    # NOTE: returns 0 if the file has changed
+    local file=$1
+    local target_platform=$2
+    local shafile
+    shafile=$(get_shafile "$file" "$target_platform")
+    if [[ -f "$shafile" ]] && sha256sum -c "$shafile" &> /dev/null; then
+        return 1
+    fi
+    return 0
+}
+
+
+function deps_changed() {
+    # Check if the requirements*.in file has changed since the last time it was compiled,
+    # including its dependencies (-r another_requirements.in).
+    #
+    # When the requirements have dependencies on other requirements files, we need to check if those have changed as well.
+    # e.g. requirements_dev.in has a dependency on requirements.in (-r requirements.in)
+    # Note that we also need to recursively check if the dependencies of the dependencies have changed.
+    # We need to recompile requirements_dev.txt if requirements.in has changed.
+    # NOTE: returns 0 if the deps have changed
+    local file=$1
+    local target_platform=$2
+
+    if file_content_changed "$file" "$target_platform"; then
+        return 0
+    fi
+
+
+    local file_deps
+    file_deps=$(grep -Eo -- '-r [^ ]+' "$file")
+    file_deps=${file_deps//"-r "/}  # remove -r
+    for dep in $file_deps; do
+        echo "ℹī¸ $file depends on $dep"
+        dep=${dep#-r }  # requirements.in
+        if deps_changed "$dep" "$target_platform"; then
+            return 0
+        fi
+    done
+    return 1
+}
+
+num_files=0
+num_up_to_date=0
+files_changed=()
+
+# First, collect all files that need to be compiled.
+# We don't compile them yet, because it will mess up the hash comparison.
+for file in requirements*.in; do
+    for target_platform in "${TARGET_PLATFORMS[@]}"; do
+        # $file: requirements.in
+        ((num_files++))
+
+        lockfile=$(get_lockfile "$file" "$target_platform")
+        shafile=$(get_shafile "$file" "$target_platform")
+        # Process only changed files by comparing hash
+        if [[ -f "$lockfile" ]]; then
+            if ! deps_changed "$file" "$target_platform"; then
+                echo "⚡ Skipping $file due to no changes"
+                ((num_up_to_date++))
+                continue
+            fi
+        fi
+        files_changed+=("$file")
+    done
+done
+
+for file in "${files_changed[@]}"; do
+    for target_platform in "${TARGET_PLATFORMS[@]}"; do
+        lockfile=$(get_lockfile "$file" "$target_platform")
+        shafile=$(get_shafile "$file" "$target_platform")
+        echo "🔒 Generating lockfile $lockfile from $file"
+        uv pip compile "$file" -o "$lockfile" --python-platform "$target_platform" --python-version "$PYTHON_VERSION" > /dev/null
+        sha256sum "$file" > "$shafile"  # update hash
+    done
+done
+
+# exit code 2 when all files are up to date
+if [[ $num_files -eq $num_up_to_date ]]; then
+    echo "💖 All files are up to date!"
+    exit 2
+fi
+
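The `compile_requirements.sh` script above is what both pip-compile actions (`check-pip-compile` and `apply-pip-compile`) call. A minimal sketch of invoking it directly from a workflow step, assuming the `deps`/`deps/lock` layout used in the README examples (the Python version argument, `3.8` here, is only a placeholder):

```yaml
- name: Compile lockfiles
  shell: bash
  run: |
    # args: <requirements_in_dir> <requirements_out_dir> <python_version> [<comma_separated_target_platforms>]
    bash scripts/compile_requirements.sh deps deps/lock 3.8 x86_64-unknown-linux-gnu
    # Exit code 2 means every lockfile was already up to date.
```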
diff --git a/scripts/gen_init_py.py b/scripts/gen_init_py.py
new file mode 100644
index 0000000..22415e8
--- /dev/null
+++ b/scripts/gen_init_py.py
@@ -0,0 +1,35 @@
+"""
+Automatically generate __init__.py files for all subdirectories.
+
+Useful before building mkdocs documentation.
+"""
+# flake8: noqa: T201
+
+import os
+import sys
+from os import PathLike
+from pathlib import Path
+
+
+def gen_init_py(path: str | PathLike):
+    """Generate __init__.py files for all subdirectories of path."""
+    for root, _, files in os.walk(path):
+        if "__init__.py" in files:
+            continue
+        if Path(root).samefile(path):
+            continue
+        if "__pycache__" in root:
+            continue
+        if root.endswith(".egg-info"):
+            continue
+
+        with open(Path(root) / "__init__.py", "w") as f:
+            print("Generating __init__.py in", root)
+            f.write("")
+
+
+if __name__ == "__main__":
+    src_dir = (
+        Path(__file__).parent.parent / "src" if len(sys.argv) == 1 else sys.argv[1]
+    )
+    gen_init_py(src_dir)
diff --git a/scripts/get_min_python_version.py b/scripts/get_min_python_version.py
new file mode 100644
index 0000000..349a91e
--- /dev/null
+++ b/scripts/get_min_python_version.py
@@ -0,0 +1,39 @@
+"""
+Get minimum python version from pyproject.toml.
+
+Note:
+    It only works if the format is like this: ">=3.11", ">=3.11,<3.12"
+"""
+
+import sys
+from pathlib import Path
+
+# pyproject_toml_path = Path(__file__).parent.parent / "pyproject.toml"
+
+if len(sys.argv) == 2:
+    pyproject_toml_path = sys.argv[1]
+elif len(sys.argv) == 1:
+    pyproject_toml_path = Path(__file__).parent.parent / "pyproject.toml"
+else:
+    raise ValueError("Invalid number of arguments")
+
+try:
+    import toml
+
+    pyproject = toml.load(pyproject_toml_path)
+    version_range = pyproject["project"]["requires-python"]
+except ImportError:
+    # alternatively, search for requires-python in pyproject.toml
+    with open(pyproject_toml_path) as f:
+        for line in f:
+            if line.startswith("requires-python"):
+                version_range = line.replace("requires-python", "").strip(" ='\"\n")
+                break
+        else:
+            raise ValueError("requires-python not found in pyproject.toml")
+
+
+# get minimum python version
+# it has a format like this: ">=3.6", ">=3.7,<3.8"
+min_version = version_range.split(",")[0].replace(">=", "")
+print(min_version)  # noqa: T201
diff --git a/scripts/run_doctest.py b/scripts/run_doctest.py
new file mode 100644
index 0000000..562d9f7
--- /dev/null
+++ b/scripts/run_doctest.py
@@ -0,0 +1,57 @@
+"""
+Run doctest for all modules in the `src/` directory.
+
+It runs all modules in the `src/` directory and prints the result of doctest.
+
+It also has to import every module in the `src/` directory, so it effectively tests that each module can be imported.
+If any module doesn't run (e.g. syntax error, import error, etc.), it will also fail.
+"""
+
+# flake8: noqa: T201
+import doctest
+import importlib
+import os
+import sys
+from pathlib import Path
+
+if __name__ == "__main__":
+    src_dir = sys.argv[1] if len(sys.argv) > 1 else "src"
+    if src_dir.endswith("/"):
+        src_dir = src_dir[:-1]
+
+    # find all modules in src/
+    modules = []
+    for root, _dirs, files in os.walk(src_dir):
+        for file in files:
+            if file.endswith(".py"):
+                # convert path to module name
+                root = root.replace(f"{src_dir}/", "")
+                root = root.replace("/", ".")
+                modules.append(root + "." + Path(file).stem)
+
+    # run doctest for all modules
+    failed_modules = []
+    num_failed = 0
+    num_attempted = 0
+    num_modules_with_doctest = 0
+    for module_name in modules:
+        module = importlib.import_module(module_name)
+        result = doctest.testmod(module, verbose=True)
+        if result.failed > 0:
+            print(f"🚨 doctest failed for module: {module_name}")
+            print(f"🚨 {result.failed} failed out of {result.attempted} tests")
+            num_failed += result.failed
+
+        if result.attempted > 0:
+            num_modules_with_doctest += 1
+        num_attempted += result.attempted
+
+    if num_failed == 0:
+        print(
+            f"✅ All {num_attempted} tests passed in {num_modules_with_doctest} modules."
+        )
+    else:
+        print(
+            f"🚨 {num_failed} failed out of {num_attempted} tests in {num_modules_with_doctest} modules."
+        )
+        exit(1)