From 025efb07ab5051f41db321356592a17d185f5d7b Mon Sep 17 00:00:00 2001 From: rtuck99 Date: Tue, 23 Jul 2024 10:03:05 +0100 Subject: [PATCH 1/2] 378 Convert source tree to python-copier-template (#667) * (#378) Initial changes for copier template v1.0.0 * (#378) More sensible pre-commit defaults. Fix some broken unit tests * (#378) unit tests mostly working * Add Pycharm project files to .gitignore * (#378) Update to version v2.1.0 of copier template from v1.0.0 * (#378) minor fixes to documentation generation * (#378) Enable pypi publishing * (#378) Update software catalog info * (#378) Rebase onto latest main * (#378) Use diff-quality rather than pyright, make pyright, ruff happy * (#378) Make ruff happy, and code less evil. Add diff-cover to dependencies * (#378) Fix test failures due to missing decorator parentheses. Add missing pyright plugin to dependencies * (#378) Set fetch-depth to 0 for diff-quality * (#378) Fix dist CI * (#378) fix tests, ruff after rebase against main * (#378) Add warning suppression to unit tests for pydantic v2 --- .copier-answers.yml | 16 ++ .devcontainer/devcontainer.json | 52 +++--- .github/CONTRIBUTING.md | 27 +++ .github/CONTRIBUTING.rst | 35 ---- .../actions/install_requirements/action.yml | 66 +++----- .github/dependabot.yml | 8 +- .github/pages/index.html | 2 +- .github/pages/make_switcher.py | 29 ++-- .github/workflows/_check.yml | 27 +++ .github/workflows/_dist.yml | 35 ++++ .github/workflows/{docs.yml => _docs.yml} | 31 ++-- .github/workflows/_pypi.yml | 17 ++ .github/workflows/_release.yml | 32 ++++ .github/workflows/_test.yml | 62 +++++++ .github/workflows/_tox.yml | 24 +++ .github/workflows/ci.yml | 59 +++++++ .github/workflows/code.yml | 159 ------------------ .github/workflows/docs_clean.yml | 43 ----- .github/workflows/linkcheck.yml | 29 ---- .github/workflows/periodic.yml | 13 ++ .gitignore | 6 +- .vscode/extensions.json | 7 +- .vscode/launch.json | 8 +- .vscode/settings.json | 11 +- .vscode/tasks.json | 2 +- Dockerfile | 28 +-- README.md | 40 +++++ README.rst | 61 ------- catalog-info.yaml | 12 ++ docs/conf.py | 32 ++-- docs/developer/explanations/decisions.rst | 17 -- .../0001-record-architecture-decisions.rst | 26 --- .../0002-switched-to-pip-skeleton.rst | 35 ---- docs/developer/how-to/build-docs.rst | 38 ----- docs/developer/how-to/contribute.rst | 1 - docs/developer/how-to/lint.rst | 34 ---- docs/developer/how-to/make-release.rst | 31 ---- docs/developer/how-to/run-tests.rst | 12 -- docs/developer/how-to/static-analysis.rst | 8 - docs/developer/how-to/update-tools.rst | 16 -- docs/developer/index.rst | 67 -------- docs/developer/tutorials/dev-install.rst | 68 -------- docs/explanations.md | 10 ++ docs/explanations/decisions.md | 12 ++ .../0001-record-architecture-decisions.md | 18 ++ ...0002-switched-to-python-copier-template.md | 28 +++ docs/explanations/decisions/COPYME | 19 +++ docs/genindex.md | 3 + docs/genindex.rst | 5 - docs/how-to.md | 10 ++ docs/how-to/build-docs.md | 39 +++++ docs/how-to/contribute.md | 2 + docs/how-to/coverage.md | 8 + .../how-to/create-beamline.rst | 0 docs/how-to/dev-install.md | 56 ++++++ docs/how-to/excalidraw.md | 19 +++ docs/how-to/lint.md | 34 ++++ docs/how-to/lock-requirements.md | 39 +++++ .../how-to/make-new-ophyd-async-device.rst | 0 docs/how-to/make-release.md | 32 ++++ docs/{developer => }/how-to/move-code.rst | 0 docs/how-to/pypi.md | 24 +++ docs/how-to/run-tests.md | 20 +++ docs/how-to/static-analysis.md | 7 + docs/how-to/update-template.md | 9 + docs/{developer => }/how-to/zocalo.rst | 2 +- 
docs/images/dls-favicon.ico | Bin 99678 -> 0 bytes docs/images/excalidraw-example.svg | 16 ++ docs/index.md | 56 ++++++ docs/index.rst | 29 ---- docs/reference.md | 12 ++ .../reference/api.rst => reference/api.md} | 10 +- .../reference/device-standards.rst | 0 docs/{developer => }/reference/standards.rst | 18 +- docs/tutorials.md | 10 ++ docs/{user => }/tutorials/get_started.rst | 0 docs/tutorials/installation.md | 42 +++++ docs/user/explanations/docs-structure.rst | 18 -- docs/user/index.rst | 56 ------ docs/user/tutorials/installation.rst | 38 ----- pyproject.toml | 47 ++++-- src/dodal/__init__.py | 5 +- src/dodal/beamlines/__init__.py | 2 +- src/dodal/beamlines/i22.py | 10 +- src/dodal/beamlines/p38.py | 10 +- src/dodal/beamlines/p45.py | 8 +- .../common/beamlines/beamline_parameters.py | 4 +- src/dodal/common/beamlines/beamline_utils.py | 11 +- src/dodal/common/maths.py | 4 +- src/dodal/common/types.py | 3 +- src/dodal/common/visit.py | 3 +- src/dodal/devices/aperturescatterguard.py | 8 +- src/dodal/devices/areadetector/adaravis.py | 8 +- src/dodal/devices/areadetector/adsim.py | 3 +- .../devices/detector/det_dim_constants.py | 4 +- .../detector/det_dist_to_beam_converter.py | 2 +- src/dodal/devices/detector/detector.py | 6 +- src/dodal/devices/eiger.py | 10 +- src/dodal/devices/eiger_odin.py | 18 +- src/dodal/devices/fast_grid_scan.py | 4 +- src/dodal/devices/focusing_mirror.py | 8 +- src/dodal/devices/i22/dcm.py | 7 +- src/dodal/devices/i22/fswitch.py | 5 +- src/dodal/devices/i22/nxsas.py | 53 +++--- src/dodal/devices/linkam3.py | 16 +- src/dodal/devices/oav/oav_detector.py | 2 +- src/dodal/devices/oav/oav_parameters.py | 14 +- .../oav/pin_image_recognition/__init__.py | 12 +- .../oav/pin_image_recognition/utils.py | 11 +- src/dodal/devices/oav/utils.py | 4 +- src/dodal/devices/robot.py | 41 +++-- src/dodal/devices/smargon.py | 6 +- src/dodal/devices/tetramm.py | 2 +- src/dodal/devices/util/adjuster_plans.py | 2 +- src/dodal/devices/util/epics_util.py | 12 +- src/dodal/devices/util/lookup_tables.py | 5 +- src/dodal/devices/webcam.py | 6 +- src/dodal/devices/zebra.py | 11 +- src/dodal/devices/zocalo/zocalo_results.py | 14 +- src/dodal/log.py | 19 ++- src/dodal/utils.py | 32 ++-- tests/common/beamlines/test_beamline_utils.py | 23 ++- .../beamlines/test_device_instantiation.py | 2 +- tests/conftest.py | 21 ++- tests/devices/i22/test_dcm.py | 2 +- .../test_aperturescatterguard_system.py | 10 +- .../image_recognition/test_pin_tip_detect.py | 2 +- .../unit_tests/test_aperture_scatterguard.py | 4 +- tests/devices/unit_tests/test_eiger.py | 32 ++-- .../unit_tests/test_focusing_mirror.py | 4 +- .../devices/unit_tests/test_hutch_shutter.py | 3 +- tests/devices/unit_tests/test_odin.py | 2 +- tests/devices/unit_tests/test_slits.py | 3 +- tests/devices/unit_tests/test_smargon.py | 4 +- tests/devices/unit_tests/test_synchrotron.py | 19 ++- tests/devices/unit_tests/test_thawer.py | 14 +- .../devices/unit_tests/test_undulator_dcm.py | 11 ++ tests/devices/unit_tests/test_utils.py | 12 +- tests/devices/unit_tests/test_webcam.py | 5 + tests/devices/unit_tests/test_xspress3.py | 5 +- tests/devices/unit_tests/test_zebra.py | 2 +- .../unit_tests/test_zocalo_interaction.py | 10 +- tests/fake_zocalo/__main__.py | 4 +- tests/test_cli.py | 12 ++ tests/unit_tests/test_log.py | 25 ++- 145 files changed, 1410 insertions(+), 1300 deletions(-) create mode 100644 .copier-answers.yml create mode 100644 .github/CONTRIBUTING.md delete mode 100644 .github/CONTRIBUTING.rst create mode 100644 .github/workflows/_check.yml 
create mode 100644 .github/workflows/_dist.yml rename .github/workflows/{docs.yml => _docs.yml} (67%) create mode 100644 .github/workflows/_pypi.yml create mode 100644 .github/workflows/_release.yml create mode 100644 .github/workflows/_test.yml create mode 100644 .github/workflows/_tox.yml create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/code.yml delete mode 100644 .github/workflows/docs_clean.yml delete mode 100644 .github/workflows/linkcheck.yml create mode 100644 .github/workflows/periodic.yml create mode 100644 README.md delete mode 100644 README.rst create mode 100644 catalog-info.yaml delete mode 100644 docs/developer/explanations/decisions.rst delete mode 100644 docs/developer/explanations/decisions/0001-record-architecture-decisions.rst delete mode 100644 docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst delete mode 100644 docs/developer/how-to/build-docs.rst delete mode 100644 docs/developer/how-to/contribute.rst delete mode 100644 docs/developer/how-to/lint.rst delete mode 100644 docs/developer/how-to/make-release.rst delete mode 100644 docs/developer/how-to/run-tests.rst delete mode 100644 docs/developer/how-to/static-analysis.rst delete mode 100644 docs/developer/how-to/update-tools.rst delete mode 100644 docs/developer/index.rst delete mode 100644 docs/developer/tutorials/dev-install.rst create mode 100644 docs/explanations.md create mode 100644 docs/explanations/decisions.md create mode 100644 docs/explanations/decisions/0001-record-architecture-decisions.md create mode 100644 docs/explanations/decisions/0002-switched-to-python-copier-template.md create mode 100644 docs/explanations/decisions/COPYME create mode 100644 docs/genindex.md delete mode 100644 docs/genindex.rst create mode 100644 docs/how-to.md create mode 100644 docs/how-to/build-docs.md create mode 100644 docs/how-to/contribute.md create mode 100644 docs/how-to/coverage.md rename docs/{developer => }/how-to/create-beamline.rst (100%) create mode 100644 docs/how-to/dev-install.md create mode 100644 docs/how-to/excalidraw.md create mode 100644 docs/how-to/lint.md create mode 100644 docs/how-to/lock-requirements.md rename docs/{developer => }/how-to/make-new-ophyd-async-device.rst (100%) create mode 100644 docs/how-to/make-release.md rename docs/{developer => }/how-to/move-code.rst (100%) create mode 100644 docs/how-to/pypi.md create mode 100644 docs/how-to/run-tests.md create mode 100644 docs/how-to/static-analysis.md create mode 100644 docs/how-to/update-template.md rename docs/{developer => }/how-to/zocalo.rst (94%) delete mode 100644 docs/images/dls-favicon.ico create mode 100644 docs/images/excalidraw-example.svg create mode 100644 docs/index.md delete mode 100644 docs/index.rst create mode 100644 docs/reference.md rename docs/{user/reference/api.rst => reference/api.md} (89%) rename docs/{developer => }/reference/device-standards.rst (100%) rename docs/{developer => }/reference/standards.rst (80%) create mode 100644 docs/tutorials.md rename docs/{user => }/tutorials/get_started.rst (100%) create mode 100644 docs/tutorials/installation.md delete mode 100644 docs/user/explanations/docs-structure.rst delete mode 100644 docs/user/index.rst delete mode 100644 docs/user/tutorials/installation.rst create mode 100644 tests/test_cli.py diff --git a/.copier-answers.yml b/.copier-answers.yml new file mode 100644 index 0000000000..551aabe775 --- /dev/null +++ b/.copier-answers.yml @@ -0,0 +1,16 @@ +# Changes here will be overwritten by Copier +_commit: 2.1.0 
+_src_path: gh:DiamondLightSource/python-copier-template +author_email: dominic.oram@diamond.ac.uk +author_name: Dominic Oram +component_owner: group:data-acquisition +description: Ophyd devices and other utils that could be used across DLS beamlines +distribution_name: dls-dodal +docker: false +docs_type: sphinx +git_platform: github.com +github_org: DiamondLightSource +package_name: dodal +pypi: true +repo_name: dodal +type_checker: pyright diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 7a30e9bee6..79b85ff41a 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,52 +3,44 @@ "name": "Python 3 Developer Container", "build": { "dockerfile": "../Dockerfile", - "target": "build", - // Only upgrade pip, we will install the project below - "args": { - "PIP_OPTIONS": "--upgrade pip" - } + "target": "developer" }, "remoteEnv": { + // Allow X11 apps to run inside the container "DISPLAY": "${localEnv:DISPLAY}" }, - // Add the URLs of features you want added when the container is built. - "features": { - "ghcr.io/devcontainers/features/common-utils:1": { - "username": "none", - "upgradePackages": false - } - }, - // Set *default* container specific settings.json values on container create. - "settings": { - "python.defaultInterpreterPath": "/venv/bin/python" - }, "customizations": { "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python" + }, // Add the IDs of extensions you want installed when the container is created. "extensions": [ "ms-python.python", + "github.vscode-github-actions", "tamasfe.even-better-toml", "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters" + "ryanluker.vscode-coverage-gutters", + "charliermarsh.ruff", + "ms-azuretools.vscode-docker" ] } }, - // Make sure the files we are mapping into the container exist on the host - "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", + "features": { + // Some default things like git config + "ghcr.io/devcontainers/features/common-utils:2": { + "upgradePackages": false + } + }, "runArgs": [ + // Allow the container to access the host X11 display and EPICS CA "--net=host", - "--security-opt=label=type:container_runtime_t" - ], - "mounts": [ - "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", - "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", - // map in home directory - not strictly necessary but useful - "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" + // Make sure SELinux does not interfere with access to host filesystems like /tmp + "--security-opt=label=disable" ], - // make the workspace folder the same inside and outside of the container - "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", - "workspaceFolder": "${localWorkspaceFolder}", + // Mount the parent as /workspaces so we can pip install peers as editable + "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind", // After the container is created, install the python project in editable form - "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install" } \ No newline at end of file diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 0000000000..0f26801edf --- /dev/null +++
b/.github/CONTRIBUTING.md @@ -0,0 +1,27 @@ +# Contribute to the project + +Contributions and issues are most welcome! All issues and pull requests are +handled through [GitHub](https://github.com/DiamondLightSource/dodal/issues). Also, please check for any existing issues before +filing a new one. If you have a great idea but it involves big changes, please +file a ticket before making a pull request! We want to make sure you don't spend +your time coding something that might not fit the scope of the project. + +## Issue or Discussion? + +GitHub also offers [discussions](https://github.com/DiamondLightSource/dodal/discussions) as a place to ask questions and share ideas. If +your issue is open-ended and it is not obvious when it can be "closed", please +raise it as a discussion instead. + +## Code Coverage + +While 100% code coverage does not make a library bug-free, it significantly +reduces the number of easily caught bugs! Please make sure coverage remains the +same or is improved by a pull request! + +## Developer Information + +It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits its own needs. + +This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects. + +For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/2.1.0/how-to.html). diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst deleted file mode 100644 index 0a02723010..0000000000 --- a/.github/CONTRIBUTING.rst +++ /dev/null @@ -1,35 +0,0 @@ -Contributing to the project -=========================== - -Contributions and issues are most welcome! All issues and pull requests are -handled through GitHub_. Also, please check for any existing issues before -filing a new one. If you have a great idea but it involves big changes, please -file a ticket before making a pull request! We want to make sure you don't spend -your time coding something that might not fit the scope of the project. - -.. _GitHub: https://github.com/DiamondLightSource/dodal/issues - -Issue or Discussion? --------------------- - -Github also offers discussions_ as a place to ask questions and share ideas. If -your issue is open ended and it is not obvious when it can be "closed", please -raise it as a discussion instead. - -.. _discussions: https://github.com/DiamondLightSource/dodal/discussions - -Code coverage -------------- - -While 100% code coverage does not make a library bug-free, it significantly -reduces the number of easily caught bugs! Please make sure coverage remains the -same or is improved by a pull request! - -Developer guide ---------------- - -The `Developer Guide`_ contains information on setting up a development -environment, running the tests and what standards the code and documentation -should follow. - -..
_Developer Guide: https://diamondlightsource.github.io/dodal/main/developer/how-to/contribute.html diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml index 79d1a71eef..d33e080527 100644 --- a/.github/actions/install_requirements/action.yml +++ b/.github/actions/install_requirements/action.yml @@ -1,60 +1,34 @@ name: Install requirements -description: Run pip install with requirements and upload resulting requirements +description: Install a version of python then call pip install and report what was installed inputs: - requirements_file: - description: Name of requirements file to use and upload - required: true - install_options: + python-version: + description: Python version to install, default is from Dockerfile + default: "dev" + pip-install: description: Parameters to pass to pip install - required: true - artifact_name: - description: A user friendly name to give the produced artifacts - required: true - python_version: - description: Python version to install - default: "3.x" + default: "$([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e .[dev]" runs: using: composite - steps: + - name: Get version of python + run: | + PYTHON_VERSION="${{ inputs.python-version }}" + if [ $PYTHON_VERSION == "dev" ]; then + PYTHON_VERSION=$(sed -n "s/ARG PYTHON_VERSION=//p" Dockerfile) + fi + echo "PYTHON_VERSION=$PYTHON_VERSION" >> "$GITHUB_ENV" + shell: bash + - name: Setup python uses: actions/setup-python@v5 with: - python-version: ${{ inputs.python_version }} + python-version: ${{ env.PYTHON_VERSION }} - - name: Pip install - run: | - touch ${{ inputs.requirements_file }} - # -c uses requirements.txt as constraints, see 'Validate requirements file' - pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} + - name: Install packages + run: pip install ${{ inputs.pip-install }} shell: bash - - name: Create lockfile - run: | - mkdir -p lockfiles - pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} - # delete the self referencing line and make sure it isn't blank - sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} - shell: bash - - - name: Upload lockfiles - uses: actions/upload-artifact@v4.0.0 - with: - name: lockfiles-${{ inputs.python_version }}-${{ inputs.artifact_name }}-${{ github.sha }} - path: lockfiles - - # This eliminates the class of problems where the requirements being given no - # longer match what the packages themselves dictate. E.g. In the rare instance - # where I install some-package which used to depend on vulnerable-dependency - # but now uses good-dependency (despite being nominally the same version) - # pip will install both if given a requirements file with -r - - name: If requirements file exists, check it matches pip installed packages - run: | - if [ -s ${{ inputs.requirements_file }} ]; then - if ! 
diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then - echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive" - exit 1 - fi - fi + - name: Report what was installed + run: pip freeze shell: bash diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2d1af8738d..184ba3631a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,11 +10,15 @@ updates: schedule: interval: "weekly" groups: - github-artifacts: + actions: patterns: - - actions/*-artifact + - "*" - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" + groups: + dev-dependencies: + patterns: + - "*" diff --git a/.github/pages/index.html b/.github/pages/index.html index c495f39f2f..80f0a00912 100644 --- a/.github/pages/index.html +++ b/.github/pages/index.html @@ -8,4 +8,4 @@ - + \ No newline at end of file diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py index 8b6afeb8f2..29f646c3ac 100755 --- a/.github/pages/make_switcher.py +++ b/.github/pages/make_switcher.py @@ -3,28 +3,27 @@ from argparse import ArgumentParser from pathlib import Path from subprocess import CalledProcessError, check_output -from typing import List -def report_output(stdout: bytes, label: str) -> List[str]: +def report_output(stdout: bytes, label: str) -> list[str]: ret = stdout.decode().strip().split("\n") print(f"{label}: {ret}") return ret -def get_branch_contents(ref: str) -> List[str]: +def get_branch_contents(ref: str) -> list[str]: """Get the list of directories in a branch.""" stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref]) return report_output(stdout, "Branch contents") -def get_sorted_tags_list() -> List[str]: +def get_sorted_tags_list() -> list[str]: """Get a list of sorted tags in descending order from the repository.""" stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"]) return report_output(stdout, "Tags list") -def get_versions(ref: str, add: str | None, remove: str | None) -> List[str]: +def get_versions(ref: str, add: str | None) -> list[str]: """Generate the file containing the list of all GitHub Pages builds.""" # Get the directories (i.e. 
builds) from the GitHub Pages branch try: @@ -36,15 +35,12 @@ def get_versions(ref: str, add: str | None, remove: str | None) -> List[str]: # Add and remove from the list of builds if add: builds.add(add) - if remove: - assert remove in builds, f"Build '{remove}' not in {sorted(builds)}" - builds.remove(remove) # Get a sorted list of tags tags = get_sorted_tags_list() # Make the sorted versions list from main branches and tags - versions: List[str] = [] + versions: list[str] = [] for version in ["master", "main"] + tags: if version in builds: versions.append(version) @@ -58,9 +54,12 @@ def get_versions(ref: str, add: str | None, remove: str | None) -> List[str]: def write_json(path: Path, repository: str, versions: str): org, repo_name = repository.split("/") + pages_url = f"https://{org}.github.io" + if repo_name != f"{org}.github.io": + # Only add the repo name if it isn't the source for the org pages site + pages_url += f"/{repo_name}" struct = [ - {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} - for version in versions + {"version": version, "url": f"{pages_url}/{version}/"} for version in versions ] text = json.dumps(struct, indent=2) print(f"JSON switcher:\n{text}") @@ -69,16 +68,12 @@ def write_json(path: Path, repository: str, versions: str): def main(args=None): parser = ArgumentParser( - description="Make a versions.txt file from gh-pages directories" + description="Make a versions.json file from gh-pages directories" ) parser.add_argument( "--add", help="Add this directory to the list of existing directories", ) - parser.add_argument( - "--remove", - help="Remove this directory from the list of existing directories", - ) parser.add_argument( "repository", help="The GitHub org and repository name: ORG/REPO", @@ -91,7 +86,7 @@ def main(args=None): args = parser.parse_args(args) # Write the versions file - versions = get_versions("origin/gh-pages", args.add, args.remove) + versions = get_versions("origin/gh-pages", args.add) write_json(args.output, args.repository, versions) diff --git a/.github/workflows/_check.yml b/.github/workflows/_check.yml new file mode 100644 index 0000000000..a6139c19fe --- /dev/null +++ b/.github/workflows/_check.yml @@ -0,0 +1,27 @@ +on: + workflow_call: + outputs: + branch-pr: + description: The PR number if the branch is in one + value: ${{ jobs.pr.outputs.branch-pr }} + +jobs: + pr: + runs-on: "ubuntu-latest" + outputs: + branch-pr: ${{ steps.script.outputs.result }} + steps: + - uses: actions/github-script@v7 + id: script + if: github.event_name == 'push' + with: + script: | + const prs = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + head: context.repo.owner + ':${{ github.ref_name }}' + }) + if (prs.data.length) { + console.log(`::notice ::Skipping CI on branch push as it is already run in PR #${prs.data[0]["number"]}`) + return prs.data[0]["number"] + } diff --git a/.github/workflows/_dist.yml b/.github/workflows/_dist.yml new file mode 100644 index 0000000000..436443c3f1 --- /dev/null +++ b/.github/workflows/_dist.yml @@ -0,0 +1,35 @@ +on: + workflow_call: + +jobs: + build: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Build sdist and wheel + run: > + export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && + pipx run build + + - name: Upload sdist and wheel as artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist + + - name: 
Check for packaging errors + run: pipx run twine check --strict dist/* + + - name: Install produced wheel + uses: ./.github/actions/install_requirements + with: + pip-install: dist/*.whl + + - name: Test module --version works using the installed wheel + run: python -m dodal --version diff --git a/.github/workflows/docs.yml b/.github/workflows/_docs.yml similarity index 67% rename from .github/workflows/docs.yml rename to .github/workflows/_docs.yml index d6e4b0e5f4..40446e332b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/_docs.yml @@ -1,17 +1,13 @@ -name: Docs CI - on: - push: - pull_request: + workflow_call: jobs: - docs: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + build: runs-on: ubuntu-latest steps: - name: Avoid git conflicts when tag and branch pushed at same time - if: startsWith(github.ref, 'refs/tags') + if: github.ref_type == 'tag' run: sleep 60 - name: Checkout @@ -21,20 +17,23 @@ jobs: fetch-depth: 0 - name: Install system packages - # Can delete this if you don't use graphviz in your docs run: sudo apt-get install graphviz - name: Install python packages uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - python_version: "3.11" - artifact_name: docs - name: Build docs run: tox -e docs + - name: Remove environment.pickle + run: rm build/html/.doctrees/environment.pickle + + - name: Upload built docs artifact + uses: actions/upload-artifact@v4 + with: + name: docs + path: build + - name: Sanitize ref name for docs version run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV @@ -45,11 +44,11 @@ jobs: run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json - name: Publish Docs to gh-pages - if: github.event_name == 'push' && github.actor != 'dependabot[bot]' + if: github.ref_type == 'tag' || github.ref_name == 'main' # We pin to the SHA, not the tag, for security reasons. 
# https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1 + uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages - keep_files: true + keep_files: true \ No newline at end of file diff --git a/.github/workflows/_pypi.yml b/.github/workflows/_pypi.yml new file mode 100644 index 0000000000..0c5258dbee --- /dev/null +++ b/.github/workflows/_pypi.yml @@ -0,0 +1,17 @@ +on: + workflow_call: + +jobs: + upload: + runs-on: ubuntu-latest + environment: release + + steps: + - name: Download dist artifact + uses: actions/download-artifact@v4 + with: + name: dist + path: dist + + - name: Publish to PyPI using trusted publishing + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml new file mode 100644 index 0000000000..e55efdb37f --- /dev/null +++ b/.github/workflows/_release.yml @@ -0,0 +1,32 @@ +on: + workflow_call: + +jobs: + artifacts: + runs-on: ubuntu-latest + + steps: + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + merge-multiple: true + + - name: Zip up docs + run: | + set -vxeuo pipefail + if [ -d html ]; then + mv html $GITHUB_REF_NAME + zip -r docs.zip $GITHUB_REF_NAME + rm -rf $GITHUB_REF_NAME + fi + + - name: Create GitHub Release + # We pin to the SHA, not the tag, for security reasons. + # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 # v2.0.4 + with: + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} + files: "*" + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml new file mode 100644 index 0000000000..f652d4145f --- /dev/null +++ b/.github/workflows/_test.yml @@ -0,0 +1,62 @@ +on: + workflow_call: + inputs: + python-version: + type: string + description: The version of python to install + required: true + runs-on: + type: string + description: The runner to run this job on + required: true + secrets: + CODECOV_TOKEN: + required: true + +env: + # https://github.com/pytest-dev/pytest/issues/2042 + PY_IGNORE_IMPORTMISMATCH: "1" + +jobs: + run: + runs-on: ${{ inputs.runs-on }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - if: inputs.python-version == 'dev' + name: Install dev versions of python packages + uses: ./.github/actions/install_requirements + + - if: inputs.python-version == 'dev' + name: Write the requirements as an artifact + run: pip freeze --exclude-editable > /tmp/dev-requirements.txt + + - if: inputs.python-version == 'dev' + name: Upload dev-requirements.txt + uses: actions/upload-artifact@v4 + with: + name: dev-requirements + path: /tmp/dev-requirements.txt + + - if: inputs.python-version != 'dev' + name: Install latest versions of python packages + uses: ./.github/actions/install_requirements + with: + python-version: ${{ inputs.python-version }} + pip-install: ".[dev]" + + - name: Run tests + run: tox -e tests + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + name: ${{ 
inputs.python-version }}/${{ inputs.runs-on }} + files: cov.xml + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/_tox.yml b/.github/workflows/_tox.yml new file mode 100644 index 0000000000..ec3bb5b513 --- /dev/null +++ b/.github/workflows/_tox.yml @@ -0,0 +1,24 @@ +on: + workflow_call: + inputs: + tox: + type: string + description: What to run under tox + required: true + + +jobs: + run: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install python packages + uses: ./.github/actions/install_requirements + + - name: Run tox + run: tox -e ${{ inputs.tox }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..338acf0da4 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,59 @@ +name: CI + +on: + push: + pull_request: + +jobs: + check: + uses: ./.github/workflows/_check.yml + + lint: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_tox.yml + with: + tox: pre-commit,diff-quality + + test: + needs: check + if: needs.check.outputs.branch-pr == '' + strategy: + matrix: + runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest + python-version: ["3.10", "3.11"] + include: + # Include one that runs in the dev environment + - runs-on: "ubuntu-latest" + python-version: "dev" + fail-fast: false + uses: ./.github/workflows/_test.yml + with: + runs-on: ${{ matrix.runs-on }} + python-version: ${{ matrix.python-version }} + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + docs: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_docs.yml + + dist: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_dist.yml + + pypi: + if: github.ref_type == 'tag' + needs: dist + uses: ./.github/workflows/_pypi.yml + permissions: + id-token: write + + release: + if: github.ref_type == 'tag' + needs: [dist, docs] + uses: ./.github/workflows/_release.yml + permissions: + contents: write diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml deleted file mode 100644 index 416ae69092..0000000000 --- a/.github/workflows/code.yml +++ /dev/null @@ -1,159 +0,0 @@ -name: Code CI - -on: - push: - pull_request: -env: - # The target python version, which must match the Dockerfile version - CONTAINER_PYTHON: "3.11" - DIST_WHEEL_PATH: dist-${{ github.sha }} - -jobs: - lint: - # pull requests are a duplicate of a branch push if within the same repo. 
- if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - python_version: "3.11" - artifact_name: lint - - - name: Lint - run: tox -e pre-commit,mypy - - test: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.11"] - install: ["-e .[dev]"] - # Make one version be non-editable to test both paths of version code - include: - - os: "ubuntu-latest" - python: "3.10" - install: ".[dev]" - - runs-on: ${{ matrix.os }} - env: - # https://github.com/pytest-dev/pytest/issues/2042 - PY_IGNORE_IMPORTMISMATCH: "1" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{ matrix.python }} - requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt - install_options: ${{ matrix.install }} - artifact_name: tests - - - name: List dependency tree - run: pipdeptree - - - name: Run tests - run: pytest --random-order -m "not s03" - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - name: ${{ matrix.python }}/${{ matrix.os }} - files: cov.xml - - dist: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: "ubuntu-latest" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Build sdist and wheel - run: | - export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ - pipx run build - - - name: Upload sdist and wheel as artifacts - uses: actions/upload-artifact@v4.3.1 - with: - name: ${{ env.DIST_WHEEL_PATH }} - path: dist - - - name: Check for packaging errors - run: pipx run twine check --strict dist/* - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{env.CONTAINER_PYTHON}} - requirements_file: requirements.txt - install_options: dist/*.whl - artifact_name: dist - - - name: Test module --version works using the installed wheel - # If more than one module in src/ replace with module name to test - run: python -m dodal --version - - release: - # upload to PyPI and make a release on every tag - needs: [lint, dist, test] - if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }} - runs-on: ubuntu-latest - env: - HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} - - environment: - name: pypi - url: https://pypi.org/p/dls-dodal - - steps: - - name: Download wheel and lockfiles - uses: actions/download-artifact@v4.1.2 - with: - pattern: "*dist*" - - - name: Rename lockfiles and dist - run: | - mv lockfiles-${{ env.CONTAINER_PYTHON }}-dist-${{ github.sha }} lockfiles - mv ${{ env.DIST_WHEEL_PATH }} dist - - - name: Fixup blank lockfiles - # Github release artifacts can't be blank - run: for f in lockfiles/*; do [ -s $f ] || echo '# No requirements' >> $f; done - - - name: Github Release - # We pin to the SHA, not the tag, for security reasons. 
- # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 - with: - prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} - files: | - dist/* - lockfiles/* - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Publish to PyPI - if: ${{ env.HAS_PYPI_TOKEN }} - uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml deleted file mode 100644 index e324640e78..0000000000 --- a/.github/workflows/docs_clean.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Docs Cleanup CI - -# delete branch documentation when a branch is deleted -# also allow manually deleting a documentation version -on: - delete: - workflow_dispatch: - inputs: - version: - description: "documentation version to DELETE" - required: true - type: string - -jobs: - remove: - if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch' - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: gh-pages - - - name: removing documentation for branch ${{ github.event.ref }} - if: ${{ github.event_name != 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV - - - name: manually removing documentation version ${{ github.event.inputs.version }} - if: ${{ github.event_name == 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV - - - name: Sanitize ref name for docs version - run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV - - - name: update index and push changes - run: | - rm -r $DOCS_VERSION - python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json - git config --global user.name 'GitHub Actions Docs Cleanup CI' - git config --global user.email 'GithubActionsCleanup@noreply.github.com' - git commit -am "Removing redundant docs version $DOCS_VERSION" - git push diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml deleted file mode 100644 index ed9cf57f3e..0000000000 --- a/.github/workflows/linkcheck.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Link Check - -on: - workflow_dispatch: - schedule: - # Run weekly to check URL links still resolve - - cron: "0 8 * * WED" - -jobs: - docs: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - python_version: "3.11" - artifact_name: link_check - - - name: Check links - run: tox -e docs build -- -b linkcheck - - - name: Keepalive Workflow - uses: gautamkrishnar/keepalive-workflow@v1 diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml new file mode 100644 index 0000000000..e2a0fd1b9e --- /dev/null +++ b/.github/workflows/periodic.yml @@ -0,0 +1,13 @@ +name: Periodic + +on: + workflow_dispatch: + schedule: + # Run weekly to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + linkcheck: + uses: ./.github/workflows/_tox.yml + with: + tox: docs build -- -b linkcheck diff --git a/.gitignore b/.gitignore index 2393a30cd0..6b1fba131b 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ __pycache__/ # 
Distribution / packaging .Python env/ -.venv build/ develop-eggs/ dist/ @@ -57,7 +56,7 @@ tmp/ # Sphinx documentation docs/_build/ -docs/user/reference/generated/ +docs/reference/generated/ # PyBuilder target/ @@ -71,3 +70,6 @@ lockfiles/ # ruff cache .ruff_cache/ + +# pycharm project files +.idea/ diff --git a/.vscode/extensions.json b/.vscode/extensions.json index a1227b3482..66ad6324d0 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,10 +1,5 @@ { "recommendations": [ "ms-vscode-remote.remote-containers", - "ms-python.python", - "tamasfe.even-better-toml", - "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters", - "charliermarsh.Ruff" ] -} +} \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index a14b412778..27c9bb2659 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,7 +6,7 @@ "configurations": [ { "name": "Debug Unit Test", - "type": "python", + "type": "debugpy", "request": "launch", "justMyCode": false, "program": "${file}", @@ -16,10 +16,8 @@ "console": "integratedTerminal", "preLaunchTask": "load_dials_env", "env": { - // The default config in setup.cfg's "[tool:pytest]" adds coverage. - // Cannot have coverage and debugging at the same time. - // https://github.com/microsoft/vscode-python/issues/693 - "PYTEST_ADDOPTS": "--no-cov" + // Enable break on exception when debugging tests (see: tests/conftest.py) + "PYTEST_RAISE": "1", }, } ] diff --git a/.vscode/settings.json b/.vscode/settings.json index b38388c121..39d49887e1 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,19 +1,22 @@ { - "python.testing.pytestArgs": [], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit" + }, "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff", "editor.rulers": [ 88 ], "editor.codeActionsOnSave": { - "source.fixAll.ruff": "never", - "source.organizeImports.ruff": "explicit" + "source.fixAll.ruff": false, + "source.organizeImports.ruff": true } }, "python.analysis.extraPaths": [ "./src" ], - "python.analysis.typeCheckingMode": "basic", + "python.analysis.typeCheckingMode": "basic" } \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4ac17c12eb..1b21c0fb27 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -21,4 +21,4 @@ "type": "shell" } ] -} +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index a7cf36f3bb..c4404ecabb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,25 +1,13 @@ -# This file is for use as a devcontainer and a runtime container -# -# The devcontainer should use the build target and run as root with podman +# The devcontainer should use the developer target and run as root with podman # or docker with user namespaces. -# -FROM python:3.11 as build +ARG PYTHON_VERSION=3.11 +FROM python:${PYTHON_VERSION} as developer -ARG PIP_OPTIONS=. +# Add any system dependencies for the developer/build environment here +RUN apt-get update && apt-get install -y --no-install-recommends \ + graphviz \ + && rm -rf /var/lib/apt/lists/* -# Add any system dependencies for the developer/build environment here e.g. 
-# RUN apt-get update && apt-get upgrade -y && \ -# apt-get install -y --no-install-recommends \ -# desired-packages \ -# && rm -rf /var/lib/apt/lists/* - -# set up a virtual environment and put it in PATH +# Set up a virtual environment and put it in PATH RUN python -m venv /venv ENV PATH=/venv/bin:$PATH - -# Copy any required context for the pip install over -COPY . /context -WORKDIR /context - -# install python package into /venv -RUN pip install ${PIP_OPTIONS} diff --git a/README.md b/README.md new file mode 100644 index 0000000000..be3bd3a1d4 --- /dev/null +++ b/README.md @@ -0,0 +1,40 @@ +[![CI](https://github.com/DiamondLightSource/dodal/actions/workflows/ci.yml/badge.svg)](https://github.com/DiamondLightSource/dodal/actions/workflows/ci.yml) +[![Coverage](https://codecov.io/gh/DiamondLightSource/dodal/branch/main/graph/badge.svg)](https://codecov.io/gh/DiamondLightSource/dodal) +[![PyPI](https://img.shields.io/pypi/v/dls-dodal.svg)](https://pypi.org/project/dls-dodal) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) + +# dodal + +Ophyd devices and other utils that could be used across DLS beamlines + +Source | <https://github.com/DiamondLightSource/dodal> +:---: | :---: +PyPI | `pip install dls-dodal` +Documentation | <https://diamondlightsource.github.io/dodal> +Releases | <https://github.com/DiamondLightSource/dodal/releases> + +Testing Connectivity +-------------------- + +You can test your connection to a beamline if its PVs are visible to your machine with: + + +``` + # On any workstation: + dodal connect + + # On a beamline workstation, this should suffice: + dodal connect ${BEAMLINE} +``` + + + +For more options, including a list of valid beamlines, type + +``` + dodal connect --help +``` + + + +See https://diamondlightsource.github.io/dodal for more detailed documentation. diff --git a/README.rst b/README.rst deleted file mode 100644 index aba6c9df2b..0000000000 --- a/README.rst +++ /dev/null @@ -1,61 +0,0 @@ -dodal -============ - -|code_ci| |docs_ci| |coverage| |pypi_version| |license| - - -Ophyd devices and other utils that could be used across DLS beamlines - -============== ============================================================== -PyPI ``pip install dls-dodal`` -Source code https://github.com/DiamondLightSource/dodal -Documentation https://DiamondLightSource.github.io/dodal -Releases https://github.com/DiamondLightSource/dodal/releases -============== ============================================================== - -Testing Connectivity --------------------- - -You can test your connection to a beamline if it's PVs are visible to your machine with: - -.. code:: shell - - # On any workstation: - dodal connect - - # On a beamline workstation, this should suffice: - dodal connect ${BEAMLINE} - - -For more options, including a list of valid beamlines, type - -.. code:: shell - - dodal connect --help - - -.. |code_ci| image:: https://github.com/DiamondLightSource/dodal/actions/workflows/code.yml/badge.svg?branch=main - :target: https://github.com/DiamondLightSource/dodal/actions/workflows/code.yml - :alt: Code CI - -.. |docs_ci| image:: https://github.com/DiamondLightSource/dodal/actions/workflows/docs.yml/badge.svg?branch=main - :target: https://github.com/DiamondLightSource/dodal/actions/workflows/docs.yml - :alt: Docs CI - -.. |coverage| image:: https://codecov.io/gh/DiamondLightSource/dodal/branch/main/graph/badge.svg - :target: https://codecov.io/gh/DiamondLightSource/dodal - :alt: Test Coverage - -..
|pypi_version| image:: https://img.shields.io/pypi/v/dls-dodal.svg - :target: https://pypi.org/project/dls-dodal - :alt: Latest PyPI version - -.. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg - :target: https://opensource.org/licenses/Apache-2.0 - :alt: Apache License - -.. - Anything below this line is used when viewing README.rst and will be replaced - when included in index.rst - -See https://DiamondLightSource.github.io/dodal for more detailed documentation. diff --git a/catalog-info.yaml b/catalog-info.yaml new file mode 100644 index 0000000000..5674a4647a --- /dev/null +++ b/catalog-info.yaml @@ -0,0 +1,12 @@ +apiVersion: backstage.io/v1alpha1 +kind: Component +metadata: + name: dls-dodal + title: dodal + description: Ophyd devices and other utils that could be used across DLS beamlines +spec: + type: library + lifecycle: production + owner: group:data-acquisition + dependsOn: + - zocalo diff --git a/docs/conf.py b/docs/conf.py index 09ff072669..5d29e18465 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,8 +50,13 @@ "sphinxcontrib.mermaid", # Signatures from type hinting "sphinx_autodoc_typehints", + # So we can write markdown files + "myst_parser", ] +# So we can use the ::: syntax +myst_enable_extensions = ["colon_fence"] + # If true, Sphinx will warn about all references where the target cannot # be found. nitpicky = True @@ -71,7 +76,6 @@ ("py:class", "typing_extensions.Literal"), ] - # Both the class’ and the __init__ method’s docstring are concatenated and # inserted into the main body of the autoclass directive autoclass_content = "both" @@ -115,14 +119,6 @@ # A dictionary of graphviz graph attributes for inheritance diagrams. inheritance_graph_attrs = {"rankdir": "TB"} -# Common links that should be available on every page -rst_epilog = """ -.. _Diamond Light Source: http://www.diamond.ac.uk -.. _ruff: https://beta.ruff.rs/docs/ -.. _mypy: http://mypy-lang.org/ -.. _pre-commit: https://pre-commit.com/ -""" - # Ignore localhost links for periodic check that links in docs are valid linkcheck_ignore = [r"http://localhost:\d+/"] @@ -137,7 +133,7 @@ # a list of builtin themes. # html_theme = "pydata_sphinx_theme" -github_repo = project +github_repo = "dodal" github_user = "DiamondLightSource" switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json" switcher_exists = requests.get(switcher_json).ok @@ -153,10 +149,10 @@ # Theme options for pydata_sphinx_theme # We don't check switcher because there are 3 possible states for a repo: # 1. New project, docs are not published so there is no switcher -# 2. Existing project with latest skeleton, switcher exists and works -# 3. Existing project with old skeleton that makes broken switcher, +# 2. Existing project with latest copier template, switcher exists and works +# 3. Existing project with old copier template that makes broken switcher, # switcher exists but is broken -# Point 3 makes checking switcher difficult, because the updated skeleton +# Point 3 makes checking switcher difficult, because the updated copier template # will fix the switcher at the end of the docs workflow, but never gets a chance # to complete as the docs build warns and fails. 
html_theme_options = { @@ -178,19 +174,13 @@ }, "check_switcher": False, "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], - "external_links": [ - { - "name": "Release Notes", - "url": f"https://github.com/{github_user}/{github_repo}/releases", - } - ], "navigation_with_keys": False, } # A dictionary of values to pass into the template engine’s context for all pages html_context = { "github_user": github_user, - "github_repo": project, + "github_repo": github_repo, "github_version": version, "doc_path": "docs", } @@ -203,7 +193,7 @@ # Logo html_logo = "images/dls-logo.svg" -html_favicon = "images/dls-favicon.ico" +html_favicon = html_logo templates_path = ["_templates"] diff --git a/docs/developer/explanations/decisions.rst b/docs/developer/explanations/decisions.rst deleted file mode 100644 index 5841e6ea07..0000000000 --- a/docs/developer/explanations/decisions.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. This Source Code Form is subject to the terms of the Mozilla Public -.. License, v. 2.0. If a copy of the MPL was not distributed with this -.. file, You can obtain one at http://mozilla.org/MPL/2.0/. - -Architectural Decision Records -============================== - -We record major architectural decisions in Architecture Decision Records (ADRs), -as `described by Michael Nygard -`_. -Below is the list of our current ADRs. - -.. toctree:: - :maxdepth: 1 - :glob: - - decisions/* \ No newline at end of file diff --git a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst deleted file mode 100644 index b2d3d0fe87..0000000000 --- a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst +++ /dev/null @@ -1,26 +0,0 @@ -1. Record architecture decisions -================================ - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We need to record the architectural decisions made on this project. - -Decision --------- - -We will use Architecture Decision Records, as `described by Michael Nygard -`_. - -Consequences ------------- - -See Michael Nygard's article, linked above. To create new ADRs we will copy and -paste from existing ones. diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst deleted file mode 100644 index 41d90fd4ac..0000000000 --- a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst +++ /dev/null @@ -1,35 +0,0 @@ -2. Adopt python3-pip-skeleton for project structure -=================================================== - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We should use the following `pip-skeleton `_. -The skeleton will ensure consistency in developer -environments and package management. - -Decision --------- - -We have switched to using the skeleton. - -Consequences ------------- - -This module will use a fixed set of tools as developed in python3-pip-skeleton -and can pull from this skeleton to update the packaging to the latest techniques. 
- -As such, the developer environment may have changed, the following could be -different: - -- linting -- formatting -- pip venv setup -- CI/CD diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst deleted file mode 100644 index 11a5e6386d..0000000000 --- a/docs/developer/how-to/build-docs.rst +++ /dev/null @@ -1,38 +0,0 @@ -Build the docs using sphinx -=========================== - -You can build the `sphinx`_ based docs from the project directory by running:: - - $ tox -e docs - -This will build the static docs on the ``docs`` directory, which includes API -docs that pull in docstrings from the code. - -.. seealso:: - - `documentation_standards` - -The docs will be built into the ``build/html`` directory, and can be opened -locally with a web browser:: - - $ firefox build/html/index.html - -Autobuild ---------- - -You can also run an autobuild process, which will watch your ``docs`` -directory for changes and rebuild whenever it sees changes, reloading any -browsers watching the pages:: - - $ tox -e docs autobuild - -You can view the pages at localhost:: - - $ firefox http://localhost:8000 - -If you are making changes to source code too, you can tell it to watch -changes in this directory too:: - - $ tox -e docs autobuild -- --watch src - -.. _sphinx: https://www.sphinx-doc.org/ diff --git a/docs/developer/how-to/contribute.rst b/docs/developer/how-to/contribute.rst deleted file mode 100644 index 65b992f08e..0000000000 --- a/docs/developer/how-to/contribute.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../../.github/CONTRIBUTING.rst diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst deleted file mode 100644 index 44ecfdd249..0000000000 --- a/docs/developer/how-to/lint.rst +++ /dev/null @@ -1,34 +0,0 @@ -Run linting using pre-commit -============================ - -Code linting is handled by ruff_ run under pre-commit_. - -Running pre-commit ------------------- - -You can run the above checks on all files with this command:: - - $ tox -e pre-commit - -Or you can install a pre-commit hook that will run each time you do a ``git -commit`` on just the files that have changed:: - - $ pre-commit install - -It is also possible to `automatically enable pre-commit on cloned repositories `_. -This will result in pre-commits being enabled on every repo your user clones from now on. - -Fixing issues -------------- - -If ruff reports an issue you can tell it to reformat all the files in the -repository:: - - $ ruff check --fix . - -Ruff may not be able to automatically fix all issues; in this case, you will have to fix those manually. - -VSCode support --------------- - -The ``.vscode/settings.json`` will run formatting as well as ruff checking on save. Issues will be highlighted in the editor window. diff --git a/docs/developer/how-to/make-release.rst b/docs/developer/how-to/make-release.rst deleted file mode 100644 index 83e1386b4e..0000000000 --- a/docs/developer/how-to/make-release.rst +++ /dev/null @@ -1,31 +0,0 @@ -Make a release -============== - -Releases are created through the Github release interface. - -To make a new release, please follow this checklist: - -- Choose a new PEP440 compliant release number (see https://peps.python.org/pep-0440/) (The release version should - look like ``{major}.{minor}.{patch}``). See `Deciding release numbers`_ if you're unsure on what the release version - should be. 
-- Go to the GitHub release_ page -- Choose ``Draft New Release`` -- Click ``Choose Tag`` and supply the new tag you chose (click create new tag) -- Click ``Generate release notes``, review and edit these notes. Confirm they do not omit anything important and make sense (to a user, not just a developer). -- Choose a title and click ``Publish Release``. This will create a release on ``pypi`` automatically and post to the - ``bluesky`` slack channel. -- Manually confirm that the ``pypi`` version has been updated (after all tests have run) and that slack is notified. - -Note that tagging and pushing to the main branch has the same effect except that -you will not get the option to edit the release notes. - -.. _release: https://github.com/DiamondLightSource/dodal/releases - -Deciding release numbers ------------------------- - -Releases should obviously be versioned higher than the previous latest release. Otherwise you should follow this guide: - -* **Major** - Changes that have the potential to break plans -* **Minor** - New features -* **Patch** - Bug fixes diff --git a/docs/developer/how-to/run-tests.rst b/docs/developer/how-to/run-tests.rst deleted file mode 100644 index d2e03644c2..0000000000 --- a/docs/developer/how-to/run-tests.rst +++ /dev/null @@ -1,12 +0,0 @@ -Run the tests using pytest -========================== - -Testing is done with pytest_. It will find functions in the project that `look -like tests`_, and run them to check for errors. You can run it with:: - - $ tox -e pytest - -It will also report coverage to the commandline and to ``cov.xml``. - -.. _pytest: https://pytest.org/ -.. _look like tests: https://docs.pytest.org/explanation/goodpractices.html#test-discovery diff --git a/docs/developer/how-to/static-analysis.rst b/docs/developer/how-to/static-analysis.rst deleted file mode 100644 index 065920e1c6..0000000000 --- a/docs/developer/how-to/static-analysis.rst +++ /dev/null @@ -1,8 +0,0 @@ -Run static analysis using mypy -============================== - -Static type analysis is done with mypy_. It checks type definition in source -files without running them, and highlights potential issues where types do not -match. You can run it with:: - - $ tox -e mypy diff --git a/docs/developer/how-to/update-tools.rst b/docs/developer/how-to/update-tools.rst deleted file mode 100644 index c1075ee8c1..0000000000 --- a/docs/developer/how-to/update-tools.rst +++ /dev/null @@ -1,16 +0,0 @@ -Update the tools -================ - -This module is merged with the python3-pip-skeleton_. This is a generic -Python project structure which provides a means to keep tools and -techniques in sync between multiple Python projects. To update to the -latest version of the skeleton, run:: - - $ git pull --rebase=false https://github.com/DiamondLightSource/python3-pip-skeleton - -Any merge conflicts will indicate an area where something has changed that -conflicts with the setup of the current module. Check the `closed pull requests -`_ -of the skeleton module for more details. - -.. _python3-pip-skeleton: https://DiamondLightSource.github.io/python3-pip-skeleton diff --git a/docs/developer/index.rst b/docs/developer/index.rst deleted file mode 100644 index 9c7adc28fb..0000000000 --- a/docs/developer/index.rst +++ /dev/null @@ -1,67 +0,0 @@ -Developer Guide -=============== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_run;3em` - - .. 
toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/dev-install - - +++ - - Tutorials for getting up and running as a developer. - - .. grid-item-card:: :material-regular:`task;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/contribute - how-to/move-code - how-to/build-docs - how-to/run-tests - how-to/make-new-ophyd-async-device - how-to/static-analysis - how-to/lint - how-to/update-tools - how-to/make-release - how-to/create-beamline - how-to/zocalo - - +++ - - Practical step-by-step guides for day-to-day dev tasks. - - .. grid-item-card:: :material-regular:`apartment;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/decisions - - +++ - - Explanations of how and why the architecture is why it is. - - .. grid-item-card:: :material-regular:`description;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/standards - reference/device-standards - - +++ - - Technical reference material on standards in use. diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst deleted file mode 100644 index d0d3c56774..0000000000 --- a/docs/developer/tutorials/dev-install.rst +++ /dev/null @@ -1,68 +0,0 @@ -Developer install -================= - -These instructions will take you through the minimal steps required to get a dev -environment setup, so you can run the tests locally. - -Clone the repository --------------------- - -First clone the repository locally using `Git -`_:: - - $ git clone git://github.com/DiamondLightSource/dodal.git - -Install dependencies --------------------- - -You can choose to either develop on the host machine using a `venv` (which -requires python 3.8 or later) or to run in a container under `VSCode -`_ - -.. tab-set:: - - .. tab-item:: Local virtualenv - - .. code:: - - $ cd dodal - $ python3 -m venv venv - $ source venv/bin/activate - $ pip install -e .[dev] - - .. tab-item:: VSCode devcontainer - - .. code:: - - $ code dodal - # Click on 'Reopen in Container' when prompted - # Open a new terminal - - .. note:: - - See the epics-containers_ documentation for more complex - use cases, such as integration with podman. - -See what was installed ----------------------- - -To see a graph of the python package dependency tree type:: - - $ pipdeptree - -Build and test --------------- - -Now you have a development environment you can run the tests in a terminal:: - - $ tox -p - -This will run in parallel the following checks: - -- `../how-to/build-docs` -- `../how-to/run-tests` -- `../how-to/static-analysis` -- `../how-to/lint` - - -.. _epics-containers: https://epics-containers.github.io/main/user/tutorials/devcontainer.html diff --git a/docs/explanations.md b/docs/explanations.md new file mode 100644 index 0000000000..73ab289b60 --- /dev/null +++ b/docs/explanations.md @@ -0,0 +1,10 @@ +# Explanations + +Explanations of how it works and why it works that way. + +```{toctree} +:maxdepth: 1 +:glob: + +explanations/* +``` diff --git a/docs/explanations/decisions.md b/docs/explanations/decisions.md new file mode 100644 index 0000000000..0533b98d45 --- /dev/null +++ b/docs/explanations/decisions.md @@ -0,0 +1,12 @@ +# Architectural Decision Records + +Architectural decisions are made throughout a project's lifetime. As a way of keeping track of these decisions, we record these decisions in Architecture Decision Records (ADRs) listed below. 
+ +```{toctree} +:glob: true +:maxdepth: 1 + +decisions/* +``` + +For more information on ADRs see this [blog by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). diff --git a/docs/explanations/decisions/0001-record-architecture-decisions.md b/docs/explanations/decisions/0001-record-architecture-decisions.md new file mode 100644 index 0000000000..44d234efce --- /dev/null +++ b/docs/explanations/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,18 @@ +# 1. Record architecture decisions + +## Status + +Accepted + +## Context + +We need to record the architectural decisions made on this project. + +## Decision + +We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). + +## Consequences + +See Michael Nygard's article, linked above. To create new ADRs we will copy and +paste from existing ones. diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0002-switched-to-python-copier-template.md new file mode 100644 index 0000000000..66fe5d8b24 --- /dev/null +++ b/docs/explanations/decisions/0002-switched-to-python-copier-template.md @@ -0,0 +1,28 @@ +# 2. Adopt python-copier-template for project structure + +## Status + +Accepted + +## Context + +We should use the following [python-copier-template](https://github.com/DiamondLightSource/python-copier-template). +The template will ensure consistency in developer +environments and package management. + +## Decision + +We have switched to using the template. + +## Consequences + +This module will use a fixed set of tools as developed in `python-copier-template` +and can pull from this template to update the packaging to the latest techniques. + +As such, the developer environment may have changed, the following could be +different: + +- linting +- formatting +- pip venv setup +- CI/CD diff --git a/docs/explanations/decisions/COPYME b/docs/explanations/decisions/COPYME new file mode 100644 index 0000000000..b466c79299 --- /dev/null +++ b/docs/explanations/decisions/COPYME @@ -0,0 +1,19 @@ +# 3. Short descriptive title + +Date: Today's date + +## Status + +Accepted + +## Context + +Background to allow us to make the decision, to show how we arrived at our conclusions. + +## Decision + +What decision we made. + +## Consequences + +What we will do as a result of this decision. diff --git a/docs/genindex.md b/docs/genindex.md new file mode 100644 index 0000000000..73f1191b0a --- /dev/null +++ b/docs/genindex.md @@ -0,0 +1,3 @@ +# Index + + diff --git a/docs/genindex.rst b/docs/genindex.rst deleted file mode 100644 index 93eb8b2940..0000000000 --- a/docs/genindex.rst +++ /dev/null @@ -1,5 +0,0 @@ -API Index -========= - -.. - https://stackoverflow.com/a/42310803 diff --git a/docs/how-to.md b/docs/how-to.md new file mode 100644 index 0000000000..6b16141727 --- /dev/null +++ b/docs/how-to.md @@ -0,0 +1,10 @@ +# How-to Guides + +Practical step-by-step guides for the more experienced user. 
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+how-to/*
+```
diff --git a/docs/how-to/build-docs.md b/docs/how-to/build-docs.md
new file mode 100644
index 0000000000..f557337f4c
--- /dev/null
+++ b/docs/how-to/build-docs.md
@@ -0,0 +1,39 @@
+# Build the docs using sphinx
+
+You can build the [sphinx](https://www.sphinx-doc.org) based docs from the project directory by running:
+
+```
+$ tox -e docs
+```
+
+This will build the static docs from the `docs` directory, which includes API docs that pull in docstrings from the code.
+
+:::{seealso}
+[](documentation_standards)
+:::
+
+The docs will be built into the `build/html` directory, and can be opened locally with a web browser:
+
+```
+$ firefox build/html/index.html
+```
+
+## Autobuild
+
+You can also run an autobuild process, which will watch your `docs` directory and rebuild whenever it sees changes, reloading any browsers watching the pages:
+
+```
+$ tox -e docs autobuild
+```
+
+You can view the pages at localhost:
+
+```
+$ firefox http://localhost:8000
+```
+
+If you are also making changes to source code, you can tell it to watch the `src` directory too:
+
+```
+$ tox -e docs autobuild -- --watch src
+```
diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md
new file mode 100644
index 0000000000..6e41979708
--- /dev/null
+++ b/docs/how-to/contribute.md
@@ -0,0 +1,2 @@
+```{include} ../../.github/CONTRIBUTING.md
+```
diff --git a/docs/how-to/coverage.md b/docs/how-to/coverage.md
new file mode 100644
index 0000000000..161ffc2c76
--- /dev/null
+++ b/docs/how-to/coverage.md
@@ -0,0 +1,8 @@
+
+# How to check code coverage
+
+Code coverage is reported to the command line and to a `cov.xml` file by the command `tox -e tests`. The file is uploaded to the Codecov service in CI.
+
+## Adding a Codecov Token
+
+If the repo is not hosted in DiamondLightSource, then you need to visit `https://app.codecov.io/account/gh//org-upload-token` to generate a token for your org, and store it as a secret named `CODECOV_TOKEN` in `https://github.com/organizations//settings/secrets/actions`
diff --git a/docs/developer/how-to/create-beamline.rst b/docs/how-to/create-beamline.rst
similarity index 100%
rename from docs/developer/how-to/create-beamline.rst
rename to docs/how-to/create-beamline.rst
diff --git a/docs/how-to/dev-install.md b/docs/how-to/dev-install.md
new file mode 100644
index 0000000000..3899b55998
--- /dev/null
+++ b/docs/how-to/dev-install.md
@@ -0,0 +1,56 @@
+# Set up a Developer Environment
+
+These instructions will take you through the minimal steps required to get a dev environment set up, so you can run the tests locally.
+
+## Clone the repository
+
+First clone the repository locally using [Git](https://git-scm.com/downloads). There is a link on the GitHub interface to allow you to do this. SSH is recommended if you have set up a key. Enter the directory that it is cloned into to continue.
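+
+Once the dependencies are installed (see the next section), a quick sanity check of the resulting environment is to import the package and print its version (a minimal sketch; run it with the virtualenv active):
+
+```python
+# The editable install generates src/dodal/_version.py via setuptools_scm,
+# so an import that reports a version confirms the install worked.
+import dodal
+
+print(dodal.__version__)
+```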
+
+## Install dependencies
+
+You can choose to either develop on the host machine using a `venv` (which requires python 3.10 or later) or to run
+in a container under [VSCode](https://code.visualstudio.com/).
+
+
+::::{tab-set}
+
+:::{tab-item} Local virtualenv
+```
+python3 -m venv venv
+source venv/bin/activate
+pip install -e '.[dev]'
+```
+
+## See what was installed
+
+To see a graph of the python package dependency tree type:
+
+    $ pipdeptree
+
+:::
+
+:::{tab-item} VSCode devcontainer
+If you are at DLS, then first [setup podman and its fix for devcontainer features](https://dev-portal.diamond.ac.uk/guide/containers/tutorials/podman/#enable-use-of-vscode-features)
+
+```
+code .
+# Click on 'Reopen in Container' when prompted
+# Open a new terminal
+```
+:::
+
+::::
+
+## Build and test
+
+Now that you have a development environment you can run the tests in a terminal:
+
+```
+tox -p
+```
+
+This will run in parallel the following checks:
+
+- [](./build-docs)
+- [](./run-tests)
+- [](./static-analysis)
+- [](./lint)
diff --git a/docs/how-to/excalidraw.md b/docs/how-to/excalidraw.md
new file mode 100644
index 0000000000..294de96052
--- /dev/null
+++ b/docs/how-to/excalidraw.md
@@ -0,0 +1,19 @@
+# How to embed Excalidraw diagrams
+
+Start off by creating your diagram in [Excalidraw](https://excalidraw.com):
+
+```{raw} html
+:file: ../images/excalidraw-example.svg
+```
+
+Click 'Save as image' and make sure the 'Embed scene' checkbox is enabled. This is required for loading your image back into Excalidraw should you wish to make changes later on. Name your file and export to SVG, saving it inside `docs/images`.
+
+Add the following to embed it inside your documentation:
+
+    ```{raw} html
+    :file: ../images/my-diagram.excalidraw.svg
+    ```
+
+It is preferred to use the above convention over the image directive in order to retain the font used by Excalidraw.
+
+Rebuild the docs and open the resulting HTML in a browser.
diff --git a/docs/how-to/lint.md b/docs/how-to/lint.md
new file mode 100644
index 0000000000..3a706436e9
--- /dev/null
+++ b/docs/how-to/lint.md
@@ -0,0 +1,34 @@
+# Run linting using pre-commit
+
+Code linting is handled by [ruff](https://docs.astral.sh/ruff) run under [pre-commit](https://pre-commit.com/).
+
+## Running pre-commit
+
+You can run the above checks on all files with this command:
+
+```
+$ tox -e pre-commit
+```
+
+Or you can install a pre-commit hook that will run each time you do a `git commit` on just the files that have changed:
+
+```
+$ pre-commit install
+```
+
+It is also possible to [automatically enable pre-commit on cloned repositories](https://pre-commit.com/#automatically-enabling-pre-commit-on-repositories). This will result in pre-commits being enabled on every repo your user clones from now on.
+
+## Fixing issues
+
+The typical workflow is:
+
+- Make a code change
+- `git add` it
+- Try to commit
+- Pre-commit will run, and ruff will try to fix any issues it finds
+- If anything changes it will be left in your working copy
+- Review and commit the results
+
+## VSCode support
+
+The `.vscode/settings.json` will run ruff formatters on save, but will not try to auto-fix as that does things like removing unused imports which is too intrusive while editing.
diff --git a/docs/how-to/lock-requirements.md b/docs/how-to/lock-requirements.md
new file mode 100644
index 0000000000..b81ceeed5e
--- /dev/null
+++ b/docs/how-to/lock-requirements.md
@@ -0,0 +1,39 @@
+# Lock requirements
+
+## Introduction
+
+By design this project only defines dependencies in one place, i.e. in the `requires` table in `pyproject.toml`.
+
+In the `requires` table it is possible to pin versions of some dependencies as needed. For library projects it is best to leave pinning to a minimum so that your library can be used by the widest range of applications.
+
+When CI builds the project it will use the latest compatible set of dependencies available (after applying your pins and any dependencies' pins).
+
+This approach means that there is a possibility that a future build may break because an updated release of a dependency has made a breaking change.
+
+The correct way to fix such an issue is to work out the minimum pinning in `requires` that will resolve the problem. However this can be quite hard to do and may be time-consuming when simply trying to release a minor update.
+
+For this reason we provide a mechanism for locking all dependencies to the same version as a previous successful release. This is a quick fix that should guarantee a successful CI build.
+
+## Finding the lock files
+
+Every release of the project will have a set of requirements files published as release assets.
+
+For example, take a look at the release page for python-copier-template [here](https://github.com/DiamondLightSource/python-copier-template/releases/tag/1.1.0).
+
+There is a single `dev-requirements.txt` file showing as an asset on the release. This has been created using `pip freeze --exclude-editable` on a successful test run using the same version of python as the devcontainer, and will contain a full list of the dependencies and sub-dependencies with pinned versions. You can download this file by clicking on it.
+
+## Applying the lock file
+
+To apply a lockfile:
+
+- copy the requirements file you have downloaded to the root of your repository
+- commit it into the repo
+- push the changes
+
+The CI looks for a `dev-requirements.txt` in the root and will pass it to pip as a constraint when installing the dev environment. If a package is required to be installed by `pyproject.toml` then `pip` will use the version specified in `dev-requirements.txt`.
+
+## Removing dependency locking from CI
+
+Once the reasons for locking the build have been resolved it is a good idea to go back to an unlocked build. This is because you get an early indication of any incoming problems.
+
+To restore unlocked builds in CI simply remove `dev-requirements.txt` from the root of the repo and push.
diff --git a/docs/developer/how-to/make-new-ophyd-async-device.rst b/docs/how-to/make-new-ophyd-async-device.rst
similarity index 100%
rename from docs/developer/how-to/make-new-ophyd-async-device.rst
rename to docs/how-to/make-new-ophyd-async-device.rst
diff --git a/docs/how-to/make-release.md b/docs/how-to/make-release.md
new file mode 100644
index 0000000000..caa42a4bca
--- /dev/null
+++ b/docs/how-to/make-release.md
@@ -0,0 +1,32 @@
+# Make a release
+
+Releases are created through the GitHub release interface.
+
+To make a new release, please follow this checklist:
+
+- Ensure that you have previously followed [](./pypi)
+- Choose a new PEP440 compliant release number (see https://peps.python.org/pep-0440/). The release version should
+  look like `{major}.{minor}.{patch}`. See [Deciding release numbers](#deciding-release-numbers) if you're unsure
+  what the release version should be.
+- Go to the GitHub [release] page
+- Choose `Draft New Release`
+- Click `Choose Tag` and supply the new tag you chose (click create new tag)
+- Click `Generate release notes`, review and edit these notes. Confirm they do not omit anything important and make sense (to a user, not just a developer).
+- Choose a title and click `Publish Release`. This will create a release on `pypi` automatically and post to the
+  `bluesky` slack channel.
+- Manually confirm that the `pypi` version has been updated (after all tests have run) and that slack is notified.
+
+Note that tagging and pushing to the main branch has the same effect except that
+you will not get the option to edit the release notes.
+
+A new release will be made and the wheel and sdist uploaded to PyPI.
+
+[release]: https://github.com/DiamondLightSource/dodal/releases
+
+## Deciding release numbers
+
+Releases should be versioned higher than the previous latest release. Otherwise you should follow this guide:
+
+* **Major** - Changes that have the potential to break plans
+* **Minor** - New features
+* **Patch** - Bug fixes
diff --git a/docs/developer/how-to/move-code.rst b/docs/how-to/move-code.rst
similarity index 100%
rename from docs/developer/how-to/move-code.rst
rename to docs/how-to/move-code.rst
diff --git a/docs/how-to/pypi.md b/docs/how-to/pypi.md
new file mode 100644
index 0000000000..5cc9eaaf73
--- /dev/null
+++ b/docs/how-to/pypi.md
@@ -0,0 +1,24 @@
+# Setting up PyPI publishing
+
+Publishing your package on PyPI requires a PyPI account, and the PyPI project must be set up for [Trusted Publisher](https://docs.pypi.org/trusted-publishers/) publishing.
+
+## Gather the information
+
+You will need the following information:
+
+- Owner: The GitHub org that the repo is contained in, e.g. `DiamondLightSource`
+- Repository name: The GitHub repository name, e.g. `python-copier-template-example`
+- PyPI Project Name: The distribution name on PyPI, e.g. `dls-python-copier-template-example`
+- Workflow name: The workflow that does publishing, `_pypi.yml` for `python-copier-template` projects
+- Environment name: The GitHub environment that publishing is done with, `release` for `python-copier-template` projects
+
+## If publishing to the DiamondLightSource PyPI organisation
+
+If you are publishing to the DiamondLightSource PyPI organisation then use the above information and follow the [Developer Portal Guide on PyPI publishing](https://dev-portal.diamond.ac.uk/guide/python/how-tos/pypi/).
+
+## If publishing the PyPI project to another organisation
+
+If you are publishing to a different PyPI organisation then use the above information in one of the following guides:
+
+- [Creating a PyPI project with a trusted publisher](https://docs.pypi.org/trusted-publishers/creating-a-project-through-oidc/)
+- [Adding a trusted publisher to an existing PyPI project](https://docs.pypi.org/trusted-publishers/adding-a-publisher/)
diff --git a/docs/how-to/run-tests.md b/docs/how-to/run-tests.md
new file mode 100644
index 0000000000..60d8f0bc5e
--- /dev/null
+++ b/docs/how-to/run-tests.md
@@ -0,0 +1,20 @@
+(using-pytest)=
+
+# Run the tests using pytest
+
+Testing is done with [pytest]. It will find functions in the project that [look like tests][look like tests], and run them to check for errors. You can run it with:
+
+```
+$ pytest
+```
+
+When you have some fully working tests you can run them with coverage:
+
+```
+$ tox -e tests
+```
+
+It will also report coverage to the command line and to `cov.xml`.
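+
+As a reminder of what "look like tests" means in practice, this is the kind of function pytest will collect and run (a minimal sketch; the file and test names are hypothetical):
+
+```python
+# tests/test_example.py: pytest collects files named test_*.py and, inside
+# them, functions named test_*; a plain assert is all a check needs.
+def test_lists_concatenate():
+    assert [1] + [2] == [1, 2]
+```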
+
+[look like tests]: https://docs.pytest.org/explanation/goodpractices.html#test-discovery
+[pytest]: https://pytest.org/
diff --git a/docs/how-to/static-analysis.md b/docs/how-to/static-analysis.md
new file mode 100644
index 0000000000..3ece75002f
--- /dev/null
+++ b/docs/how-to/static-analysis.md
@@ -0,0 +1,7 @@
+# Run static analysis using pyright or mypy
+
+Static type analysis is done with [pyright](https://microsoft.github.io/pyright) or [mypy](https://www.mypy-lang.org), depending on the settings in `pyproject.toml`. It checks type definitions in source files without running them, and highlights potential issues where types do not match. You can run it with:
+
+```
+$ tox -e type-checking
+```
diff --git a/docs/how-to/update-template.md b/docs/how-to/update-template.md
new file mode 100644
index 0000000000..a6a2135672
--- /dev/null
+++ b/docs/how-to/update-template.md
@@ -0,0 +1,9 @@
+# How to update to the latest template structure
+
+To track changes to the upstream template, run:
+
+```
+copier update
+```
+
+This will fetch the latest tagged release of the template, and apply any changes to your working copy. It will prompt for answers again, giving your previous answers as the defaults.
diff --git a/docs/developer/how-to/zocalo.rst b/docs/how-to/zocalo.rst
similarity index 94%
rename from docs/developer/how-to/zocalo.rst
rename to docs/how-to/zocalo.rst
index c807058e05..c9cab27e69 100644
--- a/docs/developer/how-to/zocalo.rst
+++ b/docs/how-to/zocalo.rst
@@ -1,7 +1,7 @@
 Zocalo Interaction
 ==================
 
-.. image:: ../../assets/zocalo.png
+.. image:: ../assets/zocalo.png
   :alt: Diagram of zocalo
 
 Zocalo jobs are triggered based on their ISPyB DCID using the ``ZocaloTrigger`` class in a callback subscribed to the
diff --git a/docs/images/dls-favicon.ico b/docs/images/dls-favicon.ico
deleted file mode 100644
index 9a11f508ef8aed28f14c5ce0d8408e1ec8b614a1..0000000000000000000000000000000000000000
Binary files a/docs/images/dls-favicon.ico and /dev/null differ
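To make the static-analysis guide above concrete, the snippet below shows the kind of mismatch pyright or mypy reports without executing any code (a minimal sketch; `scale` and its caller are hypothetical, not part of dodal):

```python
# A type checker resolves the annotations and flags the call site below;
# at runtime the mismatch would only surface as a TypeError inside the body.
def scale(value: float, factor: float) -> float:
    return value * factor

scale("10", 2.0)  # flagged: "str" is not compatible with parameter of type "float"
```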
diff --git a/docs/images/excalidraw-example.svg b/docs/images/excalidraw-example.svg
new file mode 100644
index 0000000000..9f8b3fd963
--- /dev/null
+++ b/docs/images/excalidraw-example.svg
@@ -0,0 +1,16 @@
+[excalidraw-example.svg: SVG markup with an embedded base64 Excalidraw scene payload, omitted here as unrenderable image data; the diagram's only text elements are "This" and "That"]
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000000..730b3fdc1a
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,56 @@
+---
+html_theme.sidebar_secondary.remove: true
+---
+
+```{include} ../README.md
+:end-before: <!-- README only content
+```
+
+::::{grid} 2
+:gutter: 4
+
+:::{grid-item-card} {material-regular}`directions_walk;2em`
+```{toctree}
+:maxdepth: 2
+tutorials
+```
++++
+Tutorials for installation and typical usage. New users start here.
+:::
+
+:::{grid-item-card} {material-regular}`directions;2em`
+```{toctree}
+:maxdepth: 2
+how-to
+```
++++
+Practical step-by-step guides for the more experienced user.
+:::
+
+:::{grid-item-card} {material-regular}`info;2em`
+```{toctree}
+:maxdepth: 2
+explanations
+```
++++
+Explanations of how it works and why it works that way.
+:::
+
+:::{grid-item-card} {material-regular}`menu_book;2em`
+```{toctree}
+:maxdepth: 2
+reference
+```
++++
+Technical reference material including APIs and release notes.
+:::
+
+::::
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 3cfd8009ee..0000000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,29 +0,0 @@
-:html_theme.sidebar_secondary.remove:
-
-.. include:: ../README.rst
-    :end-before: when included in index.rst
-
-How the documentation is structured
------------------------------------
-
-The documentation is split into 2 sections:
-
-.. grid:: 2
-
-    .. grid-item-card:: :material-regular:`person;4em`
-        :link: user/index
-        :link-type: doc
-
-        The User Guide contains documentation on how to install and use dodal.
-
-    .. grid-item-card:: :material-regular:`code;4em`
-        :link: developer/index
-        :link-type: doc
-
-        The Developer Guide contains documentation on how to develop and contribute changes back to dodal.
-
-.. toctree::
-    :hidden:
-
-    user/index
-    developer/index
diff --git a/docs/reference.md b/docs/reference.md
new file mode 100644
index 0000000000..7362151f5c
--- /dev/null
+++ b/docs/reference.md
@@ -0,0 +1,12 @@
+# Reference
+
+Technical reference material including APIs and release notes.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+reference/*
+genindex
+Release Notes <https://github.com/DiamondLightSource/dodal/releases>
+```
diff --git a/docs/user/reference/api.rst b/docs/reference/api.md
similarity index 89%
rename from docs/user/reference/api.rst
rename to docs/reference/api.md
index eccd886421..0c92e9e065 100644
--- a/docs/user/reference/api.rst
+++ b/docs/reference/api.md
@@ -1,6 +1,6 @@
-API
-===
+# API
 
+```{eval-rst}
 .. autosummary::
     :recursive:
     :toctree: generated
 
     dodal.common
     dodal.devices
     dodal.plans
 
-
-:ref: `modindex`
-
 .. automodule:: dodal
 
     ``dodal``
    -----------------------------------
+```
 
 This is the internal API reference for dodal
 
+```{eval-rst}
 .. data:: dodal.__version__
     :type: str
 
     Version number as calculated by https://github.com/pypa/setuptools_scm
+```
diff --git a/docs/developer/reference/device-standards.rst b/docs/reference/device-standards.rst
similarity index 100%
rename from docs/developer/reference/device-standards.rst
rename to docs/reference/device-standards.rst
diff --git a/docs/developer/reference/standards.rst b/docs/reference/standards.rst
similarity index 80%
rename from docs/developer/reference/standards.rst
rename to docs/reference/standards.rst
index 3dcf073080..5afa997e54 100644
--- a/docs/developer/reference/standards.rst
+++ b/docs/reference/standards.rst
@@ -1,5 +1,5 @@
-General Coding Standards
-========================
+Standards
+=========
 
 This document defines the code and documentation standards used in this
 repository.
@@ -9,14 +9,19 @@ Code Standards
 
 The code in this repository conforms to standards set by the following tools:
 
-- ruff_ for style checks and code formatting
-- mypy_ for static type checking
+- ruff_ for style checks and code formatting (running the flake8_ and isort_ rule sets)
+- pyright_ for static type checking
 
 .. seealso::
 
     How-to guides `../how-to/lint` and `../how-to/static-analysis`
 
-.. _documentation_standards:
+.. _ruff: https://docs.astral.sh/ruff/
+.. _flake8: https://flake8.pycqa.org/en/latest/
+.. _isort: https://pycqa.github.io/isort/
+.. _pyright: https://github.com/microsoft/pyright
 
 Supported Python Versions
 -------------------------
@@ -26,6 +31,7 @@
 https://numpy.org/neps/nep-0029-deprecation_policy.html.
 
 Currently supported versions are: 3.10, 3.11.
 
+.. _documentation_standards:
 Documentation Standards
 -----------------------
@@ -68,4 +74,4 @@ Docs follow the underlining convention::
 
 .. seealso::
 
-    How-to guide `../how-to/build-docs`
+    How-to guide `../how-to/build-docs`
\ No newline at end of file
diff --git a/docs/tutorials.md b/docs/tutorials.md
new file mode 100644
index 0000000000..1fe66c541d
--- /dev/null
+++ b/docs/tutorials.md
@@ -0,0 +1,10 @@
+# Tutorials
+
+Tutorials for installation and typical usage. New users start here.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+tutorials/*
+```
diff --git a/docs/user/tutorials/get_started.rst b/docs/tutorials/get_started.rst
similarity index 100%
rename from docs/user/tutorials/get_started.rst
rename to docs/tutorials/get_started.rst
diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md
new file mode 100644
index 0000000000..2b33095142
--- /dev/null
+++ b/docs/tutorials/installation.md
@@ -0,0 +1,42 @@
+# Installation
+
+## Check your version of python
+
+You will need python 3.10 or later. You can check your version of python by
+typing into a terminal:
+
+```
+$ python3 --version
+```
+
+## Create a virtual environment
+
+It is recommended that you install into a “virtual environment” so this
+installation will not interfere with any existing Python software:
+
+```
+$ python3 -m venv /path/to/venv
+$ source /path/to/venv/bin/activate
+```
+
+## Installing the library
+
+You can now use `pip` to install the library and its dependencies:
+
+```
+$ python3 -m pip install dls-dodal
+```
+
+If you require a feature that is not currently released you can also install
+from GitHub:
+
+```
+$ python3 -m pip install git+https://github.com/DiamondLightSource/dodal.git
+```
+
+The library should now be installed and the command line interface on your path.
+You can check the version that has been installed by typing: + +``` +$ dodal --version +``` diff --git a/docs/user/explanations/docs-structure.rst b/docs/user/explanations/docs-structure.rst deleted file mode 100644 index f25a09baad..0000000000 --- a/docs/user/explanations/docs-structure.rst +++ /dev/null @@ -1,18 +0,0 @@ -About the documentation ------------------------ - - :material-regular:`format_quote;2em` - - The Grand Unified Theory of Documentation - - -- David Laing - -There is a secret that needs to be understood in order to write good software -documentation: there isn't one thing called *documentation*, there are four. - -They are: *tutorials*, *how-to guides*, *technical reference* and *explanation*. -They represent four different purposes or functions, and require four different -approaches to their creation. Understanding the implications of this will help -improve most documentation - often immensely. - -`More information on this topic. `_ diff --git a/docs/user/index.rst b/docs/user/index.rst deleted file mode 100644 index d5367e7707..0000000000 --- a/docs/user/index.rst +++ /dev/null @@ -1,56 +0,0 @@ -User Guide -========== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_walk;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/installation - tutorials/get_started - - +++ - - Tutorials for installation and typical usage. New users start here. - - .. grid-item-card:: :material-regular:`directions;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - +++ - - Practical step-by-step guides for the more experienced user. - - .. grid-item-card:: :material-regular:`info;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/docs-structure - - +++ - - Explanations of how the library works and why it works that way. - - .. grid-item-card:: :material-regular:`menu_book;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/api - ../genindex - - +++ - - Technical reference material including APIs and release notes. diff --git a/docs/user/tutorials/installation.rst b/docs/user/tutorials/installation.rst deleted file mode 100644 index 0cf4d4cbdd..0000000000 --- a/docs/user/tutorials/installation.rst +++ /dev/null @@ -1,38 +0,0 @@ -Installation -============ - -Check your version of python ----------------------------- - -You will need python 3.8 or later. You can check your version of python by -typing into a terminal:: - - $ python3 --version - - -Create a virtual environment ----------------------------- - -It is recommended that you install into a “virtual environment” so this -installation will not interfere with any existing Python software:: - - $ python3 -m venv /path/to/venv - $ source /path/to/venv/bin/activate - - -Installing the library ----------------------- - -You can now use ``pip`` to install the library and its dependencies:: - - $ python3 -m pip install dls-dodal - -If you require a feature that is not currently released you can also install -from github:: - - $ python3 -m pip install git+https://github.com/DiamondLightSource/dodal.git - -The library should now be installed and the commandline interface on your path. 
-You can check the version that has been installed by typing:: - - $ dodal --version diff --git a/pyproject.toml b/pyproject.toml index 3b3db5beeb..fed5b2eed8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"] +requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [project] @@ -33,17 +33,24 @@ dependencies = [ dynamic = ["version"] license.file = "LICENSE" -readme = "README.rst" +readme = "README.md" requires-python = ">=3.10" [project.optional-dependencies] dev = [ + "black", + "diff-cover", "mypy", "mockito", + # Commented out due to dependency version conflict with pydantic 1.x + # "copier", + "myst-parser", "pipdeptree", "pre-commit", "psutil", "pydata-sphinx-theme>=0.12", + "pyright", + "pyright_diff_quality_plugin @ git+https://github.com/DiamondLightSource/pyright_diff_quality_plugin.git", "pytest", "pytest-asyncio", "pytest-cov", @@ -80,20 +87,32 @@ dodal = ["*.txt"] [tool.setuptools_scm] write_to = "src/dodal/_version.py" -[tool.mypy] -plugins = ["pydantic.mypy"] -ignore_missing_imports = true # Ignore missing stubs in imported modules +[tool.pyright] +reportMissingImports = false # Ignore missing stubs in imported modules [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error asyncio_mode = "auto" markers = [ "s03: marks tests as requiring the s03 simulator running (deselect with '-m \"not s03\"')", + "skip_in_pycharm: marks test as not working in pycharm testrunner" ] addopts = """ --cov=dodal --cov-report term --cov-report xml:cov.xml --tb=native -vv --doctest-modules --doctest-glob="*.rst" """ +# https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings +filterwarnings = [ + "error", + # Ignore deprecation warning from ophyd_async + "ignore:dep_util is Deprecated. 
Use functions from setuptools instead.:DeprecationWarning", + # Ignore deprecation warning from zocalo + "ignore:.*pkg_resources.*:DeprecationWarning", + # Ignore Pydantic v2 warnings about deprecated @root-validators and config + "ignore::pydantic.warnings.PydanticDeprecatedSince20", + "ignore:Valid config keys have changed in V2.*:UserWarning" + +] # Doctest python code in docs, python code in src docstrings, test functions in tests testpaths = "docs src tests" @@ -116,24 +135,24 @@ legacy_tox_ini = """ [tox] skipsdist=True -[testenv:{pre-commit,mypy,pytest,docs}] +[testenv:{pre-commit,type-checking,tests,docs,diff-quality}] # Don't create a virtualenv for the command, requires tox-direct plugin direct = True passenv = * allowlist_externals = pytest pre-commit - mypy + pyright sphinx-build sphinx-autobuild commands = - pytest: pytest -m 'not s03' {posargs} - mypy: mypy src tests -v --ignore-missing-imports --show-traceback --no-strict-optional --check-untyped-defs {posargs} - pre-commit: pre-commit run --all-files {posargs} + tests: pytest -m 'not s03' --cov=dodal --cov-report term --cov-report xml:cov.xml {posargs} + type-checking: pyright src tests {posargs} + pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs} docs: sphinx-{posargs:build -E} -T docs build/html + diff-quality: diff-quality --violations=pyright --fail-under=100 """ - [tool.ruff] src = ["src", "tests"] line-length = 88 @@ -142,9 +161,15 @@ lint.extend-ignore = [ "F811", # support typing.overload decorator ] lint.select = [ + "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f + "I", # isort - https://docs.astral.sh/ruff/rules/#isort-i "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w + "UP", # pyupgrade - https://docs.astral.sh/ruff/rules/#pyupgrade-up "I001", # isort ] + +[tool.ruff.lint.flake8-bugbear] +extend-immutable-calls = ["dodal.common.coordination.inject"] diff --git a/src/dodal/__init__.py b/src/dodal/__init__.py index ee133d851c..26d23badb6 100644 --- a/src/dodal/__init__.py +++ b/src/dodal/__init__.py @@ -1,6 +1,3 @@ -from importlib.metadata import version - -__version__ = version("dls-dodal") -del version +from ._version import __version__ __all__ = ["__version__"] diff --git a/src/dodal/beamlines/__init__.py b/src/dodal/beamlines/__init__.py index 8e0a346422..9d7d2b63b6 100644 --- a/src/dodal/beamlines/__init__.py +++ b/src/dodal/beamlines/__init__.py @@ -1,7 +1,7 @@ import importlib.util +from collections.abc import Iterable, Mapping from functools import lru_cache from pathlib import Path -from typing import Iterable, Mapping # Where beamline names (per the ${BEAMLINE} environment variable don't always # match up, we have to map between them bidirectionally). 
The most common use case is diff --git a/src/dodal/beamlines/i22.py b/src/dodal/beamlines/i22.py index c2040ee5a8..93e52034a6 100644 --- a/src/dodal/beamlines/i22.py +++ b/src/dodal/beamlines/i22.py @@ -234,7 +234,7 @@ def slits_3( ) -@skip_device +@skip_device() def slits_4( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -300,7 +300,7 @@ def panda1( ) -@skip_device +@skip_device() def panda2( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -315,7 +315,7 @@ def panda2( ) -@skip_device +@skip_device() def panda3( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -330,7 +330,7 @@ def panda3( ) -@skip_device +@skip_device() def panda4( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -366,7 +366,7 @@ def oav( ) -@skip_device +@skip_device() def linkam( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False ) -> Linkam3: diff --git a/src/dodal/beamlines/p38.py b/src/dodal/beamlines/p38.py index 17ad9c9dd0..9c011a9495 100644 --- a/src/dodal/beamlines/p38.py +++ b/src/dodal/beamlines/p38.py @@ -55,7 +55,7 @@ def d3( # Disconnected -@skip_device +@skip_device() def d11( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False ) -> AravisDetector: @@ -262,7 +262,7 @@ def undulator( # Must find which PandA IOC(s) are compatible # Must document what PandAs are physically connected to # See: https://github.com/bluesky/ophyd-async/issues/284 -@skip_device +@skip_device() def panda1( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -277,7 +277,7 @@ def panda1( ) -@skip_device +@skip_device() def panda2( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -292,7 +292,7 @@ def panda2( ) -@skip_device +@skip_device() def panda3( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -307,7 +307,7 @@ def panda3( ) -@skip_device +@skip_device() def linkam( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False ) -> Linkam3: diff --git a/src/dodal/beamlines/p45.py b/src/dodal/beamlines/p45.py index f3bd342bd7..19e9cf3273 100644 --- a/src/dodal/beamlines/p45.py +++ b/src/dodal/beamlines/p45.py @@ -48,7 +48,7 @@ def choppers( # Disconnected -@skip_device +@skip_device() def det( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False ) -> AravisDetector: @@ -65,7 +65,7 @@ def det( # Disconnected -@skip_device +@skip_device() def diff( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False ) -> AravisDetector: @@ -84,7 +84,7 @@ def diff( # Must find which PandA IOC(s) are compatible # Must document what PandAs are physically connected to # See: https://github.com/bluesky/ophyd-async/issues/284 -@skip_device +@skip_device() def panda1( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, @@ -99,7 +99,7 @@ def panda1( ) -@skip_device +@skip_device() def panda2( wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False, diff --git a/src/dodal/common/beamlines/beamline_parameters.py b/src/dodal/common/beamlines/beamline_parameters.py index f3b7413897..5a7d2f4f18 100644 --- a/src/dodal/common/beamlines/beamline_parameters.py +++ b/src/dodal/common/beamlines/beamline_parameters.py @@ -1,4 +1,4 @@ -from typing import Any, Tuple, cast +from typing import Any, cast from dodal.log import LOGGER from dodal.utils import get_beamline_name @@ -31,7 +31,7 @@ def from_lines(cls, file_name: str, config_lines: list[str]): for line in config_lines_nocomments ] config_pairs: list[tuple[str, 
Any]] = [ - cast(Tuple[str, Any], param) + cast(tuple[str, Any], param) for param in config_lines_sep_key_and_value if len(param) == 2 ] diff --git a/src/dodal/common/beamlines/beamline_utils.py b/src/dodal/common/beamlines/beamline_utils.py index beeb7811dd..4ff68ca5d9 100644 --- a/src/dodal/common/beamlines/beamline_utils.py +++ b/src/dodal/common/beamlines/beamline_utils.py @@ -1,5 +1,6 @@ import inspect -from typing import Callable, Dict, Final, List, Optional, TypeVar, cast +from collections.abc import Callable +from typing import Final, TypeVar, cast from bluesky.run_engine import call_in_bluesky_event_loop from ophyd import Device as OphydV1Device @@ -12,7 +13,7 @@ DEFAULT_CONNECTION_TIMEOUT: Final[float] = 5.0 -ACTIVE_DEVICES: Dict[str, AnyDevice] = {} +ACTIVE_DEVICES: dict[str, AnyDevice] = {} BL = "" DIRECTORY_PROVIDER: UpdatingDirectoryProvider | None = None @@ -33,7 +34,7 @@ def clear_device(name: str): del ACTIVE_DEVICES[name] -def list_active_devices() -> List[str]: +def list_active_devices() -> list[str]: global ACTIVE_DEVICES return list(ACTIVE_DEVICES.keys()) @@ -59,7 +60,7 @@ def wait_for_connection( ) else: raise TypeError( - "Invalid type {} in _wait_for_connection".format(device.__class__.__name__) + f"Invalid type {device.__class__.__name__} in _wait_for_connection" ) @@ -73,7 +74,7 @@ def device_instantiation( prefix: str, wait: bool, fake: bool, - post_create: Optional[Callable[[T], None]] = None, + post_create: Callable[[T], None] | None = None, bl_prefix: bool = True, **kwargs, ) -> T: diff --git a/src/dodal/common/maths.py b/src/dodal/common/maths.py index a691279f24..00b8f06b3b 100644 --- a/src/dodal/common/maths.py +++ b/src/dodal/common/maths.py @@ -1,9 +1,7 @@ -from typing import Tuple - import numpy as np -def step_to_num(start: float, stop: float, step: float) -> Tuple[float, float, int]: +def step_to_num(start: float, stop: float, step: float) -> tuple[float, float, int]: """ Standard handling for converting from start, stop, step to start, stop, num Forces step to be same direction as length diff --git a/src/dodal/common/types.py b/src/dodal/common/types.py index 6e63c49256..f81518baba 100644 --- a/src/dodal/common/types.py +++ b/src/dodal/common/types.py @@ -1,8 +1,7 @@ from abc import ABC, abstractmethod +from collections.abc import Callable, Generator from typing import ( Any, - Callable, - Generator, ) from bluesky.utils import Msg diff --git a/src/dodal/common/visit.py b/src/dodal/common/visit.py index c13fe34eff..22bff392ed 100644 --- a/src/dodal/common/visit.py +++ b/src/dodal/common/visit.py @@ -1,6 +1,5 @@ from abc import ABC, abstractmethod from pathlib import Path -from typing import Optional from aiohttp import ClientSession from ophyd_async.core import DirectoryInfo @@ -105,7 +104,7 @@ def __init__( self, beamline: str, root: Path, - client: Optional[DirectoryServiceClientBase] = None, + client: DirectoryServiceClientBase | None = None, ): self._beamline = beamline self._client = client or DirectoryServiceClient(f"{beamline}-control:8088/api") diff --git a/src/dodal/devices/aperturescatterguard.py b/src/dodal/devices/aperturescatterguard.py index 96d49265fd..23a944eff5 100644 --- a/src/dodal/devices/aperturescatterguard.py +++ b/src/dodal/devices/aperturescatterguard.py @@ -4,7 +4,7 @@ from enum import Enum from bluesky.protocols import Movable, Reading -from ophyd_async.core import AsyncStatus, SignalR, StandardReadable +from ophyd_async.core import AsyncStatus, HintedSignal, SignalR, StandardReadable from 
ophyd_async.core.soft_signal_backend import SoftConverter, SoftSignalBackend from dodal.devices.aperture import Aperture @@ -155,11 +155,7 @@ def __init__(self, prefix: str = "", name: str = "") -> None: ) aperture_backend.converter = self.ApertureConverter() self.selected_aperture = self.SelectedAperture(backend=aperture_backend) - self.set_readable_signals( - read=[ - self.selected_aperture, - ] - ) + self.add_readables([self.selected_aperture], wrapper=HintedSignal) super().__init__(name) class ApertureConverter(SoftConverter): diff --git a/src/dodal/devices/areadetector/adaravis.py b/src/dodal/devices/areadetector/adaravis.py index f81f8c013b..aaf6720b84 100644 --- a/src/dodal/devices/areadetector/adaravis.py +++ b/src/dodal/devices/areadetector/adaravis.py @@ -1,7 +1,7 @@ -from typing import Any, Mapping +from collections.abc import Mapping +from typing import Any -from ophyd import Component as Cpt -from ophyd import DetectorBase, EpicsSignal, Signal +from ophyd import EpicsSignal, Signal from ophyd.areadetector.base import ADComponent as Cpt from ophyd.areadetector.detectors import DetectorBase @@ -75,7 +75,7 @@ def stage(self, *args, **kwargs): self._prime_hdf() # Now calling the super method should set the acquire period - super(AdAravisDetector, self).stage(*args, **kwargs) + super().stage(*args, **kwargs) def _prime_hdf(self) -> None: """ diff --git a/src/dodal/devices/areadetector/adsim.py b/src/dodal/devices/areadetector/adsim.py index fedbaa081a..258739a728 100644 --- a/src/dodal/devices/areadetector/adsim.py +++ b/src/dodal/devices/areadetector/adsim.py @@ -1,4 +1,3 @@ -from ophyd import Component as Cpt from ophyd.areadetector.base import ADComponent as Cpt from ophyd.areadetector.detectors import DetectorBase @@ -45,4 +44,4 @@ def stage(self, *args, **kwargs): self.stage_sigs[self.cam.acquire_period] = acquire_time # Now calling the super method should set the acquire period - super(AdSimDetector, self).stage(*args, **kwargs) + super().stage(*args, **kwargs) diff --git a/src/dodal/devices/detector/det_dim_constants.py b/src/dodal/devices/detector/det_dim_constants.py index d9fd6d45d7..afb71d1039 100644 --- a/src/dodal/devices/detector/det_dim_constants.py +++ b/src/dodal/devices/detector/det_dim_constants.py @@ -1,4 +1,4 @@ -from typing import Dict, Generic, TypeVar +from typing import Generic, TypeVar from pydantic.dataclasses import dataclass @@ -11,7 +11,7 @@ class DetectorSize(Generic[T]): height: T -ALL_DETECTORS: Dict[str, "DetectorSizeConstants"] = {} +ALL_DETECTORS: dict[str, "DetectorSizeConstants"] = {} @dataclass diff --git a/src/dodal/devices/detector/det_dist_to_beam_converter.py b/src/dodal/devices/detector/det_dist_to_beam_converter.py index c248a02323..25f4977aee 100644 --- a/src/dodal/devices/detector/det_dist_to_beam_converter.py +++ b/src/dodal/devices/detector/det_dist_to_beam_converter.py @@ -47,7 +47,7 @@ def reload_lookup_table(self): def parse_table(self) -> list: rows = loadtxt(self.lookup_file, delimiter=" ", comments=["#", "Units"]) - columns = list(zip(*rows)) + columns = list(zip(*rows, strict=False)) return columns diff --git a/src/dodal/devices/detector/detector.py b/src/dodal/devices/detector/detector.py index d1789dffb9..b2a3b5ee41 100644 --- a/src/dodal/devices/detector/detector.py +++ b/src/dodal/devices/detector/detector.py @@ -1,5 +1,5 @@ from enum import Enum, auto -from typing import Any, Tuple +from typing import Any from pydantic import BaseModel, root_validator, validator @@ -80,7 +80,7 @@ def _parse_directory(cls, directory: str, 
values: dict[str, Any]) -> str: directory += "/" return directory - def get_beam_position_mm(self, detector_distance: float) -> Tuple[float, float]: + def get_beam_position_mm(self, detector_distance: float) -> tuple[float, float]: x_beam_mm = self.beam_xy_converter.get_beam_xy_from_det_dist( detector_distance, Axis.X_AXIS ) @@ -105,7 +105,7 @@ def get_detector_size_pizels(self) -> DetectorSize: roi_size = self.detector_size_constants.roi_size_pixels return roi_size if self.use_roi_mode else full_size - def get_beam_position_pixels(self, detector_distance: float) -> Tuple[float, float]: + def get_beam_position_pixels(self, detector_distance: float) -> tuple[float, float]: full_size_pixels = self.detector_size_constants.det_size_pixels roi_size_pixels = self.get_detector_size_pizels() diff --git a/src/dodal/devices/eiger.py b/src/dodal/devices/eiger.py index bcc11c0e1f..ade4ad468f 100644 --- a/src/dodal/devices/eiger.py +++ b/src/dodal/devices/eiger.py @@ -56,17 +56,15 @@ def with_params( cls, params: DetectorParams, name: str = "EigerDetector", - *args, - **kwargs, ): - det = cls(name=name, *args, **kwargs) + det = cls(name=name) det.set_detector_parameters(params) return det def set_detector_parameters(self, detector_params: DetectorParams): self.detector_params = detector_params if self.detector_params is None: - raise Exception("Parameters for scan must be specified") + raise ValueError("Parameters for scan must be specified") to_check = [ ( @@ -318,7 +316,9 @@ def _wait_fan_ready(self) -> StatusBase: def _finish_arm(self) -> Status: LOGGER.info("Eiger staging: Finishing arming") - return Status(done=True, success=True) + status = Status() + status.set_finished() + return status def forward_bit_depth_to_filewriter(self): bit_depth = self.bit_depth.get() diff --git a/src/dodal/devices/eiger_odin.py b/src/dodal/devices/eiger_odin.py index edca8d64c0..2345434dfa 100644 --- a/src/dodal/devices/eiger_odin.py +++ b/src/dodal/devices/eiger_odin.py @@ -1,5 +1,3 @@ -from typing import List, Tuple - from ophyd import Component, Device, EpicsSignal, EpicsSignalRO, EpicsSignalWithRBV from ophyd.areadetector.plugins import HDF5Plugin_V22 from ophyd.sim import NullStatus @@ -59,12 +57,12 @@ class OdinNodesStatus(Device): node_3 = Component(OdinNode, "OD4:") @property - def nodes(self) -> List[OdinNode]: + def nodes(self) -> list[OdinNode]: return [self.node_0, self.node_1, self.node_2, self.node_3] def check_node_frames_from_attr( self, node_get_func, error_message_verb: str - ) -> Tuple[bool, str]: + ) -> tuple[bool, str]: nodes_frames_values = [0] * len(self.nodes) frames_details = [] for node_number, node_pv in enumerate(self.nodes): @@ -75,17 +73,17 @@ def check_node_frames_from_attr( bad_frames = any(v != 0 for v in nodes_frames_values) return bad_frames, "\n".join(frames_details) - def check_frames_timed_out(self) -> Tuple[bool, str]: + def check_frames_timed_out(self) -> tuple[bool, str]: return self.check_node_frames_from_attr( lambda node: node.frames_timed_out.get(), "timed out" ) - def check_frames_dropped(self) -> Tuple[bool, str]: + def check_frames_dropped(self) -> tuple[bool, str]: return self.check_node_frames_from_attr( lambda node: node.frames_dropped.get(), "dropped" ) - def get_error_state(self) -> Tuple[bool, str]: + def get_error_state(self) -> tuple[bool, str]: is_error = [] error_messages = [] for node_number, node_pv in enumerate(self.nodes): @@ -99,7 +97,7 @@ def get_error_state(self) -> Tuple[bool, str]: def get_init_state(self) -> bool: is_initialised = [] - for 
node_number, node_pv in enumerate(self.nodes): + for node_pv in self.nodes: is_initialised.append(node_pv.fr_initialised.get()) is_initialised.append(node_pv.fp_initialised.get()) return all(is_initialised) @@ -132,7 +130,7 @@ def check_odin_state(self) -> bool: frames_timed_out, frames_timed_out_details = self.nodes.check_frames_timed_out() if not is_initialised: - raise Exception(error_message) + raise RuntimeError(error_message) if frames_dropped: self.log.error(f"Frames dropped: {frames_dropped_details}") if frames_timed_out: @@ -140,7 +138,7 @@ def check_odin_state(self) -> bool: return is_initialised and not frames_dropped and not frames_timed_out - def check_odin_initialised(self) -> Tuple[bool, str]: + def check_odin_initialised(self) -> tuple[bool, str]: is_error_state, error_messages = self.nodes.get_error_state() to_check = [ (not self.fan.consumers_connected.get(), "EigerFan is not connected"), diff --git a/src/dodal/devices/fast_grid_scan.py b/src/dodal/devices/fast_grid_scan.py index 2e1b2cce50..f095d7d52d 100644 --- a/src/dodal/devices/fast_grid_scan.py +++ b/src/dodal/devices/fast_grid_scan.py @@ -126,7 +126,7 @@ def grid_position_to_motor_position(self, grid_position: ndarray) -> ndarray: :return: The motor position this corresponds to. :raises: IndexError if the desired position is outside the grid.""" for position, axis in zip( - grid_position, [self.x_axis, self.y_axis, self.z_axis] + grid_position, [self.x_axis, self.y_axis, self.z_axis], strict=False ): if not axis.is_within(position): raise IndexError(f"{grid_position} is outside the bounds of the grid") @@ -191,7 +191,7 @@ def __init__(self, prefix: str, name: str = "") -> None: class ExpectedImages(SignalR[int]): def __init__(self, parent: "FastGridScanCommon") -> None: super().__init__(SoftSignalBackend(int)) - self.parent: "FastGridScanCommon" = parent + self.parent: FastGridScanCommon = parent async def get_value(self): x = await self.parent.x_steps.get_value() diff --git a/src/dodal/devices/focusing_mirror.py b/src/dodal/devices/focusing_mirror.py index 50dd3c1b79..0d693be039 100644 --- a/src/dodal/devices/focusing_mirror.py +++ b/src/dodal/devices/focusing_mirror.py @@ -2,8 +2,10 @@ from ophyd_async.core import ( AsyncStatus, + ConfigSignal, Device, DeviceVector, + HintedSignal, StandardReadable, observe_value, ) @@ -144,10 +146,8 @@ def __init__( # regardless of orientation of the mirror self.incident_angle = Motor(prefix + "PITCH") - self.set_readable_signals( - read=[self.incident_angle.user_readback], - config=[self.type], - ) + self.add_readables([self.incident_angle.user_readback], wrapper=HintedSignal) + self.add_readables([self.type], wrapper=ConfigSignal) super().__init__(name) diff --git a/src/dodal/devices/i22/dcm.py b/src/dodal/devices/i22/dcm.py index eaf4a36406..6ff5f8430e 100644 --- a/src/dodal/devices/i22/dcm.py +++ b/src/dodal/devices/i22/dcm.py @@ -1,6 +1,7 @@ import time +from collections.abc import Sequence from dataclasses import dataclass -from typing import Dict, Literal, Sequence +from typing import Literal from bluesky.protocols import Reading from event_model.documents.event_descriptor import DataKey @@ -127,7 +128,7 @@ def __init__( super().__init__(name) - async def describe(self) -> Dict[str, DataKey]: + async def describe(self) -> dict[str, DataKey]: default_describe = await super().describe() return { f"{self.name}-wavelength": DataKey( @@ -139,7 +140,7 @@ async def describe(self) -> Dict[str, DataKey]: **default_describe, } - async def read(self) -> Dict[str, Reading]: + async 
def read(self) -> dict[str, Reading]: default_reading = await super().read() energy: float = default_reading[f"{self.name}-energy"]["value"] if energy > 0.0: diff --git a/src/dodal/devices/i22/fswitch.py b/src/dodal/devices/i22/fswitch.py index 10bc92df21..9598f3a078 100644 --- a/src/dodal/devices/i22/fswitch.py +++ b/src/dodal/devices/i22/fswitch.py @@ -1,7 +1,6 @@ import asyncio import time from enum import Enum -from typing import Dict from bluesky.protocols import DataKey, Reading from ophyd_async.core import ConfigSignal, StandardReadable, soft_signal_r_and_setter @@ -74,7 +73,7 @@ def __init__( super().__init__(name) - async def describe(self) -> Dict[str, DataKey]: + async def describe(self) -> dict[str, DataKey]: default_describe = await super().describe() return { FSwitch.NUM_LENSES_FIELD_NAME: DataKey( @@ -83,7 +82,7 @@ async def describe(self) -> Dict[str, DataKey]: **default_describe, } - async def read(self) -> Dict[str, Reading]: + async def read(self) -> dict[str, Reading]: result = await asyncio.gather( *(filter.get_value() for filter in self.filters.values()) ) diff --git a/src/dodal/devices/i22/nxsas.py b/src/dodal/devices/i22/nxsas.py index 669ebb602b..8abd038218 100644 --- a/src/dodal/devices/i22/nxsas.py +++ b/src/dodal/devices/i22/nxsas.py @@ -1,5 +1,4 @@ from dataclasses import dataclass, fields -from typing import Dict from bluesky.protocols import Reading from event_model.documents.event_descriptor import DataKey @@ -13,7 +12,7 @@ @dataclass class MetadataHolder: # TODO: just in case this is useful more widely... - async def describe(self, parent_name: str) -> Dict[str, DataKey]: + async def describe(self, parent_name: str) -> dict[str, DataKey]: def datakey(value) -> DataKey: if isinstance(value, tuple): return {"units": value[1], **datakey(value[0])} @@ -40,7 +39,7 @@ def datakey(value) -> DataKey: if getattr(self, field.name, None) is not None } - async def read(self, parent_name: str) -> Dict[str, Reading]: + async def read(self, parent_name: str) -> dict[str, Reading]: def reading(value): if isinstance(value, tuple): return reading(value[0]) @@ -101,25 +100,21 @@ def __init__( ) self._metadata_holder = metadata_holder - async def read_configuration(self) -> Dict[str, Reading]: + async def read_configuration(self) -> dict[str, Reading]: return await merge_gathered_dicts( - ( - r - for r in ( - super().read_configuration(), - self._metadata_holder.read(self.name), - ) + r + for r in ( + super().read_configuration(), + self._metadata_holder.read(self.name), ) ) - async def describe_configuration(self) -> Dict[str, DataKey]: + async def describe_configuration(self) -> dict[str, DataKey]: return await merge_gathered_dicts( - ( - r - for r in ( - super().describe_configuration(), - self._metadata_holder.describe(self.name), - ) + r + for r in ( + super().describe_configuration(), + self._metadata_holder.describe(self.name), ) ) @@ -150,24 +145,20 @@ def __init__( ) self._metadata_holder = metadata_holder - async def read_configuration(self) -> Dict[str, Reading]: + async def read_configuration(self) -> dict[str, Reading]: return await merge_gathered_dicts( - ( - r - for r in ( - super().read_configuration(), - self._metadata_holder.read(self.name), - ) + r + for r in ( + super().read_configuration(), + self._metadata_holder.read(self.name), ) ) - async def describe_configuration(self) -> Dict[str, DataKey]: + async def describe_configuration(self) -> dict[str, DataKey]: return await merge_gathered_dicts( - ( - r - for r in ( - super().describe_configuration(), - 
self._metadata_holder.describe(self.name), - ) + r + for r in ( + super().describe_configuration(), + self._metadata_holder.describe(self.name), ) ) diff --git a/src/dodal/devices/linkam3.py b/src/dodal/devices/linkam3.py index 0c2149a356..31621b9747 100644 --- a/src/dodal/devices/linkam3.py +++ b/src/dodal/devices/linkam3.py @@ -1,10 +1,15 @@ import asyncio import time from enum import Enum -from typing import Optional from bluesky.protocols import Location -from ophyd_async.core import StandardReadable, WatchableAsyncStatus, observe_value +from ophyd_async.core import ( + ConfigSignal, + HintedSignal, + StandardReadable, + WatchableAsyncStatus, + observe_value, +) from ophyd_async.core.utils import WatcherUpdate from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw @@ -57,14 +62,15 @@ def __init__(self, prefix: str, name: str): # status is a bitfield stored in a double? self.status = epics_signal_r(float, prefix + "STATUS:") - self.set_readable_signals( - read=(self.temp,), config=(self.ramp_rate, self.speed, self.set_point) + self.add_readables((self.temp,), wrapper=HintedSignal) + self.add_readables( + (self.ramp_rate, self.speed, self.set_point), wrapper=ConfigSignal ) super().__init__(name=name) @WatchableAsyncStatus.wrap - async def set(self, new_position: float, timeout: Optional[float] = None): + async def set(self, new_position: float, timeout: float | None = None): # time.monotonic won't go backwards in case of NTP corrections start = time.monotonic() old_position = await self.set_point.get_value() diff --git a/src/dodal/devices/oav/oav_detector.py b/src/dodal/devices/oav/oav_detector.py index e67c8b07de..42c7fedef4 100644 --- a/src/dodal/devices/oav/oav_detector.py +++ b/src/dodal/devices/oav/oav_detector.py @@ -46,7 +46,7 @@ class ZoomController(Device): sxst = Component(EpicsSignal, "MP:SELECT.SXST") def set_flatfield_on_zoom_level_one(self, value): - self.parent: "OAV" + self.parent: OAV flat_applied = self.parent.proc.port_name.get() no_flat_applied = self.parent.cam.port_name.get() return self.parent.grid_snapshot.input_plugin.set( diff --git a/src/dodal/devices/oav/oav_parameters.py b/src/dodal/devices/oav/oav_parameters.py index cbb942a6c0..938388cfc0 100644 --- a/src/dodal/devices/oav/oav_parameters.py +++ b/src/dodal/devices/oav/oav_parameters.py @@ -1,7 +1,7 @@ import json -import xml.etree.cElementTree as et +import xml.etree.ElementTree as et from collections import ChainMap -from typing import Any, Tuple +from typing import Any from dodal.devices.oav.oav_errors import ( OAVError_BeamPositionNotFound, @@ -65,11 +65,11 @@ def update(name, param_type, default=None): try: param = param_type(param) return param - except AssertionError: + except AssertionError as e: raise TypeError( f"OAV param {name} from the OAV centring params json file has the " f"wrong type, should be {param_type} but is {type(param)}." - ) + ) from e self.exposure: float = update("exposure", float) self.acquire_period: float = update("acqPeriod", float) @@ -155,7 +155,7 @@ def load_microns_per_pixel(self, zoom: float, xsize: int, ysize: int) -> None: def get_beam_position_from_zoom( self, zoom: float, xsize: int, ysize: int - ) -> Tuple[int, int]: + ) -> tuple[int, int]: """ Extracts the beam location in pixels `xCentre` `yCentre`, for a requested zoom \ level. 
The beam location is manually inputted by the beamline operator on GDA \ @@ -164,7 +164,7 @@ def get_beam_position_from_zoom( """ crosshair_x_line = None crosshair_y_line = None - with open(self.display_config, "r") as f: + with open(self.display_config) as f: file_lines = f.readlines() for i in range(len(file_lines)): if file_lines[i].startswith("zoomLevel = " + str(zoom)): @@ -188,7 +188,7 @@ def get_beam_position_from_zoom( def calculate_beam_distance( self, horizontal_pixels: int, vertical_pixels: int - ) -> Tuple[int, int]: + ) -> tuple[int, int]: """ Calculates the distance between the beam centre and the given (horizontal, vertical). diff --git a/src/dodal/devices/oav/pin_image_recognition/__init__.py b/src/dodal/devices/oav/pin_image_recognition/__init__.py index ee3fef53b0..af5797be29 100644 --- a/src/dodal/devices/oav/pin_image_recognition/__init__.py +++ b/src/dodal/devices/oav/pin_image_recognition/__init__.py @@ -5,6 +5,7 @@ from numpy.typing import NDArray from ophyd_async.core import ( AsyncStatus, + HintedSignal, StandardReadable, observe_value, soft_signal_r_and_setter, @@ -77,12 +78,13 @@ def __init__(self, prefix: str, name: str = ""): self.min_tip_height = soft_signal_rw(int, 5, name="min_tip_height") self.validity_timeout = soft_signal_rw(float, 5.0, name="validity_timeout") - self.set_readable_signals( - read=[ + self.add_readables( + [ self.triggered_tip, self.triggered_top_edge, self.triggered_bottom_edge, ], + wrapper=HintedSignal, ) super().__init__(name=name) @@ -134,9 +136,7 @@ async def _get_tip_and_edge_data( location = sample_detection.processArray(array_data) end_time = time.time() LOGGER.debug( - "Sample location detection took {}ms".format( - (end_time - start_time) * 1000.0 - ) + f"Sample location detection took {(end_time - start_time) * 1000.0}ms" ) return location @@ -154,7 +154,7 @@ async def _set_triggered_tip(): location = await self._get_tip_and_edge_data(value) self._set_triggered_values(location) except Exception as e: - LOGGER.warn( + LOGGER.warning( f"Failed to detect pin-tip location, will retry with next image: {e}" ) else: diff --git a/src/dodal/devices/oav/pin_image_recognition/utils.py b/src/dodal/devices/oav/pin_image_recognition/utils.py index 9f5fe7da1f..00fab87273 100644 --- a/src/dodal/devices/oav/pin_image_recognition/utils.py +++ b/src/dodal/devices/oav/pin_image_recognition/utils.py @@ -1,6 +1,7 @@ +from collections.abc import Callable from dataclasses import dataclass from enum import Enum -from typing import Callable, Final, Tuple +from typing import Final import cv2 import numpy as np @@ -103,7 +104,7 @@ class SampleLocation: edge_bottom: np.ndarray -class MxSampleDetect(object): +class MxSampleDetect: def __init__( self, *, @@ -161,7 +162,7 @@ def processArray(self, arr: np.ndarray) -> SampleLocation: @staticmethod def _first_and_last_nonzero_by_columns( arr: np.ndarray, - ) -> Tuple[np.ndarray, np.ndarray]: + ) -> tuple[np.ndarray, np.ndarray]: """ Finds the indexes of the first & last non-zero values by column in a 2d array. 
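# A minimal, self-contained sketch of the column-wise first/last non-zero scan
# that _first_and_last_nonzero_by_columns performs, assuming a NONE_VALUE
# sentinel for empty columns; this only illustrates the numpy technique and is
# not dodal's actual implementation.
import numpy as np

NONE_VALUE = -1  # assumed sentinel for "no non-zero pixel in this column"

def first_and_last_nonzero_by_columns(arr: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
    nonzero = arr != 0
    has_any = nonzero.any(axis=0)
    # argmax returns the row index of the first True in each column.
    first = np.where(has_any, nonzero.argmax(axis=0), NONE_VALUE)
    # Scanning the vertically flipped mask yields the last True per column.
    last = np.where(has_any, arr.shape[0] - 1 - nonzero[::-1].argmax(axis=0), NONE_VALUE)
    return first, last

# Example: column 0 is non-zero only in row 1; column 1 spans rows 0..1.
top_edge, bottom_edge = first_and_last_nonzero_by_columns(np.array([[0, 1], [1, 1], [0, 0]]))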
@@ -243,9 +244,7 @@ def _locate_sample(self, edge_arr: np.ndarray) -> SampleLocation: bottom[x + 1 :] = NONE_VALUE LOGGER.info( - "pin-tip detection: Successfully located pin tip at (x={}, y={})".format( - tip_x, tip_y - ) + f"pin-tip detection: Successfully located pin tip at (x={tip_x}, y={tip_y})" ) return SampleLocation( tip_x=tip_x, tip_y=tip_y, edge_bottom=bottom, edge_top=top ) diff --git a/src/dodal/devices/oav/utils.py b/src/dodal/devices/oav/utils.py index 45447c6a2a..24071502fe 100644 --- a/src/dodal/devices/oav/utils.py +++ b/src/dodal/devices/oav/utils.py @@ -1,5 +1,5 @@ +from collections.abc import Generator from enum import IntEnum -from typing import Generator, Tuple import bluesky.plan_stubs as bps import numpy as np @@ -10,7 +10,7 @@ from dodal.devices.oav.pin_image_recognition import PinTipDetection from dodal.devices.smargon import Smargon -Pixel = Tuple[int, int] +Pixel = tuple[int, int] class PinNotFoundException(Exception): diff --git a/src/dodal/devices/robot.py b/src/dodal/devices/robot.py index 3745d7ff31..623a06f86e 100644 --- a/src/dodal/devices/robot.py +++ b/src/dodal/devices/robot.py @@ -1,5 +1,5 @@ import asyncio -from asyncio import FIRST_COMPLETED, Task +from asyncio import FIRST_COMPLETED, CancelledError, Task from dataclasses import dataclass from enum import Enum @@ -74,19 +74,28 @@ async def raise_if_no_pin(): await wait_for_value(self.error_code, self.NO_PIN_ERROR_CODE, None) raise RobotLoadFailed(self.NO_PIN_ERROR_CODE, "Pin was not detected") - finished, unfinished = await asyncio.wait( - [ - Task(raise_if_no_pin()), - Task( - wait_for_value(self.gonio_pin_sensor, PinMounted.PIN_MOUNTED, None) - ), - ], - return_when=FIRST_COMPLETED, - ) - for task in unfinished: - task.cancel() - for task in finished: - await task + async def wfv(): + await wait_for_value(self.gonio_pin_sensor, PinMounted.PIN_MOUNTED, None) + + tasks = [ + (Task(raise_if_no_pin())), + (Task(wfv())), + ] + try: + finished, unfinished = await asyncio.wait( + tasks, + return_when=FIRST_COMPLETED, + ) + for task in unfinished: + task.cancel() + for task in finished: + await task + except CancelledError: + # If the outer enclosing task cancels after LOAD_TIMEOUT, this causes CancelledError to be raised + # in the current task; when it propagates here, we should cancel all pending tasks before bubbling up + for task in tasks: + task.cancel() + raise async def _load_pin_and_puck(self, sample_location: SampleLocation): LOGGER.info(f"Loading pin {sample_location}") @@ -113,7 +122,7 @@ async def set(self, sample_location: SampleLocation): await asyncio.wait_for( self._load_pin_and_puck(sample_location), timeout=self.LOAD_TIMEOUT ) - except asyncio.TimeoutError: + except asyncio.TimeoutError as e: error_code = await self.error_code.get_value() error_string = await self.error_str.get_value() - raise RobotLoadFailed(error_code, error_string) + raise RobotLoadFailed(error_code, error_string) from e diff --git a/src/dodal/devices/smargon.py b/src/dodal/devices/smargon.py index 4d5bf14c66..27ea6f2a7a 100644 --- a/src/dodal/devices/smargon.py +++ b/src/dodal/devices/smargon.py @@ -1,8 +1,8 @@ -from collections.abc import Generator +from collections.abc import Collection, Generator from dataclasses import dataclass from enum import Enum from math import isclose -from typing import Collection, cast +from typing import cast from bluesky import plan_stubs as bps from bluesky.utils import Msg @@ -87,7 +87,7 @@ class XYZLimits: def position_valid(self, pos: Collection[float]) -> bool: return all(
axis_limits.contains(value) - for axis_limits, value in zip([self.x, self.y, self.z], pos) + for axis_limits, value in zip([self.x, self.y, self.z], pos, strict=False) ) diff --git a/src/dodal/devices/tetramm.py b/src/dodal/devices/tetramm.py index 7ced99c28e..f1ef468a37 100644 --- a/src/dodal/devices/tetramm.py +++ b/src/dodal/devices/tetramm.py @@ -1,6 +1,6 @@ import asyncio +from collections.abc import Sequence from enum import Enum -from typing import Sequence from bluesky.protocols import Hints from ophyd_async.core import ( diff --git a/src/dodal/devices/util/adjuster_plans.py b/src/dodal/devices/util/adjuster_plans.py index 3c907fddac..a5509dc41f 100644 --- a/src/dodal/devices/util/adjuster_plans.py +++ b/src/dodal/devices/util/adjuster_plans.py @@ -3,7 +3,7 @@ according to some criteria either via feedback, preset positions, lookup tables etc. """ -from typing import Callable, Generator +from collections.abc import Callable, Generator from bluesky import plan_stubs as bps from bluesky.run_engine import Msg diff --git a/src/dodal/devices/util/epics_util.py b/src/dodal/devices/util/epics_util.py index 5aea307914..75d411561b 100644 --- a/src/dodal/devices/util/epics_util.py +++ b/src/dodal/devices/util/epics_util.py @@ -1,5 +1,5 @@ +from collections.abc import Callable, Sequence from functools import partial -from typing import Callable, Sequence from bluesky.protocols import Movable from ophyd import Component, EpicsSignal @@ -61,9 +61,9 @@ def closing_func(old_status: Status): # Wrap each function by first checking the previous status and attaching a callback # to the next function in the chain def wrap_func( - old_status: Status, current_func: Callable[[], StatusBase], next_func + old_status: Status | None, current_func: Callable[[], StatusBase], next_func ): - if old_status.exception() is not None: + if old_status is not None and old_status.exception() is not None: set_global_exception_and_log(old_status) return @@ -96,7 +96,7 @@ def set_global_exception_and_log(status: Status): ) # Wrap each function in reverse - for num, func in enumerate(list(reversed(functions_to_chain))[1:-1]): + for func in list(reversed(functions_to_chain))[1:-1]: wrapped_funcs.append( partial( wrap_func, @@ -105,10 +105,8 @@ def set_global_exception_and_log(status: Status): ) ) - starting_status = Status(done=True, success=True) - # Initiate the chain of functions - wrap_func(starting_status, functions_to_chain[0], wrapped_funcs[-1]) + wrap_func(None, functions_to_chain[0], wrapped_funcs[-1]) return full_status diff --git a/src/dodal/devices/util/lookup_tables.py b/src/dodal/devices/util/lookup_tables.py index c7da7409c0..d920aa8eac 100644 --- a/src/dodal/devices/util/lookup_tables.py +++ b/src/dodal/devices/util/lookup_tables.py @@ -3,9 +3,8 @@ converts the source value s to a target value t for different values of s. 
""" -from collections.abc import Sequence +from collections.abc import Callable, Sequence from io import StringIO -from typing import Callable import aiofiles import numpy as np @@ -36,7 +35,7 @@ async def energy_distance_table(lookup_table_path: str) -> np.ndarray: def linear_interpolation_lut(filename: str) -> Callable[[float], float]: """Returns a callable that converts values by linear interpolation of lookup table values""" LOGGER.info(f"Using lookup table {filename}") - s_and_t_vals = zip(*loadtxt(filename, comments=["#", "Units"])) + s_and_t_vals = zip(*loadtxt(filename, comments=["#", "Units"]), strict=False) s_values: Sequence t_values: Sequence diff --git a/src/dodal/devices/webcam.py b/src/dodal/devices/webcam.py index 12a1595422..3805b08e7f 100644 --- a/src/dodal/devices/webcam.py +++ b/src/dodal/devices/webcam.py @@ -3,7 +3,7 @@ import aiofiles from aiohttp import ClientSession from bluesky.protocols import Triggerable -from ophyd_async.core import AsyncStatus, StandardReadable, soft_signal_rw +from ophyd_async.core import AsyncStatus, HintedSignal, StandardReadable, soft_signal_rw from dodal.log import LOGGER @@ -15,7 +15,7 @@ def __init__(self, name, prefix, url): self.directory = soft_signal_rw(str, name="directory") self.last_saved_path = soft_signal_rw(str, name="last_saved_path") - self.set_readable_signals([self.last_saved_path]) + self.add_readables([self.last_saved_path], wrapper=HintedSignal) super().__init__(name=name) async def _write_image(self, file_path: str): @@ -24,7 +24,7 @@ async def _write_image(self, file_path: str): response.raise_for_status() LOGGER.info(f"Saving webcam image from {self.url} to {file_path}") async with aiofiles.open(file_path, "wb") as file: - await file.write((await response.read())) + await file.write(await response.read()) @AsyncStatus.wrap async def trigger(self) -> None: diff --git a/src/dodal/devices/zebra.py b/src/dodal/devices/zebra.py index 2abaa1f7c1..1f311f6e36 100644 --- a/src/dodal/devices/zebra.py +++ b/src/dodal/devices/zebra.py @@ -3,7 +3,6 @@ import asyncio from enum import Enum from functools import partialmethod -from typing import List from ophyd_async.core import ( AsyncStatus, @@ -166,7 +165,7 @@ def __init__(self, prefix: str, name: str = "") -> None: super().__init__(name) -def boolean_array_to_integer(values: List[bool]) -> int: +def boolean_array_to_integer(values: list[bool]) -> int: """Converts a boolean array to integer by interpretting it in binary with LSB 0 bit numbering. @@ -245,8 +244,8 @@ class LogicGateConfiguration: NUMBER_OF_INPUTS = 4 def __init__(self, input_source: int, invert: bool = False) -> None: - self.sources: List[int] = [] - self.invert: List[bool] = [] + self.sources: list[int] = [] + self.invert: list[bool] = [] self.add_input(input_source, invert) def add_input( @@ -271,7 +270,9 @@ def add_input( def __str__(self) -> str: input_strings = [] - for input, (source, invert) in enumerate(zip(self.sources, self.invert)): + for input, (source, invert) in enumerate( + zip(self.sources, self.invert, strict=False) + ): input_strings.append(f"INP{input+1}={'!' 
if invert else ''}{source}") return ", ".join(input_strings) diff --git a/src/dodal/devices/zocalo/zocalo_results.py b/src/dodal/devices/zocalo/zocalo_results.py index f847f67ab1..4967f1db8b 100644 --- a/src/dodal/devices/zocalo/zocalo_results.py +++ b/src/dodal/devices/zocalo/zocalo_results.py @@ -1,8 +1,9 @@ import asyncio from collections import OrderedDict +from collections.abc import Generator, Sequence from enum import Enum from queue import Empty, Queue -from typing import Any, Generator, Sequence, Tuple, TypedDict +from typing import Any, TypedDict import bluesky.plan_stubs as bps import numpy as np @@ -10,7 +11,7 @@ import workflows.transport from bluesky.protocols import Descriptor, Triggerable from numpy.typing import NDArray -from ophyd_async.core import StandardReadable, soft_signal_r_and_setter +from ophyd_async.core import HintedSignal, StandardReadable, soft_signal_r_and_setter from ophyd_async.core.async_status import AsyncStatus from workflows.transport.common_transport import CommonTransport @@ -88,14 +89,15 @@ def __init__( ) self.ispyb_dcid, _ = soft_signal_r_and_setter(int, name="ispyb_dcid") self.ispyb_dcgid, _ = soft_signal_r_and_setter(int, name="ispyb_dcgid") - self.set_readable_signals( - read=[ + self.add_readables( + [ self.results, self.centres_of_mass, self.bbox_sizes, self.ispyb_dcid, self.ispyb_dcgid, - ] + ], + wrapper=HintedSignal, ) super().__init__(name) @@ -242,7 +244,7 @@ def _receive_result( def get_processing_result( zocalo: ZocaloResults, -) -> Generator[Any, Any, Tuple[np.ndarray, np.ndarray] | Tuple[None, None]]: +) -> Generator[Any, Any, tuple[np.ndarray, np.ndarray] | tuple[None, None]]: """A minimal plan which will extract the top ranked xray centre and crystal bounding box size from the zocalo results. Returns (None, None) if no crystals were found.""" diff --git a/src/dodal/log.py b/src/dodal/log.py index 0d71f949ab..64745c9f8a 100644 --- a/src/dodal/log.py +++ b/src/dodal/log.py @@ -6,7 +6,7 @@ from logging.handlers import TimedRotatingFileHandler from os import environ from pathlib import Path -from typing import Deque, Tuple, TypedDict +from typing import TypedDict from bluesky.log import logger as bluesky_logger from graypy import GELFTCPHandler @@ -37,11 +37,14 @@ class CircularMemoryHandler(logging.Handler): that always contains the last {capacity} number of messages, this is only flushed when a log of specific {flushLevel} comes in. On flush this buffer is then passed to the {target} handler. + + The CircularMemoryHandler becomes the owner of the target handler which will be closed + on close of this handler. """ def __init__(self, capacity, flushLevel=logging.ERROR, target=None): logging.Handler.__init__(self) - self.buffer: Deque[logging.LogRecord] = deque(maxlen=capacity) + self.buffer: deque[logging.LogRecord] = deque(maxlen=capacity) self.flushLevel = flushLevel self.target = target @@ -66,6 +69,12 @@ def close(self): self.acquire() try: self.buffer.clear() + if self.target: + self.target.acquire() + try: + self.target.close() + finally: + self.target.release() self.target = None logging.Handler.close(self) finally: @@ -121,7 +130,7 @@ def set_up_graylog_handler(logger: Logger, host: str, port: int): def set_up_INFO_file_handler(logger, path: Path, filename: str): """Set up a file handler for the logger, at INFO level, which will keep 30 days of logs, rotating once per day. 
Creates the directory if necessary.""" - print(f"Logging to {path/filename}") + print(f"Logging to INFO file handler {path/filename}") path.mkdir(parents=True, exist_ok=True) file_handler = TimedRotatingFileHandler( filename=path / filename, when="MIDNIGHT", backupCount=INFO_LOG_DAYS @@ -137,8 +146,8 @@ def set_up_DEBUG_memory_handler( """Set up a Memory handler which holds 200k lines, and writes them to an hourly log file when it sees a message of severity ERROR. Creates the directory if necessary""" - print(f"Logging to {path/filename}") debug_path = path / "debug" + print(f"Logging to DEBUG handler {debug_path/filename}") debug_path.mkdir(parents=True, exist_ok=True) file_handler = TimedRotatingFileHandler( filename=debug_path / filename, when="H", backupCount=DEBUG_LOG_FILES_TO_KEEP @@ -240,7 +249,7 @@ def get_logging_file_path() -> Path: def get_graylog_configuration( dev_mode: bool, graylog_port: int | None = None -) -> Tuple[str, int]: +) -> tuple[str, int]: """Get the host and port for the graylog handler. If running in dev mode, this switches to localhost. Otherwise it publishes to the diff --git a/src/dodal/utils.py b/src/dodal/utils.py index a21ae450d6..0622726d0d 100644 --- a/src/dodal/utils.py +++ b/src/dodal/utils.py @@ -4,6 +4,7 @@ import re import socket import string +from collections.abc import Callable, Iterable, Mapping from dataclasses import dataclass from functools import wraps from importlib import import_module @@ -12,13 +13,6 @@ from types import ModuleType from typing import ( Any, - Callable, - Dict, - Iterable, - List, - Mapping, - Tuple, - Type, TypeVar, ) @@ -46,7 +40,7 @@ try: from typing import TypeAlias except ImportError: - from typing_extensions import TypeAlias + from typing import TypeAlias #: Protocols defining interface to hardware @@ -110,7 +104,7 @@ def wrapper(*args, **kwds) -> T: def make_all_devices( module: str | ModuleType | None = None, include_skipped: bool = False, **kwargs -) -> Tuple[Dict[str, AnyDevice], Dict[str, Exception]]: +) -> tuple[dict[str, AnyDevice], dict[str, Exception]]: """Makes all devices in the given beamline module. In cases of device interdependencies it ensures a device is created before any which @@ -129,7 +123,7 @@ def make_all_devices( if isinstance(module, str) or module is None: module = import_module(module or __name__) factories = collect_factories(module, include_skipped) - devices: Tuple[Dict[str, AnyDevice], Dict[str, Exception]] = invoke_factories( + devices: tuple[dict[str, AnyDevice], dict[str, Exception]] = invoke_factories( factories, **kwargs ) @@ -139,7 +133,7 @@ def make_all_devices( def invoke_factories( factories: Mapping[str, AnyDeviceFactory], **kwargs, -) -> Tuple[Dict[str, AnyDevice], Dict[str, Exception]]: +) -> tuple[dict[str, AnyDevice], dict[str, Exception]]: """Call device factory functions in the correct order to resolve dependencies. Inspect function signatures to work out dependencies and execute functions in correct order. 
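# A minimal sketch of the signature-inspection approach invoke_factories
# relies on: a factory parameter named after another factory is treated as a
# dependency and is resolved before that factory is called. All names here
# (invoke_in_dependency_order, the lambda factories) are assumptions for
# illustration, not dodal's actual implementation.
import inspect
from collections.abc import Callable, Mapping
from typing import Any

def invoke_in_dependency_order(factories: Mapping[str, Callable[..., Any]]) -> dict[str, Any]:
    devices: dict[str, Any] = {}
    pending = dict(factories)
    while pending:
        for name, factory in list(pending.items()):
            deps = [p for p in inspect.signature(factory).parameters if p in factories]
            if all(dep in devices for dep in deps):
                # All dependencies resolved: build this device and move on.
                devices[name] = factory(**{dep: devices[dep] for dep in deps})
                del pending[name]
                break
        else:
            raise ValueError(f"Unresolvable device dependencies: {set(pending)}")
    return devices

# Example: "stage" declares a dependency on "motor" via its parameter name.
made = invoke_in_dependency_order(
    {"motor": lambda: "motor-device", "stage": lambda motor: f"stage-on-{motor}"}
)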
@@ -259,15 +253,13 @@ def is_any_device_factory(func: Callable) -> bool: return is_v1_device_factory(func) or is_v2_device_factory(func) -def is_v2_device_type(obj: Type[Any]) -> bool: +def is_v2_device_type(obj: type[Any]) -> bool: return inspect.isclass(obj) and issubclass(obj, OphydV2Device) -def is_v1_device_type(obj: Type[Any]) -> bool: +def is_v1_device_type(obj: type[Any]) -> bool: is_class = inspect.isclass(obj) - follows_protocols = any( - (isinstance(obj, protocol) for protocol in BLUESKY_PROTOCOLS) - ) + follows_protocols = any(isinstance(obj, protocol) for protocol in BLUESKY_PROTOCOLS) return is_class and follows_protocols and not is_v2_device_type(obj) @@ -292,13 +284,11 @@ def get_beamline_based_on_environment_variable() -> ModuleType: or any(c not in valid_characters for c in beamline) ): raise ValueError( - "Invalid BEAMLINE variable - module name is not a permissible python module name, got '{}'".format( - beamline - ) + f"Invalid BEAMLINE variable - module name is not a permissible python module name, got '{beamline}'" ) try: - return importlib.import_module("dodal.beamlines.{}".format(beamline)) + return importlib.import_module(f"dodal.beamlines.{beamline}") except ImportError as e: raise ValueError( f"Failed to import beamline-specific dodal module 'dodal.beamlines.{beamline}'." @@ -306,7 +296,7 @@ def get_beamline_based_on_environment_variable() -> ModuleType: ) from e -def _find_next_run_number_from_files(file_names: List[str]) -> int: +def _find_next_run_number_from_files(file_names: list[str]) -> int: valid_numbers = [] for file_name in file_names: diff --git a/tests/common/beamlines/test_beamline_utils.py b/tests/common/beamlines/test_beamline_utils.py index 7040c5f5ef..6cf3b54a8c 100644 --- a/tests/common/beamlines/test_beamline_utils.py +++ b/tests/common/beamlines/test_beamline_utils.py @@ -1,4 +1,5 @@ -from unittest.mock import ANY, MagicMock, patch +import asyncio +from unittest.mock import ANY, AsyncMock, MagicMock, patch import pytest from bluesky.run_engine import RunEngine as RE @@ -14,11 +15,22 @@ from dodal.devices.qbpm1 import QBPM1 from dodal.devices.smargon import Smargon from dodal.devices.zebra import Zebra +from dodal.log import LOGGER from dodal.utils import make_all_devices from ...conftest import mock_beamline_module_filepaths +@pytest.fixture(autouse=True) +def flush_event_loop_on_finish(event_loop): + # wait for the test function to complete + yield None + + if pending_tasks := asyncio.all_tasks(event_loop): + LOGGER.warning(f"Waiting for pending tasks to complete {pending_tasks}") + event_loop.run_until_complete(asyncio.gather(*pending_tasks)) + + @pytest.fixture(autouse=True) def setup(): beamline_utils.clear_devices() @@ -107,10 +119,11 @@ def test_wait_for_v1_device_connection_passes_through_timeout(kwargs, expected_t @pytest.mark.parametrize( "kwargs,expected_timeout", [({}, 5.0), ({"timeout": 15.0}, 15.0)] ) -@patch.object(beamline_utils, "call_in_bluesky_event_loop", spec=callable) -def test_wait_for_v2_device_connection_passes_through_timeout( - call_in_bluesky_el, kwargs, expected_timeout -): +@patch( + "dodal.common.beamlines.beamline_utils.v2_device_wait_for_connection", + new=AsyncMock(), +) +def test_wait_for_v2_device_connection_passes_through_timeout(kwargs, expected_timeout): RE() device = OphydV2Device() device.connect = MagicMock() diff --git a/tests/common/beamlines/test_device_instantiation.py b/tests/common/beamlines/test_device_instantiation.py index cc9e455a13..c3ca85ade0 100644 --- 
a/tests/common/beamlines/test_device_instantiation.py +++ b/tests/common/beamlines/test_device_instantiation.py @@ -8,7 +8,7 @@ def follows_bluesky_protocols(obj: Any) -> bool: - return any((isinstance(obj, protocol) for protocol in BLUESKY_PROTOCOLS)) + return any(isinstance(obj, protocol) for protocol in BLUESKY_PROTOCOLS) @pytest.mark.parametrize( diff --git a/tests/conftest.py b/tests/conftest.py index 92a855bc0c..afbcc4ba25 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,13 +4,14 @@ import os import sys import time +from collections.abc import Mapping from os import environ, getenv from pathlib import Path -from typing import Mapping from unittest.mock import MagicMock, patch import pytest from bluesky.run_engine import RunEngine +from ophyd.status import Status from dodal.beamlines import i03 from dodal.common.beamlines import beamline_utils @@ -31,6 +32,18 @@ "s04": mock_paths, } +# Prevent pytest from catching exceptions when debugging in vscode so that break on +# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) +if os.getenv("PYTEST_RAISE", "0") == "1": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value + def mock_beamline_module_filepaths(bl_name, bl_module): if mock_attributes := mock_attributes_table.get(bl_name): @@ -114,3 +127,9 @@ def append_and_print(name, doc): RE.subscribe(append_and_print) return docs + + +def failed_status(failure: Exception) -> Status: + status = Status() + status.set_exception(failure) + return status diff --git a/tests/devices/i22/test_dcm.py b/tests/devices/i22/test_dcm.py index 7693283a06..fab25936f5 100644 --- a/tests/devices/i22/test_dcm.py +++ b/tests/devices/i22/test_dcm.py @@ -1,4 +1,4 @@ -from typing import Mapping +from collections.abc import Mapping from unittest.mock import ANY import bluesky.plans as bp diff --git a/tests/devices/system_tests/test_aperturescatterguard_system.py b/tests/devices/system_tests/test_aperturescatterguard_system.py index adfa5f94b1..6f90d4ceff 100644 --- a/tests/devices/system_tests/test_aperturescatterguard_system.py +++ b/tests/devices/system_tests/test_aperturescatterguard_system.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Tuple, cast +from typing import Any, cast import bluesky.plan_stubs as bps import pytest @@ -20,7 +20,7 @@ class GDABeamlineParameters: - params: Dict[str, Any] + params: dict[str, Any] def __repr__(self) -> str: return repr(self.params) @@ -39,11 +39,11 @@ def from_file(cls, path: str): for line in config_lines_nocomments ] config_pairs: list[tuple[str, Any]] = [ - cast(Tuple[str, Any], param) + cast(tuple[str, Any], param) for param in config_lines_sep_key_and_value if len(param) == 2 ] - for i, (param, value) in enumerate(config_pairs): + for i, (_, value) in enumerate(config_pairs): if value == "Yes": config_pairs[i] = (config_pairs[i][0], True) elif value == "No": @@ -131,7 +131,7 @@ class MonitorCallback(CallbackBase): t_ap_y: float = 0 t_sg_y: float = 0 - event_docs: List[Dict] = [] + event_docs: list[dict] = [] def event(self, doc): self.event_docs.append(doc) diff --git a/tests/devices/unit_tests/oav/image_recognition/test_pin_tip_detect.py b/tests/devices/unit_tests/oav/image_recognition/test_pin_tip_detect.py index ecfa678f04..2167241a0c 100644 --- a/tests/devices/unit_tests/oav/image_recognition/test_pin_tip_detect.py +++ 
b/tests/devices/unit_tests/oav/image_recognition/test_pin_tip_detect.py @@ -159,7 +159,7 @@ async def get_array_data(_): assert mock_process_array.call_count > 1 -@patch("dodal.devices.oav.pin_image_recognition.LOGGER.warn") +@patch("dodal.devices.oav.pin_image_recognition.LOGGER.warning") @patch("dodal.devices.oav.pin_image_recognition.observe_value") async def test_given_tip_invalid_then_loop_keeps_retrying_until_valid( mock_image_read: MagicMock, diff --git a/tests/devices/unit_tests/test_aperture_scatterguard.py b/tests/devices/unit_tests/test_aperture_scatterguard.py index 6c40e01e39..214fafd683 100644 --- a/tests/devices/unit_tests/test_aperture_scatterguard.py +++ b/tests/devices/unit_tests/test_aperture_scatterguard.py @@ -1,6 +1,6 @@ +from collections.abc import Sequence from contextlib import ExitStack from dataclasses import asdict -from typing import Sequence from unittest.mock import ANY, MagicMock, call import bluesky.plan_stubs as bps @@ -115,6 +115,7 @@ def _assert_patched_ap_sg_has_call( for motor, pos in zip( get_all_motors(ap_sg), position, + strict=False, ): get_mock_put(motor.user_setpoint).assert_called_with( pos, wait=True, timeout=ANY @@ -193,6 +194,7 @@ def set_underlying_motors( for motor, pos in zip( get_all_motors(ap_sg), position, + strict=False, ): motor.set(pos) diff --git a/tests/devices/unit_tests/test_eiger.py b/tests/devices/unit_tests/test_eiger.py index 4da5bf3ee5..4cdcd6a611 100644 --- a/tests/devices/unit_tests/test_eiger.py +++ b/tests/devices/unit_tests/test_eiger.py @@ -3,8 +3,9 @@ import pytest from mockito import ANY, mock, verify, when -from ophyd.sim import make_fake_device +from ophyd.sim import NullStatus, make_fake_device from ophyd.status import Status +from ophyd.utils import UnknownStatusFailure from dodal.devices.detector import DetectorParams, TriggerMode from dodal.devices.detector.det_dim_constants import EIGER2_X_16M_SIZE @@ -13,6 +14,8 @@ from dodal.devices.util.epics_util import run_functions_without_blocking from dodal.log import LOGGER +from ...conftest import failed_status + TEST_DETECTOR_SIZE_CONSTANTS = EIGER2_X_16M_SIZE TEST_EXPECTED_ENERGY = 100.0 @@ -160,7 +163,7 @@ def test_check_detector_variables( try: fake_eiger.set_detector_parameters(detector_params) except Exception as e: - assert False, f"exception was raised {e}" + raise AssertionError(f"exception was raised {e}") from e # Tests transition from set_odin_pvs_after_file_writer_set to set_mx_settings_pvs @@ -215,17 +218,18 @@ def test_stage_raises_exception_if_odin_initialisation_status_not_ok(fake_eiger) @pytest.mark.parametrize( - "roi_mode, expected_num_change_roi_calls", [(True, 1), (False, 0)] + "roi_mode, expected_num_change_roi_calls, expected_exception", + [(True, 1, "Test Exception 2"), (False, 0, "Test Exception 1")], ) @patch("dodal.devices.eiger.await_value") def test_stage_enables_roi_mode_correctly( - mock_await, fake_eiger, roi_mode, expected_num_change_roi_calls + mock_await, fake_eiger, roi_mode, expected_num_change_roi_calls, expected_exception ): when(fake_eiger.odin.nodes).clear_odin_errors().thenReturn(None) when(fake_eiger.odin).check_odin_initialised().thenReturn((True, "")) fake_eiger.detector_params.use_roi_mode = roi_mode - mock_await.return_value = Status(done=True) + mock_await.return_value = failed_status(UnknownStatusFailure("Test Exception")) change_roi_mode_status = Status() fake_eiger.change_roi_mode = MagicMock(return_value=change_roi_mode_status) @@ -234,9 +238,10 @@ def test_stage_enables_roi_mode_correctly( assert 
fake_eiger.change_roi_mode.call_count == expected_num_change_roi_calls # Tidy up async staging - change_roi_mode_status.set_exception(Exception) - with pytest.raises(Exception): + change_roi_mode_status.set_exception(UnknownStatusFailure("Test Exception 2")) + with pytest.raises(UnknownStatusFailure) as e: returned_status.wait(0.1) + assert e.args[0] == expected_exception def test_disable_roi_mode_sets_correct_roi_mode(fake_eiger): @@ -360,12 +365,9 @@ def test_stage_runs_successfully(mock_await, fake_eiger: EigerDetector): fake_eiger.arming_status.wait(1) # This should complete long before 1s -@patch("dodal.devices.eiger.await_value") def test_given_stale_parameters_goes_high_before_callbacks_then_stale_parameters_waited_on( - mock_await, fake_eiger: EigerDetector, ): - mock_await.return_value = Status(done=True) set_up_eiger_to_stage_happily(fake_eiger) mock_eiger_odin_statuses(fake_eiger) @@ -544,7 +546,7 @@ def test_given_in_free_run_mode_and_not_all_frames_collected_in_time_when_unstag fake_eiger.detector_params.trigger_mode = TriggerMode.FREE_RUN fake_eiger.ALL_FRAMES_TIMEOUT = 0.1 - with pytest.raises(Exception): + with pytest.raises(TimeoutError): fake_eiger.unstage() assert fake_eiger.odin.meta.stop_writing.get() == 1 @@ -605,7 +607,7 @@ def test_given_detector_arming_status_failed_when_unstage_then_detector_still_di fake_eiger.cam.acquire.sim_put(1) # type: ignore fake_eiger.arming_status = get_bad_status() - with pytest.raises(Exception): + with pytest.raises(RuntimeError): fake_eiger.unstage() assert fake_eiger.cam.acquire.get() == 0 @@ -638,9 +640,9 @@ def test_unwrapped_arm_chain_functions_are_not_called_outside_util( call_func: MagicMock, fake_eiger: EigerDetector, ): - fake_eiger.odin.stop = MagicMock(return_value=Status(done=True, success=True)) - fake_eiger.detector_params.use_roi_mode = True - done_status = Status(done=True, success=True) + fake_eiger.odin.stop = MagicMock(return_value=NullStatus()) + fake_eiger.detector_params.use_roi_mode = True # type: ignore + done_status = NullStatus() call_func.return_value = done_status fake_eiger.enable_roi_mode = MagicMock(name="enable_roi_mode") diff --git a/tests/devices/unit_tests/test_focusing_mirror.py b/tests/devices/unit_tests/test_focusing_mirror.py index c10180e7b4..32e749b726 100644 --- a/tests/devices/unit_tests/test_focusing_mirror.py +++ b/tests/devices/unit_tests/test_focusing_mirror.py @@ -116,7 +116,7 @@ def test_mirror_set_voltage_sets_and_waits_happy_path( RE: RunEngine, vfm_mirror_voltages_with_set: VFMMirrorVoltages, ): - async def completed(): + def completed(): pass mock_put = get_mock_put( @@ -142,7 +142,7 @@ def test_mirror_set_voltage_sets_and_waits_happy_path_spin_while_waiting_for_sle RE: RunEngine, vfm_mirror_voltages_with_set_multiple_spins: VFMMirrorVoltages, ): - async def completed(): + def completed(): pass mock_put = get_mock_put( diff --git a/tests/devices/unit_tests/test_hutch_shutter.py b/tests/devices/unit_tests/test_hutch_shutter.py index 2dc6b19d6c..3b129fdd94 100644 --- a/tests/devices/unit_tests/test_hutch_shutter.py +++ b/tests/devices/unit_tests/test_hutch_shutter.py @@ -1,4 +1,3 @@ -from typing import List from unittest.mock import call import bluesky.plan_stubs as bps @@ -61,7 +60,7 @@ async def test_shutter_raises_error_on_set_if_hutch_not_interlocked( ) async def test_shutter_operations( demand: ShutterDemand, - expected_calls: List, + expected_calls: list, expected_state: ShutterState, fake_shutter: HutchShutter, RE, diff --git a/tests/devices/unit_tests/test_odin.py 
b/tests/devices/unit_tests/test_odin.py index ac46588bc0..8170c20351 100644 --- a/tests/devices/unit_tests/test_odin.py +++ b/tests/devices/unit_tests/test_odin.py @@ -38,7 +38,7 @@ def test_check_odin_state( if is_initialised: assert fake_odin.check_odin_state() == expected_state else: - with pytest.raises(Exception): + with pytest.raises(RuntimeError): fake_odin.check_odin_state() diff --git a/tests/devices/unit_tests/test_slits.py b/tests/devices/unit_tests/test_slits.py index ea158e71e6..593733d368 100644 --- a/tests/devices/unit_tests/test_slits.py +++ b/tests/devices/unit_tests/test_slits.py @@ -1,4 +1,5 @@ -from typing import Any, Mapping +from collections.abc import Mapping +from typing import Any from unittest.mock import ANY import pytest diff --git a/tests/devices/unit_tests/test_smargon.py b/tests/devices/unit_tests/test_smargon.py index d65aca29e1..e0e3cbcc93 100644 --- a/tests/devices/unit_tests/test_smargon.py +++ b/tests/devices/unit_tests/test_smargon.py @@ -1,5 +1,3 @@ -from typing import Tuple - import pytest from bluesky import RunEngine from bluesky import plan_stubs as bps @@ -15,7 +13,7 @@ async def smargon() -> Smargon: return smargon -def set_smargon_pos(smargon: Smargon, pos: Tuple[float, float, float]): +def set_smargon_pos(smargon: Smargon, pos: tuple[float, float, float]): set_mock_value(smargon.x.user_readback, pos[0]) set_mock_value(smargon.y.user_readback, pos[1]) set_mock_value(smargon.z.user_readback, pos[2]) diff --git a/tests/devices/unit_tests/test_synchrotron.py b/tests/devices/unit_tests/test_synchrotron.py index 73d2034b00..197fb52a22 100644 --- a/tests/devices/unit_tests/test_synchrotron.py +++ b/tests/devices/unit_tests/test_synchrotron.py @@ -1,5 +1,6 @@ import json -from typing import Any, Awaitable, Callable, Dict, List +from collections.abc import Awaitable, Callable +from typing import Any import bluesky.plan_stubs as bps import pytest @@ -21,7 +22,7 @@ TYPE = "Synchrotron X-ray Source" NUMBER = "number" STRING = "string" -EMPTY_LIST: List = [] +EMPTY_LIST: list = [] READINGS = [CURRENT, ENERGY] CONFIGS = [PROBE, MODE.value, TYPE] @@ -164,16 +165,16 @@ async def test_synchrotron_count(RE: RunEngine, sim_synchrotron: Synchrotron): assert len(docs) == 4 assert sim_synchrotron.name in docs[1]["configuration"] cfg_data_keys = docs[1]["configuration"][sim_synchrotron.name]["data_keys"] - for sig, addr in zip(CONFIG_SIGNALS, CONFIG_ADDRESSES): + for sig, addr in zip(CONFIG_SIGNALS, CONFIG_ADDRESSES, strict=False): assert sig in cfg_data_keys assert cfg_data_keys[sig][DESCRIPTION_FIELDS[0]] == addr assert cfg_data_keys[sig][DESCRIPTION_FIELDS[1]] == STRING assert cfg_data_keys[sig][DESCRIPTION_FIELDS[2]] == EMPTY_LIST cfg_data = docs[1]["configuration"][sim_synchrotron.name]["data"] - for sig, value in zip(CONFIG_SIGNALS, CONFIGS): + for sig, value in zip(CONFIG_SIGNALS, CONFIGS, strict=False): assert cfg_data[sig] == value data_keys = docs[1]["data_keys"] - for sig, addr in zip(READ_SIGNALS, READING_ADDRESSES): + for sig, addr in zip(READ_SIGNALS, READING_ADDRESSES, strict=False): assert sig in data_keys assert data_keys[sig][DESCRIPTION_FIELDS[0]] == addr assert data_keys[sig][DESCRIPTION_FIELDS[1]] == NUMBER @@ -181,15 +182,15 @@ async def test_synchrotron_count(RE: RunEngine, sim_synchrotron: Synchrotron): data = docs[2]["data"] assert len(data) == len(READ_SIGNALS) - for sig, value in zip(READ_SIGNALS, READINGS): + for sig, value in zip(READ_SIGNALS, READINGS, strict=False): assert sig in data assert data[sig] == value async def verify( - func: 
Callable[[], Awaitable[Dict[str, Any]]], - signals: List[str], - fields: List[str], + func: Callable[[], Awaitable[dict[str, Any]]], + signals: list[str], + fields: list[str], expectation: str, ): expected = json.loads(expectation) diff --git a/tests/devices/unit_tests/test_thawer.py b/tests/devices/unit_tests/test_thawer.py index da145fe7c2..80335205fc 100644 --- a/tests/devices/unit_tests/test_thawer.py +++ b/tests/devices/unit_tests/test_thawer.py @@ -50,14 +50,18 @@ async def test_given_thawing_already_triggered_when_triggered_again_then_fails( mock_sleep: AsyncMock, thawer: Thawer, ): - patch_sleep(mock_sleep) + release_sleep = patch_sleep(mock_sleep) - thawer.thaw_for_time_s.set(10) + status = thawer.thaw_for_time_s.set(10) - await asyncio.sleep(0.01) + try: + await asyncio.sleep(0.01) - with pytest.raises(ThawingException): - await thawer.thaw_for_time_s.set(10) + with pytest.raises(ThawingException): + await thawer.thaw_for_time_s.set(10) + finally: + release_sleep.set() + await status @patch("dodal.devices.thawer.sleep") diff --git a/tests/devices/unit_tests/test_undulator_dcm.py b/tests/devices/unit_tests/test_undulator_dcm.py index e50b304bda..6b9fd1a1b9 100644 --- a/tests/devices/unit_tests/test_undulator_dcm.py +++ b/tests/devices/unit_tests/test_undulator_dcm.py @@ -16,6 +16,7 @@ UndulatorDCM, _get_closest_gap_for_energy, ) +from dodal.log import LOGGER from ...conftest import MOCK_DAQ_CONFIG_PATH @@ -24,6 +25,16 @@ ) +@pytest.fixture(autouse=True) +def flush_event_loop_on_finish(event_loop): + # wait for the test function to complete + yield None + + if pending_tasks := asyncio.all_tasks(event_loop): + LOGGER.warning(f"Waiting for pending tasks to complete {pending_tasks}") + event_loop.run_until_complete(asyncio.gather(*pending_tasks)) + + @pytest.fixture async def fake_undulator_dcm() -> UndulatorDCM: async with DeviceCollector(mock=True): diff --git a/tests/devices/unit_tests/test_utils.py b/tests/devices/unit_tests/test_utils.py index a188fd4682..f4739a8aac 100644 --- a/tests/devices/unit_tests/test_utils.py +++ b/tests/devices/unit_tests/test_utils.py @@ -23,9 +23,11 @@ def discard_status(status: Status): def reset_logs(): - for handler in LOGGER.handlers: + old_handlers = list(LOGGER.handlers) + for handler in old_handlers: handler.close() - LOGGER.handlers = [] + LOGGER.removeHandler(handler) + mock_graylog_handler_class = MagicMock(spec=GELFTCPHandler) mock_graylog_handler_class.return_value.level = logging.DEBUG with patch("dodal.log.GELFTCPHandler", mock_graylog_handler_class): @@ -60,7 +62,6 @@ def test_full_status_gives_error_if_intermediate_status_fails(): def test_check_call_back_error_gives_correct_error(): LOGGER.error = MagicMock() - returned_status = Status(done=True, success=True) with pytest.raises(StatusException): returned_status = run_functions_without_blocking([get_bad_status]) returned_status.wait(0.1) @@ -86,7 +87,6 @@ def get_good_status(): return status dummy_func = MagicMock(return_value=3) - returned_status = Status(done=True, success=True) returned_status = run_functions_without_blocking( [lambda: get_good_status(), dummy_func], timeout=0.05 ) @@ -126,7 +126,7 @@ async def test_given_disp_high_when_set_SetWhenEnabled_then_proc_not_set_until_d def test_if_one_status_errors_then_later_functions_not_called(): - tester = MagicMock(return_value=Status(done=True, success=True)) + tester = MagicMock(return_value=NullStatus()) status_calls = [ NullStatus, NullStatus, @@ -144,7 +144,7 @@ def test_if_one_status_errors_then_later_functions_not_called(): 
diff --git a/tests/devices/unit_tests/test_webcam.py b/tests/devices/unit_tests/test_webcam.py
index 0df2534f8f..bf8f28c5bf 100644
--- a/tests/devices/unit_tests/test_webcam.py
+++ b/tests/devices/unit_tests/test_webcam.py
@@ -37,6 +37,9 @@ async def test_given_filename_and_directory_when_trigger_and_read_then_returns_e
     webcam: Webcam,
 ):
     mock_get.return_value.__aenter__.return_value = AsyncMock()
+    mock_get.return_value.__aenter__.return_value = (mock_response := AsyncMock())
+    # raise_for_status should be MagicMock() not AsyncMock()
+    mock_response.raise_for_status = MagicMock()
     await webcam.filename.set(filename)
     await webcam.directory.set(directory)
     await webcam.trigger()
@@ -50,6 +53,8 @@ async def test_given_data_returned_from_url_when_trigger_then_data_written(
     mock_get: MagicMock, mock_aiofiles, webcam: Webcam
 ):
     mock_get.return_value.__aenter__.return_value = (mock_response := AsyncMock())
+    # raise_for_status should be MagicMock() not AsyncMock()
+    mock_response.raise_for_status = MagicMock()
     mock_response.read.return_value = (test_web_data := "TEST")
     mock_open = mock_aiofiles.open
     mock_open.return_value.__aenter__.return_value = (mock_file := AsyncMock())
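The comment repeated in both hunks is the important detail: attributes of an AsyncMock are themselves AsyncMocks, so calling mock_response.raise_for_status() would hand back an un-awaited coroutine, whereas the real aiohttp raise_for_status() is synchronous. A standalone illustration of the difference (assumes nothing beyond the stdlib):

    import asyncio
    from unittest.mock import AsyncMock, MagicMock

    response = AsyncMock()
    # Child attributes of an AsyncMock are async too, so this call
    # produces a coroutine that the code under test would never await.
    coro = response.raise_for_status()
    assert asyncio.iscoroutine(coro)
    coro.close()  # avoid a "never awaited" warning

    # Pinning the attribute to a MagicMock restores synchronous behaviour.
    response.raise_for_status = MagicMock()
    assert not asyncio.iscoroutine(response.raise_for_status())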
diff --git a/tests/devices/unit_tests/test_xspress3.py b/tests/devices/unit_tests/test_xspress3.py
index 0ff442507f..f8cd341723 100644
--- a/tests/devices/unit_tests/test_xspress3.py
+++ b/tests/devices/unit_tests/test_xspress3.py
@@ -3,6 +3,7 @@
 
 import bluesky.plan_stubs as bps
 import pytest
+from bluesky import FailedStatus
 from bluesky.run_engine import RunEngine
 from ophyd_async.core import (
     DeviceCollector,
@@ -69,7 +70,7 @@ async def test_stage_fail_on_detector_not_busy_state(
     mock_xspress3mini.timeout = 0.1
     with pytest.raises(TimeoutError):
         await mock_xspress3mini.stage()
-    with pytest.raises(Exception):
+    with pytest.raises(FailedStatus):
         RE(bps.stage(mock_xspress3mini, wait=True))
     await asyncio.sleep(0.2)
     assert 2 == get_mock_put(mock_xspress3mini.trigger_mode).call_count
@@ -85,7 +86,7 @@ async def test_stage_fail_to_acquire_timeout(
     mock_xspress3mini.timeout = 0.1
     with pytest.raises(TimeoutError):
         await mock_xspress3mini.stage()
-    with pytest.raises(Exception):
+    with pytest.raises(FailedStatus):
         RE(bps.stage(mock_xspress3mini, wait=True))
     await asyncio.sleep(0.2)
     assert 2 == get_mock_put(mock_xspress3mini.trigger_mode).call_count
diff --git a/tests/devices/unit_tests/test_zebra.py b/tests/devices/unit_tests/test_zebra.py
index 38b13c6016..2db870d645 100644
--- a/tests/devices/unit_tests/test_zebra.py
+++ b/tests/devices/unit_tests/test_zebra.py
@@ -111,7 +111,7 @@ async def run_configurer_test(
     else:
         configurer.apply_or_gate_config(gate_num, config)
 
-    for pv, value in zip(mock_pvs, expected_pv_values):
+    for pv, value in zip(mock_pvs, expected_pv_values, strict=False):
         pv.set.assert_called_once_with(value)
 
 
diff --git a/tests/devices/unit_tests/test_zocalo_interaction.py b/tests/devices/unit_tests/test_zocalo_interaction.py
index 688a3d751d..3ddd042c3d 100644
--- a/tests/devices/unit_tests/test_zocalo_interaction.py
+++ b/tests/devices/unit_tests/test_zocalo_interaction.py
@@ -1,7 +1,7 @@
 import getpass
 import socket
+from collections.abc import Callable
 from functools import partial
-from typing import Callable, Dict
 from unittest.mock import MagicMock, patch
 
 from pytest import mark, raises
@@ -64,9 +64,9 @@ def normally(function_to_run, mock_transport):
 
 
 def with_exception(function_to_run, mock_transport):
-    mock_transport.send.side_effect = Exception()
+    mock_transport.send.side_effect = AssertionError("Test exception")
 
-    with raises(Exception):
+    with raises(AssertionError):
         function_to_run()
 
 
@@ -83,7 +83,7 @@ def with_exception(function_to_run, mock_transport):
         ),
     ],
 )
-def test_run_start(function_wrapper: Callable, expected_message: Dict):
+def test_run_start(function_wrapper: Callable, expected_message: dict):
     """
     Args:
         function_wrapper (Callable): A wrapper used to test for expected exceptions
@@ -102,7 +102,7 @@ def test_run_start(function_wrapper: Callable, expected_message: Dict):
         (with_exception, EXPECTED_RUN_END_MESSAGE),
     ],
 )
-def test__run_start_and_end(function_wrapper: Callable, expected_message: Dict):
+def test__run_start_and_end(function_wrapper: Callable, expected_message: dict):
     """
     Args:
         function_wrapper (Callable): A wrapper used to test for expected exceptions
diff --git a/tests/fake_zocalo/__main__.py b/tests/fake_zocalo/__main__.py
index f24d20f40d..65116e181d 100644
--- a/tests/fake_zocalo/__main__.py
+++ b/tests/fake_zocalo/__main__.py
@@ -3,7 +3,7 @@
 import time
 from collections import defaultdict
 from pathlib import Path
-from typing import Any, Tuple
+from typing import Any
 
 import ispyb.sqlalchemy
 import pika
@@ -56,7 +56,7 @@ def load_configuration_file(filename):
     return conf
 
 
-def get_dcgid_and_prefix(dcid: int, Session) -> Tuple[int, str]:
+def get_dcgid_and_prefix(dcid: int, Session) -> tuple[int, str]:
     try:
         with Session() as session:
             query = (
diff --git a/tests/test_cli.py b/tests/test_cli.py
new file mode 100644
index 0000000000..0653ebd9f0
--- /dev/null
+++ b/tests/test_cli.py
@@ -0,0 +1,12 @@
+import subprocess
+import sys
+
+import pytest
+
+from dodal import __version__
+
+
+@pytest.mark.skip_in_pycharm(reason="subprocess returns tty escape sequences")
+def test_cli_version():
+    cmd = [sys.executable, "-m", "dodal", "--version"]
+    assert subprocess.check_output(cmd).decode().strip() == __version__
"dodal.log", False, 10000) + handler_config = set_up_all_logging_handlers( + mock_logger, Path("tmp/dev"), "dodal.log", False, 10000 + ) mock_GELFTCPHandler.assert_called_once_with( "graylog-log-target.diamond.ac.uk", 12231 ) + _close_all_handlers(handler_config) @patch("dodal.log.GELFTCPHandler", autospec=True) @@ -96,7 +107,7 @@ def test_no_env_variable_sets_correct_file_handler( mock_file_handler.return_value.level = logging.INFO mock_GELFTCPHandler.return_value.level = logging.INFO clear_all_loggers_and_handlers() - _ = set_up_all_logging_handlers( + handler_config = set_up_all_logging_handlers( LOGGER, get_logging_file_path(), "dodal.log", True, ERROR_LOG_BUFFER_LINES ) integrate_bluesky_and_ophyd_logging(LOGGER) @@ -107,6 +118,7 @@ def test_no_env_variable_sets_correct_file_handler( ] mock_file_handler.assert_has_calls(expected_calls, any_order=True) + _close_all_handlers(handler_config) def test_beamline_filter_adds_dev_if_no_beamline(): @@ -248,3 +260,8 @@ def __init__(self, name: str = test_device_name) -> None: assert f"[{test_signal_name}]" in stream_handler.stream.write.call_args.args[0] device.log.debug("test message") assert f"[{test_device_name}]" in stream_handler.stream.write.call_args.args[0] + + +def _close_all_handlers(handler_config: DodalLogHandlers): + for handler in handler_config.values(): + cast(logging.Handler, handler).close() From 8ae03430ebba75754f37ec0011021ed52768d30f Mon Sep 17 00:00:00 2001 From: rtuck99 Date: Tue, 23 Jul 2024 10:32:30 +0100 Subject: [PATCH 2/2] (DiamondLightSource/hyperion#1169) remove test feature flags from dodal (#682) --- src/dodal/devices/eiger.py | 15 +++++---------- tests/devices/unit_tests/test_eiger.py | 4 +--- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/src/dodal/devices/eiger.py b/src/dodal/devices/eiger.py index ade4ad468f..aae546e714 100644 --- a/src/dodal/devices/eiger.py +++ b/src/dodal/devices/eiger.py @@ -12,10 +12,6 @@ FREE_RUN_MAX_IMAGES = 1000000 -# TODO present for testing purposes, remove -TEST_1169_FIX = True -TEST_1169_INJECT = False - class InternalEigerTriggerMode(Enum): INTERNAL_SERIES = 0 @@ -101,7 +97,7 @@ def wait_on_arming_if_started(self): def stage(self): self.wait_on_arming_if_started() - if TEST_1169_INJECT or not self.is_armed(): + if not self.is_armed(): LOGGER.info("Eiger not armed, arming") self.async_stage().wait(timeout=self.ARMING_TIMEOUT) @@ -341,6 +337,10 @@ def do_arming_chain(self) -> Status: functions_to_do_arm.append(self.enable_roi_mode) arming_sequence_funcs = [ + # If a beam dump occurs after arming the eiger but prior to eiger staging, + # the odin may timeout which will cause the arming sequence to be retried; + # if this previously completed successfully we must reset the odin first + self.odin.stop, lambda: self.change_dev_shm(detector_params.enable_dev_shm), lambda: self.set_detector_threshold(detector_params.expected_energy_ev), self.set_cam_pvs, @@ -354,11 +354,6 @@ def do_arming_chain(self) -> Status: self._wait_fan_ready, self._finish_arm, ] - if TEST_1169_FIX: - # If a beam dump occurs after arming the eiger but prior to eiger staging, - # the odin may timeout which will cause the arming sequence to be retried; - # if this previously completed successfully we must reset the odin first - arming_sequence_funcs.insert(0, self.odin.stop) functions_to_do_arm.extend(arming_sequence_funcs) diff --git a/tests/devices/unit_tests/test_eiger.py b/tests/devices/unit_tests/test_eiger.py index 4cdcd6a611..a8e3cbdc21 100644 --- a/tests/devices/unit_tests/test_eiger.py +++ 
diff --git a/tests/devices/unit_tests/test_eiger.py b/tests/devices/unit_tests/test_eiger.py
index 4cdcd6a611..a8e3cbdc21 100644
--- a/tests/devices/unit_tests/test_eiger.py
+++ b/tests/devices/unit_tests/test_eiger.py
@@ -9,7 +9,7 @@
 
 from dodal.devices.detector import DetectorParams, TriggerMode
 from dodal.devices.detector.det_dim_constants import EIGER2_X_16M_SIZE
-from dodal.devices.eiger import TEST_1169_FIX, EigerDetector
+from dodal.devices.eiger import EigerDetector
 from dodal.devices.status import await_value
 from dodal.devices.util.epics_util import run_functions_without_blocking
 from dodal.log import LOGGER
@@ -31,8 +31,6 @@
 TEST_USE_ROI_MODE = False
 TEST_DET_DIST_TO_BEAM_CONVERTER_PATH = "tests/devices/unit_tests/test_lookup_table.txt"
 
-TEST_1169_FIX = True
-
 
 class StatusException(Exception):
     pass
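With the TEST_1169_* module flags removed, equivalent fault injection belongs at the test boundary instead. One possible pattern, sketched with unittest.mock and a hypothetical Detector class rather than the real EigerDetector:

    from unittest.mock import patch


    class Detector:
        def is_armed(self) -> bool:
            return True

        def stage(self) -> str:
            # Mirror the eiger logic: only arm when not already armed.
            return "already armed" if self.is_armed() else "arming"


    # Force the branch from the test itself, so the production module
    # carries no TEST_*_INJECT style hooks.
    with patch.object(Detector, "is_armed", return_value=False):
        assert Detector().stage() == "arming"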