From d56c2f39f06de31afe83a9845fe243e4a67b9d89 Mon Sep 17 00:00:00 2001 From: Eva Date: Fri, 18 Oct 2024 08:55:41 +0100 Subject: [PATCH 1/7] adopted copier template --- .copier-answers.yml | 14 ++ .devcontainer/devcontainer.json | 54 +++++ .github/CONTRIBUTING.rst | 2 +- .../actions/install_requirements/action.yml | 16 +- .github/dependabot.yml | 4 + .github/pages/index.html | 2 +- .github/pages/make_switcher.py | 4 +- .github/workflows/code.yml | 139 +++++++++-- .github/workflows/docs.yml | 5 +- .github/workflows/linkcheck.yml | 3 +- .gitignore | 19 +- .pre-commit-config.yaml | 13 +- .vscode/extensions.json | 10 + .vscode/launch.json | 25 ++ .vscode/settings.json | 22 ++ .vscode/tasks.json | 16 ++ LICENSE | 229 +++++++++++++++--- README.rst | 32 ++- catalog-info.yaml | 10 + docs/conf.py | 145 ++++------- .../0001-record-architecture-decisions.rst | 2 +- .../0002-switched-to-pip-skeleton.rst | 35 +++ docs/developer/how-to/build-docs.rst | 4 +- docs/developer/how-to/lint.rst | 14 +- docs/developer/how-to/test-container.rst | 25 ++ docs/developer/how-to/update-tools.rst | 6 +- docs/developer/index.rst | 1 + docs/developer/reference/standards.rst | 117 ++------- docs/developer/tutorials/dev-install.rst | 35 ++- docs/images/dls-favicon.ico | Bin 0 -> 99678 bytes docs/images/dls-logo.svg | 11 + docs/index.rst | 32 +-- docs/user/explanations/docs-structure.rst | 2 +- docs/user/how-to/run-container.rst | 15 ++ docs/user/index.rst | 6 +- docs/user/reference/api.rst | 106 +------- docs/user/tutorials/installation.rst | 2 +- pyproject.toml | 95 +++----- src/event_model/__init__.py | 11 + src/event_model/__main__.py | 16 ++ tests/test_cli.py | 9 + 41 files changed, 794 insertions(+), 514 deletions(-) create mode 100644 .copier-answers.yml create mode 100644 .devcontainer/devcontainer.json create mode 100644 .vscode/extensions.json create mode 100644 .vscode/launch.json create mode 100644 .vscode/settings.json create mode 100644 .vscode/tasks.json create mode 100644 catalog-info.yaml create mode 100644 docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst create mode 100644 docs/developer/how-to/test-container.rst create mode 100644 docs/images/dls-favicon.ico create mode 100644 docs/images/dls-logo.svg create mode 100644 docs/user/how-to/run-container.rst create mode 100644 src/event_model/__init__.py create mode 100644 src/event_model/__main__.py create mode 100644 tests/test_cli.py diff --git a/.copier-answers.yml b/.copier-answers.yml new file mode 100644 index 00000000..9b32e88e --- /dev/null +++ b/.copier-answers.yml @@ -0,0 +1,14 @@ +# Changes here will be overwritten by Copier +_commit: 1.0.0 +_src_path: gh:DiamondLightSource/python-copier-template +author_email: eva.lott@diamond.ac.uk +author_name: Eva Lott +component_owner: '' +description: '' +distribution_name: event-model +docker: false +docs_type: sphinx +git_platform: github.com +github_org: bluesky +package_name: event_model +repo_name: event-model diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..44de8d36 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,54 @@ +// For format details, see https://containers.dev/implementors/json_reference/ +{ + "name": "Python 3 Developer Container", + "build": { + "dockerfile": "../Dockerfile", + "target": "build", + // Only upgrade pip, we will install the project below + "args": { + "PIP_OPTIONS": "--upgrade pip" + } + }, + "remoteEnv": { + "DISPLAY": "${localEnv:DISPLAY}" + }, + // Add the URLs of 
features you want added when the container is built. + "features": { + "ghcr.io/devcontainers/features/common-utils:1": { + "username": "none", + "upgradePackages": false + } + }, + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python" + }, + "customizations": { + "vscode": { + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "ms-python.python", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters" + ] + } + }, + // Make sure the files we are mapping into the container exist on the host + "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", + "runArgs": [ + "--net=host", + "--security-opt=label=type:container_runtime_t" + ], + "mounts": [ + "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", + "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", + // map in home directory - not strictly necessary but useful + "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" + ], + // make the workspace folder the same inside and outside of the container + "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", + "workspaceFolder": "${localWorkspaceFolder}", + // After the container is created, install the python project in editable form + "postCreateCommand": "pip install -e '.[dev]'" +} diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index 3303ec01..f30c7e92 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -32,4 +32,4 @@ The `Developer Guide`_ contains information on setting up a development environment, running the tests and what standards the code and documentation should follow. -.. _Developer Guide: https://blueskyproject.io/event-model/main/developer/how-to/contribute.html +.. 
_Developer Guide: https://bluesky.github.io/event-model/main/developer/how-to/contribute.html diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml index cd5e50f5..79d1a71e 100644 --- a/.github/actions/install_requirements/action.yml +++ b/.github/actions/install_requirements/action.yml @@ -7,19 +7,19 @@ inputs: install_options: description: Parameters to pass to pip install required: true + artifact_name: + description: A user friendly name to give the produced artifacts + required: true python_version: description: Python version to install default: "3.x" - jsonschema_version: - description: version of the jsonschema pip package to install - default: 4 runs: using: composite steps: - name: Setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} @@ -28,7 +28,6 @@ runs: touch ${{ inputs.requirements_file }} # -c uses requirements.txt as constraints, see 'Validate requirements file' pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} - pip install --upgrade "jsonschema==${{inputs.jsonschema_version}}.*" shell: bash - name: Create lockfile @@ -36,13 +35,13 @@ runs: mkdir -p lockfiles pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} # delete the self referencing line and make sure it isn't blank - sed -i '/file:/d' lockfiles/${{ inputs.requirements_file }} + sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} shell: bash - name: Upload lockfiles - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4.0.0 with: - name: lockfiles + name: lockfiles-${{ inputs.python_version }}-${{ inputs.artifact_name }}-${{ github.sha }} path: lockfiles # This eliminates the class of problems where the requirements being given no @@ -59,4 +58,3 @@ runs: fi fi shell: bash - diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fb7c6ee6..2d1af873 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -9,6 +9,10 @@ updates: directory: "/" schedule: interval: "weekly" + groups: + github-artifacts: + patterns: + - actions/*-artifact - package-ecosystem: "pip" directory: "/" diff --git a/.github/pages/index.html b/.github/pages/index.html index 80f0a009..c495f39f 100644 --- a/.github/pages/index.html +++ b/.github/pages/index.html @@ -8,4 +8,4 @@ - \ No newline at end of file + diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py index 39c12772..ae227ab7 100755 --- a/.github/pages/make_switcher.py +++ b/.github/pages/make_switcher.py @@ -59,12 +59,12 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st def write_json(path: Path, repository: str, versions: str): org, repo_name = repository.split("/") struct = [ - dict(version=version, url=f"https://{org}.github.io/{repo_name}/{version}/") + {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} for version in versions ] text = json.dumps(struct, indent=2) print(f"JSON switcher:\n{text}") - path.write_text(text) + path.write_text(text, encoding="utf-8") def main(args=None): diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml index 19fb1feb..364ff43d 100644 --- a/.github/workflows/code.yml +++ b/.github/workflows/code.yml @@ -5,7 +5,8 @@ on: pull_request: env: # The target python version, which must match the Dockerfile version - CONTAINER_PYTHON: "3.12" + CONTAINER_PYTHON: "3.11" + DIST_WHEEL_PATH: dist-${{ github.sha }} jobs: lint: @@ -22,25 
+23,24 @@ jobs: with: requirements_file: requirements-dev-3.x.txt install_options: -e .[dev] + artifact_name: lint - name: Lint run: tox -e pre-commit,mypy - + test: if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository strategy: fail-fast: false matrix: os: ["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.9", "3.10", "3.11", "3.12"] - + python: ["3.8", "3.9", "3.10", "3.11"] install: ["-e .[dev]"] # Make one version be non-editable to test both paths of version code include: - os: "ubuntu-latest" - python: "3.8" + python: "3.7" install: ".[dev]" - jsonschema: 4 runs-on: ${{ matrix.os }} env: @@ -58,14 +58,15 @@ jobs: uses: ./.github/actions/install_requirements with: python_version: ${{ matrix.python }} - requirements_file: requirements-test-${{ matrix.os }}-${{matrix.python }}-${{ matrix.jsonschema }}.txt + requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt install_options: ${{ matrix.install }} + artifact_name: tests - name: List dependency tree run: pipdeptree - name: Run tests - run: pytest + run: tox -e pytest - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 @@ -90,9 +91,9 @@ jobs: pipx run build - name: Upload sdist and wheel as artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v4.0.0 with: - name: dist + name: ${{ env.DIST_WHEEL_PATH }} path: dist - name: Check for packaging errors @@ -101,38 +102,136 @@ jobs: - name: Install python packages uses: ./.github/actions/install_requirements with: - python_version: ${{ env.CONTAINER_PYTHON }} + python_version: ${{env.CONTAINER_PYTHON}} requirements_file: requirements.txt install_options: dist/*.whl + artifact_name: dist - name: Test module --version works using the installed wheel - run: python -m event_model --version + # If more than one module in src/ replace with module name to test + run: python -m $(ls --hide='*.egg-info' src | head -1) --version + + container: + needs: [lint, dist, test] + runs-on: ubuntu-latest + + permissions: + contents: read + packages: write + + env: + TEST_TAG: "testing" + + steps: + - name: Checkout + uses: actions/checkout@v4 + # image names must be all lower case + - name: Generate image repo name + run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV + + - name: Set lockfile location in environment + run: | + echo "DIST_LOCKFILE_PATH=lockfiles-${{ env.CONTAINER_PYTHON }}-dist-${{ github.sha }}" >> $GITHUB_ENV + + - name: Download wheel and lockfiles + uses: actions/download-artifact@v4.1.0 + with: + path: artifacts/ + pattern: "*dist*" + + - name: Log in to GitHub Docker Registry + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + + - name: Build and export to Docker local cache + uses: docker/build-push-action@v5 + with: + # Note build-args, context, file, and target must all match between this + # step and the later build-push-action, otherwise the second build-push-action + # will attempt to build the image again + build-args: | + PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl + context: artifacts/ + file: ./Dockerfile + target: runtime + load: true + tags: ${{ env.TEST_TAG }} + # If you have a long docker build (2+ minutes), uncomment the + # 
following to turn on caching. For short build times this + # makes it a little slower + #cache-from: type=gha + #cache-to: type=gha,mode=max + + - name: Test cli works in cached runtime image + run: docker run docker.io/library/${{ env.TEST_TAG }} --version + + - name: Create tags for publishing image + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.IMAGE_REPOSITORY }} + tags: | + type=ref,event=tag + type=raw,value=latest, enable=${{ github.ref_type == 'tag' }} + # type=edge,branch=main + # Add line above to generate image for every commit to given branch, + # and uncomment the end of if clause in next step + + - name: Push cached image to container registry + if: github.ref_type == 'tag' # || github.ref_name == 'main' + uses: docker/build-push-action@v5 + # This does not build the image again, it will find the image in the + # Docker cache and publish it + with: + # Note build-args, context, file, and target must all match between this + # step and the previous build-push-action, otherwise this step will + # attempt to build the image again + build-args: | + PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl + context: artifacts/ + file: ./Dockerfile + target: runtime + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} release: # upload to PyPI and make a release on every tag needs: [lint, dist, test] - if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }} + if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }} runs-on: ubuntu-latest env: - HAS_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN != '' }} + HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} steps: - - uses: actions/download-artifact@v4 + - name: Download wheel and lockfiles + uses: actions/download-artifact@v4.1.0 + with: + path: artifacts/ + pattern: "*dist*" - name: Fixup blank lockfiles # Github release artifacts can't be blank - run: for f in lockfiles/*; do [ -s $f ] || echo '# No requirements' >> $f; done + run: for f in ${{ env.DIST_LOCKFILE_PATH }}/*; do [ -s $f ] || echo '# No requirements' >> $f; done - name: Github Release # We pin to the SHA, not the tag, for security reasons. # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: softprops/action-gh-release@d99959edae48b5ffffd7b00da66dcdb0a33a52ee # v0.1.15 + uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 with: prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} files: | - dist/* - lockfiles/* + ${{ env.DIST_WHEEL_PATH }}/* + ${{ env.DIST_LOCKFILE_PATH }}/* generate_release_notes: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -141,4 +240,4 @@ jobs: if: ${{ env.HAS_PYPI_TOKEN }} uses: pypa/gh-action-pypi-publish@release/v1 with: - password: ${{ secrets.PYPI_API_TOKEN }} + password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index f170cbec..3c29ff94 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -29,6 +29,7 @@ jobs: with: requirements_file: requirements-dev-3.x.txt install_options: -e .[dev] + artifact_name: docs - name: Build docs run: tox -e docs @@ -46,8 +47,8 @@ jobs: if: github.event_name == 'push' && github.actor != 'dependabot[bot]' # We pin to the SHA, not the tag, for security reasons. 
# https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 + uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages - keep_files: true \ No newline at end of file + keep_files: true diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml index 1586799b..7f651a27 100644 --- a/.github/workflows/linkcheck.yml +++ b/.github/workflows/linkcheck.yml @@ -19,9 +19,10 @@ jobs: with: requirements_file: requirements-dev-3.x.txt install_options: -e .[dev] + artifact_name: link_check - name: Check links run: tox -e docs build -- -b linkcheck - name: Keepalive Workflow - uses: gautamkrishnar/keepalive-workflow@v2 \ No newline at end of file + uses: gautamkrishnar/keepalive-workflow@v1 diff --git a/.gitignore b/.gitignore index eb222117..a37be99b 100644 --- a/.gitignore +++ b/.gitignore @@ -23,9 +23,6 @@ var/ *.egg-info/ .installed.cfg *.egg -*.egg-info/ -.installed.cfg -*.egg **/_version.py # PyInstaller @@ -70,17 +67,5 @@ venv* # further build artifacts lockfiles/ -# Editor files -#mac -.DS_Store -*~ - -#vim -*.swp -*.swo - -#pycharm -.idea/* - -#vscode -.vscode/* \ No newline at end of file +# ruff cache +.ruff_cache/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 136b6801..5bc9f001 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,16 +15,9 @@ repos: entry: black --check --diff types: [python] - - id: flake8 - name: Run flake8 + - id: ruff + name: Run ruff stages: [commit] language: system - entry: flake8 - types: [python] - - - id: mypy - name: Run mypy - stages: [commit] - language: system - entry: mypy --install-types --non-interactive + entry: ruff types: [python] diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..a1227b34 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,10 @@ +{ + "recommendations": [ + "ms-vscode-remote.remote-containers", + "ms-python.python", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters", + "charliermarsh.Ruff" + ] +} diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..3cda7432 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Debug Unit Test", + "type": "python", + "request": "launch", + "justMyCode": false, + "program": "${file}", + "purpose": [ + "debug-test" + ], + "console": "integratedTerminal", + "env": { + // The default config in pyproject.toml's "[tool.pytest.ini_options]" adds coverage. + // Cannot have coverage and debugging at the same time. 
+ // https://github.com/microsoft/vscode-python/issues/693 + "PYTEST_ADDOPTS": "--no-cov" + }, + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..72259a62 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,22 @@ +{ + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": false, + "python.linting.mypyEnabled": true, + "python.linting.enabled": true, + "python.testing.pytestArgs": [ + "--cov=event_model", + "--cov-report", + "xml:cov.xml" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.formatting.provider": "black", + "python.languageServer": "Pylance", + "editor.formatOnSave": true, + "[python]": { + "editor.codeActionsOnSave": { + "source.fixAll.ruff": false, + "source.organizeImports.ruff": true + } + } +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..c999e864 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,16 @@ +// See https://go.microsoft.com/fwlink/?LinkId=733558 +// for the documentation about the tasks.json format +{ + "version": "2.0.0", + "tasks": [ + { + "type": "shell", + "label": "Tests, lint and docs", + "command": "tox -p", + "options": { + "cwd": "${workspaceRoot}" + }, + "problemMatcher": [], + } + ] +} diff --git a/LICENSE b/LICENSE index eda57e7d..8dada3ed 100644 --- a/LICENSE +++ b/LICENSE @@ -1,28 +1,201 @@ -BSD 3-Clause License - -Copyright (c) 2015, Brookhaven National Laboratory - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
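The appendix above says the notice should be enclosed in the comment syntax appropriate for the file format it is applied to. As a purely illustrative sketch for a Python source file (the year and owner are placeholder assumptions, not values taken from this patch):

.. code:: python

    # Copyright 2024 Example Owner  (illustrative stand-ins for {yyyy} and
    # {name of copyright owner})
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    #     http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.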
diff --git a/README.rst b/README.rst index 01de2697..eba650fe 100644 --- a/README.rst +++ b/README.rst @@ -1,15 +1,32 @@ -Event Model -=========================== +event_model +============================================================================= |code_ci| |docs_ci| |coverage| |pypi_version| |license| +This is where you should write a short paragraph that describes what your module does, +how it does it, and why people should use it. + ============== ============================================================== PyPI ``pip install event-model`` Source code https://github.com/bluesky/event-model -Documentation https://blueskyproject.io/event-model +Documentation https://bluesky.github.io/event-model Releases https://github.com/bluesky/event-model/releases ============== ============================================================== +This is where you should put some images or code snippets that illustrate +some relevant examples. If it is a library then you might put some +introductory code here: + +.. code-block:: python + + from event_model import __version__ + + print(f"Hello event_model {__version__}") + +Or if it is a commandline tool then you might put some example commands here:: + + $ python -m event_model --version + .. |code_ci| image:: https://github.com/bluesky/event-model/actions/workflows/code.yml/badge.svg?branch=main :target: https://github.com/bluesky/event-model/actions/workflows/code.yml :alt: Code CI @@ -26,12 +43,13 @@ Releases https://github.com/bluesky/event-model/releases :target: https://pypi.org/project/event-model :alt: Latest PyPI version -.. |license| image:: https://img.shields.io/badge/License-BSD-blue.svg - :target: https://opensource.org/license/bsd-3-clause/ - :alt: BSD License +.. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg + :target: https://opensource.org/licenses/Apache-2.0 + :alt: Apache License + .. Anything below this line is used when viewing README.rst and will be replaced when included in index.rst -See https://blueskyproject.io/event-model for more detailed documentation. +See https://bluesky.github.io/event-model for more detailed documentation. diff --git a/catalog-info.yaml b/catalog-info.yaml new file mode 100644 index 00000000..e4206bb5 --- /dev/null +++ b/catalog-info.yaml @@ -0,0 +1,10 @@ +apiVersion: backstage.io/v1alpha1 +kind: Component +metadata: + name: event-model + title: event-model + description: +spec: + type: documentation + lifecycle: experimental + owner: \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index 7c501e44..a0b50a24 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -16,9 +16,6 @@ # General information about the project. project = "event-model" -copyright = "2019, Brookhaven National Lab" -author = "Brookhaven National Lab" - # The full version, including alpha/beta/rc tags. release = event_model.__version__ @@ -32,13 +29,6 @@ else: version = release -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. 
-language = "en" - extensions = [ # Use this for generating API docs "sphinx.ext.autodoc", @@ -54,21 +44,11 @@ "sphinx_copybutton", # For the card element "sphinx_design", - "sphinx.ext.autosummary", - "sphinx.ext.mathjax", - "sphinx.ext.githubpages", - "matplotlib.sphinxext.plot_directive", - "sphinx_copybutton", - "IPython.sphinxext.ipython_directive", - "IPython.sphinxext.ipython_console_highlighting", ] -napoleon_google_docstring = False -napoleon_numpy_docstring = True - # If true, Sphinx will warn about all references where the target cannot # be found. -nitpicky = False +nitpicky = True # A list of (type, target) tuples (by default empty) that should be ignored when # generating warnings in "nitpicky mode". Note that type should include the @@ -113,34 +93,21 @@ # These patterns also affect html_static_path and html_extra_path exclude_patterns = ["_build"] -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = False - # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" -# Example configuration for intersphinx: refer to the Python standard library. # This means you can link things like `str` and `asyncio` to the relevant # docs in the python documentation. -intersphinx_mapping = { - "python": ("https://docs.python.org/3/", None), - "cachetools": ("https://cachetools.readthedocs.io/en/stable/", None), - "numpy": ("https://docs.scipy.org/doc/numpy/", None), - "scipy": ("https://docs.scipy.org/doc/scipy/reference/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None), - "matplotlib": ("https://matplotlib.org", None), - "jsonschema": ("https://python-jsonschema.readthedocs.io/en/stable/", None), -} +intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)} # A dictionary of graphviz graph attributes for inheritance diagrams. -inheritance_graph_attrs = dict(rankdir="TB") +inheritance_graph_attrs = {"rankdir": "TB"} # Common links that should be available on every page rst_epilog = """ -.. _NSLS: https://www.bnl.gov/nsls2 +.. _Diamond Light Source: http://www.diamond.ac.uk .. _black: https://github.com/psf/black -.. _flake8: https://flake8.pycqa.org/en/latest/ -.. _isort: https://github.com/PyCQA/isort +.. _ruff: https://beta.ruff.rs/docs/ .. _mypy: http://mypy-lang.org/ .. _pre-commit: https://pre-commit.com/ """ @@ -153,52 +120,15 @@ copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " copybutton_prompt_is_regexp = True -# -- Options for manual page output --------------------------------------- - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "event-model.tex", - "Bluesky Event Model Documentation", - "Contributors", - "manual", - ), -] - - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, "event-model", "Bluesky Event Model Documentation", [author], 1) -] - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "event-model", - "Bluesky Event Model Documentation", - author, - "event-model", - "Data model used by the bluesky ecosystem", - "Miscellaneous", - ), -] - # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "pydata_sphinx_theme" -github_repo = project +github_repo = "event-model" github_user = "bluesky" -switcher_json = f"https://blueskyproject.io/{github_repo}/switcher.json" +switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json" switcher_exists = requests.get(switcher_json).ok if not switcher_exists: print( @@ -218,34 +148,41 @@ # Point 3 makes checking switcher difficult, because the updated skeleton # will fix the switcher at the end of the docs workflow, but never gets a chance # to complete as the docs build warns and fails. -html_theme_options = dict( - logo=dict( - text=project, - ), - use_edit_page_button=True, - github_url=f"https://github.com/{github_user}/{github_repo}", - icon_links=[ - dict( - name="PyPI", - url=f"https://pypi.org/project/{project}", - icon="fas fa-cube", - ) +html_theme_options = { + "logo": { + "text": project, + }, + "use_edit_page_button": True, + "github_url": f"https://github.com/{github_user}/{github_repo}", + "icon_links": [ + { + "name": "PyPI", + "url": f"https://pypi.org/project/{project}", + "icon": "fas fa-cube", + } + ], + "switcher": { + "json_url": switcher_json, + "version_match": version, + }, + "check_switcher": False, + "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], + "external_links": [ + { + "name": "Release Notes", + "url": f"https://github.com/{github_user}/{github_repo}/releases", + } ], - switcher=dict( - json_url=switcher_json, - version_match=version, - ), - check_switcher=False, - navbar_end=["theme-switcher", "icon-links", "version-switcher"], -) + "navigation_with_keys": False, +} # A dictionary of values to pass into the template engine’s context for all pages -html_context = dict( - github_user=github_user, - github_repo=project, - github_version=version, - doc_path="docs", -) +html_context = { + "github_user": github_user, + "github_repo": project, + "github_version": version, + "doc_path": "docs", +} # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. html_show_sphinx = False @@ -254,5 +191,5 @@ html_show_copyright = False # Logo -html_logo = "images/bluesky-logo-dark.svg" -# html_favicon = "images/dls-favicon.ico" +html_logo = "images/dls-logo.svg" +html_favicon = "images/dls-favicon.ico" diff --git a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst index 96217d58..b2d3d0fe 100644 --- a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst +++ b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst @@ -1,7 +1,7 @@ 1. 
Record architecture decisions
 ================================

-Date: 2023-02-18
+Date: 2022-02-18

 Status
 ------
diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst
new file mode 100644
index 00000000..33d56981
--- /dev/null
+++ b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst
@@ -0,0 +1,35 @@
+2. Adopt python_copier_template for project structure
+=====================================================
+
+Date: 2022-02-18
+
+Status
+------
+
+Accepted
+
+Context
+-------
+
+We should use the following `python_copier_template <https://github.com/DiamondLightSource/python-copier-template>`_.
+The template will ensure consistency in developer
+environments and package management.
+
+Decision
+--------
+
+We have switched to using the skeleton.
+
+Consequences
+------------
+
+This module will use a fixed set of tools as developed in python_copier_template
+and can pull from this template to update the packaging to the latest techniques.
+
+As such, the developer environment may have changed; the following could be
+different:
+
+- linting
+- formatting
+- pip venv setup
+- CI/CD
diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst
index a2a55ca3..11a5e638 100644
--- a/docs/developer/how-to/build-docs.rst
+++ b/docs/developer/how-to/build-docs.rst
@@ -33,6 +33,6 @@ You can view the pages at localhost::
 If you are making changes to source code too, you can tell it to watch
 changes in this directory too::

-    $ tox -e docs autobuild -- --watch event_model/
+    $ tox -e docs autobuild -- --watch src

-.. _sphinx: https://www.sphinx-doc.org/
\ No newline at end of file
+.. _sphinx: https://www.sphinx-doc.org/
diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst
index 099dec5f..2df258d8 100644
--- a/docs/developer/how-to/lint.rst
+++ b/docs/developer/how-to/lint.rst
@@ -1,7 +1,7 @@
 Run linting using pre-commit
 ============================

-Code linting is handled by black_, flake8_ and isort_ run under pre-commit_.
+Code linting is handled by black_ and ruff_ run under pre-commit_.

 Running pre-commit
 ------------------
@@ -26,8 +26,14 @@ repository::

     $ black .

-Likewise with isort::
+Likewise with ruff::

-    $ isort .
+    $ ruff --fix .

-If you get any flake8 issues you will have to fix those manually.
\ No newline at end of file
+Ruff may not be able to automatically fix all issues; in this case, you will have to fix those manually.
+
+VSCode support
+--------------
+
+The ``.vscode/settings.json`` will run black formatting as well as
+ruff checking on save. Issues will be highlighted in the editor window.
diff --git a/docs/developer/how-to/test-container.rst b/docs/developer/how-to/test-container.rst
new file mode 100644
index 00000000..a4a43a6f
--- /dev/null
+++ b/docs/developer/how-to/test-container.rst
@@ -0,0 +1,25 @@
+Container Local Build and Test
+==============================
+
+CI builds a runtime container for the project. The local test
+checks available via ``tox -p`` do not verify this, because not
+all developers will have docker installed locally.
+
+If CI is failing to build the container, then it is best to fix and
+test the problem locally. This would require that you have docker
+or podman installed on your local workstation.
+
+In the following examples the command ``docker`` is interchangeable with
+``podman``, depending on which container CLI you have installed.
+
+To build the container and call it ``test``::
+
+    cd <project root>
+    docker build -t test .
+
+To verify that the container runs::
+
+    docker run -it test --help
+
+You can pass any other command line parameters to your application
+instead of --help.
diff --git a/docs/developer/how-to/update-tools.rst b/docs/developer/how-to/update-tools.rst
index 7c78f94a..c1075ee8 100644
--- a/docs/developer/how-to/update-tools.rst
+++ b/docs/developer/how-to/update-tools.rst
@@ -6,11 +6,11 @@ Python project structure which provides a means to keep tools and
 techniques in sync between multiple Python projects. To update to the
 latest version of the skeleton, run::

-    $ git pull --rebase=false https://github.com/bluesky/python3-pip-skeleton
+    $ git pull --rebase=false https://github.com/DiamondLightSource/python3-pip-skeleton

 Any merge conflicts will indicate an area where something has changed that
 conflicts with the setup of the current module. Check the `closed pull requests
-<https://github.com/bluesky/python3-pip-skeleton/pulls?q=is%3Apr+is%3Aclosed>`_
+<https://github.com/DiamondLightSource/python3-pip-skeleton/pulls?q=is%3Apr+is%3Aclosed>`_
 of the skeleton module for more details.

-.. _python3-pip-skeleton: https://blueskyproject.io/python3-pip-skeleton
+.. _python3-pip-skeleton: https://DiamondLightSource.github.io/python3-pip-skeleton
diff --git a/docs/developer/index.rst b/docs/developer/index.rst
index 08d01270..8a6369b9 100644
--- a/docs/developer/index.rst
+++ b/docs/developer/index.rst
@@ -33,6 +33,7 @@ side-bar.
     how-to/update-tools
     how-to/make-release
     how-to/pin-requirements
+    how-to/test-container

 +++
diff --git a/docs/developer/reference/standards.rst b/docs/developer/reference/standards.rst
index 01265b71..5a1fd478 100644
--- a/docs/developer/reference/standards.rst
+++ b/docs/developer/reference/standards.rst
@@ -10,8 +10,7 @@ Code Standards
 The code in this repository conforms to standards set by the following tools:

 - black_ for code formatting
-- flake8_ for style checks
-- isort_ for import ordering
+- ruff_ for style checks
 - mypy_ for static type checking

 .. seealso::
@@ -24,106 +23,26 @@ Documentation Standards
 -----------------------

 Docstrings are pre-processed using the Sphinx Napoleon extension. As such,
-numpydoc-style_ is considered as standard for this repository. Please use type
+google-style_ is considered as standard for this repository. Please use type
 hints in the function signature for types. For example:

 .. code:: python

-    def foo(var1, var2, *args, long_var_name="hi", only_seldom_used_keyword=0, **kwargs):
-        r"""Summarize the function in one line.
-
-        Several sentences providing an extended description. Refer to
-        variables using back-ticks, e.g. `var`.
-
-        Parameters
-        ----------
-        var1 : array_like
-            Array_like means all those objects -- lists, nested lists, etc. --
-            that can be converted to an array. We can also refer to
-            variables like `var1`.
-        var2 : int
-            The type above can either refer to an actual Python type
-            (e.g. ``int``), or describe the type of the variable in more
-            detail, e.g. ``(N,) ndarray`` or ``array_like``.
-        *args : iterable
-            Other arguments.
-        long_var_name : {'hi', 'ho'}, optional
-            Choices in brackets, default first when optional.
-
-        Returns
-        -------
-        type
-            Explanation of anonymous return value of type ``type``.
-        describe : type
-            Explanation of return value named `describe`.
-        out : type
-            Explanation of `out`.
-        type_without_description
-
-        Other Parameters
-        ----------------
-        only_seldom_used_keyword : int, optional
-            Infrequently used parameters can be described under this optional
-            section to prevent cluttering the Parameters section.
-        **kwargs : dict
-            Other infrequently used keyword arguments. Note that all keyword
-            arguments appearing after the first parameter specified under the
-            Other Parameters section, should also be described under this
-            section.
-
-        Raises
-        ------
-        BadException
-            Because you shouldn't have done that.
-
-        See Also
-        --------
-        numpy.array : Relationship (optional).
-        numpy.ndarray : Relationship (optional), which could be fairly long, in
-            which case the line wraps here.
-        numpy.dot, numpy.linalg.norm, numpy.eye
-
-        Notes
-        -----
-        Notes about the implementation algorithm (if needed).
-
-        This can have multiple paragraphs.
-
-        You may include some math:
-
-        .. math:: X(e^{j\omega } ) = x(n)e^{ - j\omega n}
-
-        And even use a Greek symbol like :math:`\omega` inline.
-
-        References
-        ----------
-        Cite the relevant literature, e.g. [1]_. You may also cite these
-        references in the notes section above.
-
-        .. [1] O. McNoleg, "The integration of GIS, remote sensing,
-           expert systems and adaptive co-kriging for environmental habitat
-           modelling of the Highland Haggis using object-oriented, fuzzy-logic
-           and neural-network techniques," Computers & Geosciences, vol. 22,
-           pp. 585-588, 1996.
-
-        Examples
-        --------
-        These are written in doctest format, and should illustrate how to
-        use the function.
-
-        >>> a = [1, 2, 3]
-        >>> print([x + 3 for x in a])
-        [4, 5, 6]
-        >>> print("a\nb")
-        a
-        b
-
-        """
-        # There should be no blank lines after closing the docstring for functions,
-        # methods, and modules.
-        pass
-
-.. _numpydoc-style: https://numpydoc.readthedocs.io/en/latest/format.html
+    def func(arg1: str, arg2: int) -> bool:
+        """Summary line.
+
+        Extended description of function.
+
+        Args:
+            arg1: Description of arg1
+            arg2: Description of arg2
+
+        Returns:
+            Description of return value
+        """
+        return True
+
+.. _google-style: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/index.html#google-vs-numpy

 Documentation is contained in the ``docs`` directory and extracted from
 docstrings of the API.
@@ -141,4 +60,4 @@ Docs follow the underlining convention::

 .. seealso::

-    How-to guide `../how-to/build-docs`
\ No newline at end of file
+    How-to guide `../how-to/build-docs`
diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst
index 0fc66de5..0f467988 100644
--- a/docs/developer/tutorials/dev-install.rst
+++ b/docs/developer/tutorials/dev-install.rst
@@ -15,13 +15,33 @@ First clone the repository locally using `Git
 Install dependencies
 --------------------

-You should install into a `venv` (which requires python 3.8 or later):
-
-.. code::
-
-    $ cd event-model
-    $ python3 -m venv venv
-    $ source venv/bin/activate
-    $ pip install -e '.[dev]'
+You can choose to either develop on the host machine using a `venv` (which
+requires python 3.8 or later) or to run in a container under `VSCode
+<https://code.visualstudio.com/docs/devcontainers/containers>`_

+.. tab-set::
+
+    .. tab-item:: Local virtualenv
+
+        .. code::
+
+            $ cd event-model
+            $ python3 -m venv venv
+            $ source venv/bin/activate
+            $ pip install -e '.[dev]'
+
+    .. tab-item:: VSCode devcontainer
+
+        .. code::
+
+            $ code event-model
+            # Click on 'Reopen in Container' when prompted
+            # Open a new terminal
+
+        .. note::
+
+            See the epics-containers_ documentation for more complex
+            use cases, such as integration with podman.

 See what was installed
 ----------------------
@@ -43,3 +63,6 @@ This will run in parallel the following checks:
 - `../how-to/run-tests`
 - `../how-to/static-analysis`
 - `../how-to/lint`
+
+
+.. _epics-containers: https://epics-containers.github.io/main/user/tutorials/devcontainer.html
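As a quick smoke test that either install worked, you can ask the package for its version from the command line. This is a minimal sketch; it assumes the ``event_model.__main__`` entry point and ``__version__`` attribute added elsewhere in this patch are importable from your environment::

    $ python -m event_model --version
    $ python -c "import event_model; print(event_model.__version__)"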
diff --git a/docs/images/dls-favicon.ico b/docs/images/dls-favicon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..9a11f508ef8aed28f14c5ce0d8408e1ec8b614a1
GIT binary patch
literal 99678
[base85-encoded binary data for the Diamond Light Source favicon omitted]

literal 0
HcmV?d00001

diff --git a/docs/images/dls-logo.svg b/docs/images/dls-logo.svg
new file mode 100644
index 00000000..0af1a177
--- /dev/null
+++ b/docs/images/dls-logo.svg
@@ -0,0 +1,11 @@
[11 added lines of SVG markup for the Diamond Light Source logo; the XML was stripped in extraction]
\ No newline at end of file
 .. include:: ../README.rst
    :end-before: when included in index.rst
 
-===================================
- Bluesky Event Model Documentation
-===================================
-
-A primary design goal of bluesky is to enable better research by recording
-rich metadata alongside measured data for use in later analysis. Documents are
-how we do this.
-
-
-This repository contains the formal schemas for bluesky's streaming data model
-and some Python tooling for composing, validating, and transforming documents
-in the model.
-
-Where is my data?
-=================
-
-For the full details and schema please see the :ref:`data_model` section. This is a very quick guide to where
-you should look for / put different kinds of information
-
-* Information about your sample that you know before the measurement → *Start* Document
-* What experiment you intended to do → *Start* Document
-* Who you are / where you are → *Start* Document
-* References to external databases → *Start* Document
-* The Data™ → *Event* Document
-* Detector calibrations, dark frames, flat fields , or masks → *Event* Document (probably in its own stream)
-* The shape / data type / units of The Data™ → *Event Descriptor* Document in the *data_keys* entry
-* Anything you read from the controls system that is not device configuration → *Event* Document
-* Device configuration data → *Event Descriptor* Document in the *configuration* entry
-
-
 How the documentation is structured
-===================================
+-----------------------------------
 
 The documentation is split into 2 sections:
 
diff --git a/docs/user/explanations/docs-structure.rst b/docs/user/explanations/docs-structure.rst
index e3487c27..f25a09ba 100644
--- a/docs/user/explanations/docs-structure.rst
+++ b/docs/user/explanations/docs-structure.rst
@@ -1,4 +1,4 @@
-About The Documentation
+About the documentation
 -----------------------
 
 :material-regular:`format_quote;2em`
diff --git a/docs/user/how-to/run-container.rst b/docs/user/how-to/run-container.rst
new file mode 100644
index 00000000..8c16ce8c
--- /dev/null
+++ b/docs/user/how-to/run-container.rst
@@ -0,0 +1,15 @@
+Run in a container
+==================
+
+Pre-built containers with event-model and its dependencies already
+installed are available on `GitHub Container Registry
+<https://ghcr.io/bluesky/event-model>`_.
+
+Starting the container
+----------------------
+
+To pull the container from GitHub Container Registry and run it::
+
+    $ docker run ghcr.io/bluesky/event-model:main --version
+
+To get a released version, use a numbered release instead of ``main``.
diff --git a/docs/user/index.rst b/docs/user/index.rst
index bf49113a..2c94a0c0 100644
--- a/docs/user/index.rst
+++ b/docs/user/index.rst
@@ -25,8 +25,7 @@ side-bar.
    :caption: How-to Guides
    :maxdepth: 1
 
-   how-to/use-cases
-
+   how-to/run-container
 
 +++
 
@@ -39,8 +38,6 @@ side-bar.
    :maxdepth: 1
 
    explanations/docs-structure
-   explanations/data-model
-   explanations/external
 
 +++
 
@@ -53,7 +50,6 @@ side-bar.
    :maxdepth: 1
 
    reference/api
-   reference/release-history
    ../genindex
 
 +++
diff --git a/docs/user/reference/api.rst b/docs/user/reference/api.rst
index ce598f80..491f7a21 100644
--- a/docs/user/reference/api.rst
+++ b/docs/user/reference/api.rst
@@ -1,6 +1,10 @@
-=================
-API Documentation
-=================
+API
+===
+
+.. 
automodule:: event_model + + ``event_model`` + ----------------------------------- This is the internal API reference for event_model @@ -8,99 +12,3 @@ This is the internal API reference for event_model :type: str Version number as calculated by https://github.com/pypa/setuptools_scm - - -Schemas and Names -================= - -The ``event-model`` Python package contains tooling for composing, validating, -and transforming documents in the model. - -.. autoclass:: event_model.DocumentNames - :members: - :undoc-members: - -There are two dictionaries, :data:`event_model.schemas` and -:data:`event_model.schema_validators`, which are keyed on the members of the -:class:`event_model.DocumentNames` enum and which are mapped, respectively, to -a schema and an associated :class:`jsonschema.IValidator`. - - -Routers -======= - - -.. autoclass:: event_model.RunRouter - :members: - :undoc-members: - -.. autoclass:: event_model.SingleRunDocumentRouter - :members: - :undoc-members: - -.. autoclass:: event_model.DocumentRouter - :members: - :undoc-members: - -.. autoclass:: event_model.Filler - :members: - -.. autoclass:: event_model.NoFiller - :members: - -.. autofunction:: event_model.register_coercion - -.. autofunction:: event_model.as_is - - -.. autofunction:: event_model.force_numpy - - -Document Minting -================ - -To use these functions start with :func:`.compose_run` which will -return a :obj:`.ComposeRunBundle`. - -.. autofunction:: event_model.compose_run - -.. autoclass:: event_model.ComposeRunBundle - -.. autofunction:: event_model.compose_descriptor - -.. autoclass:: event_model.ComposeDescriptorBundle - -.. autofunction:: event_model.compose_event - -.. autofunction:: event_model.compose_event_page - -.. autofunction:: event_model.compose_resource - -.. autoclass:: event_model.ComposeResourceBundle - -.. autofunction:: event_model.compose_datum - -.. autofunction:: event_model.compose_datum_page - - -.. autofunction:: event_model.compose_stop - - -Document Munging -================ - - -.. autofunction:: event_model.pack_event_page - -.. autofunction:: event_model.unpack_event_page - -.. autofunction:: event_model.pack_datum_page - -.. autofunction:: event_model.unpack_datum_page - -.. autofunction:: event_model.sanitize_doc - -.. autofunction:: event_model.verify_filled - -.. autoclass:: event_model.NumpyEncoder - :members: \ No newline at end of file diff --git a/docs/user/tutorials/installation.rst b/docs/user/tutorials/installation.rst index e4732962..7b95d051 100644 --- a/docs/user/tutorials/installation.rst +++ b/docs/user/tutorials/installation.rst @@ -35,4 +35,4 @@ from github:: The library should now be installed and the commandline interface on your path. 
You can check the version that has been installed by typing:: - $ python -m event_model --version + $ event-model --version diff --git a/pyproject.toml b/pyproject.toml index 48c4ae8c..33bfbe4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,109 +6,72 @@ build-backend = "setuptools.build_meta" name = "event-model" classifiers = [ "Development Status :: 3 - Alpha", - "License :: OSI Approved :: BSD License", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", ] -description = "Data model used by the bluesky ecosystem" +description = "" dependencies = [ - "importlib-resources", - "jsonschema>=4", - "numpy", - "typing_extensions" -] + "typing-extensions;python_version<'3.8'", +] # Add project dependencies here, e.g. ["click", "numpy"] dynamic = ["version"] license.file = "LICENSE" readme = "README.rst" -requires-python = ">=3.8" +requires-python = ">=3.7" [project.optional-dependencies] dev = [ - "dask[array]", "black", "mypy", - "flake8", - "flake8-isort", - "Flake8-pyproject", "pipdeptree", "pre-commit", "pydata-sphinx-theme>=0.12", "pytest", "pytest-cov", - "sphinx", + "ruff", "sphinx-autobuild", "sphinx-copybutton", "sphinx-design", "tox-direct", "types-mock", - "types-requests", - - # These are dependencies of various sphinx extensions for documentation. - "ipython", - "matplotlib", - "numpydoc", - - # For schema generation. - "pydantic>=2.6", ] [project.scripts] -# event_model = "event_model.__main__:main" +event-model = "event_model.__main__:main" [project.urls] GitHub = "https://github.com/bluesky/event-model" [[project.authors]] # Further authors may be added by duplicating this section -email = "dallan@bnl.gov" -name = "Brookhaven National Lab" +email = "eva.lott@diamond.ac.uk" +name = "Eva Lott" + [tool.setuptools_scm] +write_to = "src/event_model/_version.py" [tool.mypy] -# Ignore missing stubs in imported modules -ignore_missing_imports = true - - -[tool.isort] -float_to_top = true -profile = "black" - -[tool.flake8] -extend-ignore = [ - # See https://github.com/PyCQA/pycodestyle/issues/373 - "E203", - # support typing.overload decorator - "F811", - # allow Annotated[typ, some_func("some string")] - "F722", - # allow one line class definitions `class X(Y): ...` - # black formats this way, but flake8 is behind - "E701", -] -max-line-length = 88 -exclude = [".tox", ".venv", "venv"] - +ignore_missing_imports = true # Ignore missing stubs in imported modules [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error addopts = """ - --tb=native -vv - --cov=event_model --cov-report term --cov-report xml:cov.xml + --tb=native -vv --doctest-modules --doctest-glob="*.rst" """ # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings -filterwarnings = [ - "error", - "ignore::DeprecationWarning" -] +filterwarnings = "error" # Doctest python code in docs, python code in src docstrings, test functions in tests -testpaths = "event_model/tests" +testpaths = "docs src tests" + +[tool.coverage.run] +data_file = "/tmp/event_model.coverage" [tool.coverage.paths] # Tests are run from installed location, map back to the src directory -source = ["event_model", "**/site-packages/"] +source = ["src", "**/site-packages/"] # tox must currently be configured via an 
embedded ini string # See: https://github.com/tox-dev/tox/issues/999 @@ -128,8 +91,20 @@ allowlist_externals = sphinx-build sphinx-autobuild commands = - pytest: pytest {posargs} - mypy: mypy event_model {posargs} + pytest: pytest --cov=event_model --cov-report term --cov-report xml:cov.xml {posargs} + mypy: mypy src tests {posargs} pre-commit: pre-commit run --all-files {posargs} - docs: sphinx-{posargs:build -E --keep-going} -T docs build/html + docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html """ + + +[tool.ruff] +src = ["src", "tests"] +line-length = 88 +select = [ + "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 + "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e + "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f + "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w + "I001", # isort +] diff --git a/src/event_model/__init__.py b/src/event_model/__init__.py new file mode 100644 index 00000000..33a32fd9 --- /dev/null +++ b/src/event_model/__init__.py @@ -0,0 +1,11 @@ +import sys + +if sys.version_info < (3, 8): + from importlib_metadata import version # noqa +else: + from importlib.metadata import version # noqa + +__version__ = version("event-model") +del version + +__all__ = ["__version__"] diff --git a/src/event_model/__main__.py b/src/event_model/__main__.py new file mode 100644 index 00000000..0564d154 --- /dev/null +++ b/src/event_model/__main__.py @@ -0,0 +1,16 @@ +from argparse import ArgumentParser + +from . import __version__ + +__all__ = ["main"] + + +def main(args=None): + parser = ArgumentParser() + parser.add_argument("-v", "--version", action="version", version=__version__) + args = parser.parse_args(args) + + +# test with: python -m event_model +if __name__ == "__main__": + main() diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 00000000..ab60b9bb --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,9 @@ +import subprocess +import sys + +from event_model import __version__ + + +def test_cli_version(): + cmd = [sys.executable, "-m", "event_model", "--version"] + assert subprocess.check_output(cmd).decode().strip() == __version__ From defaf810f4b63b8ab04184517cc1ccc1e5ba6ffe Mon Sep 17 00:00:00 2001 From: Eva Date: Fri, 18 Oct 2024 09:01:19 +0100 Subject: [PATCH 2/7] updated to latest copier template --- .copier-answers.yml | 5 +- .devcontainer/devcontainer.json | 52 ++-- .github/CONTRIBUTING.md | 30 +++ .github/CONTRIBUTING.rst | 35 --- .github/ISSUE_TEMPLATE/bug_report.md | 21 ++ .github/ISSUE_TEMPLATE/issue.md | 13 + .../pull_request_template.md | 8 + .../actions/install_requirements/action.yml | 66 ++--- .github/dependabot.yml | 8 +- .github/pages/make_switcher.py | 29 +-- .github/workflows/_check.yml | 27 ++ .github/workflows/_dist.yml | 36 +++ .github/workflows/{docs.yml => _docs.yml} | 28 +- .github/workflows/_pypi.yml | 17 ++ .github/workflows/_release.yml | 32 +++ .github/workflows/_test.yml | 62 +++++ .github/workflows/_tox.yml | 22 ++ .github/workflows/ci.yml | 59 +++++ .github/workflows/code.yml | 243 ------------------ .github/workflows/docs_clean.yml | 43 ---- .github/workflows/linkcheck.yml | 28 -- .github/workflows/periodic.yml | 13 + .gitignore | 2 +- .pre-commit-config.yaml | 19 +- .vscode/extensions.json | 5 - .vscode/launch.json | 8 +- .vscode/settings.json | 22 +- Dockerfile | 13 + README.md | 37 +++ README.rst | 55 ---- catalog-info.yaml | 10 - docs/_api.rst | 16 ++ docs/_templates/custom-module-template.rst | 
37 +++ docs/conf.py | 52 ++-- docs/developer/explanations/decisions.rst | 17 -- .../0001-record-architecture-decisions.rst | 26 -- .../0002-switched-to-pip-skeleton.rst | 35 --- docs/developer/how-to/build-docs.rst | 38 --- docs/developer/how-to/contribute.rst | 1 - docs/developer/how-to/lint.rst | 39 --- docs/developer/how-to/make-release.rst | 16 -- docs/developer/how-to/pin-requirements.rst | 74 ------ docs/developer/how-to/run-tests.rst | 12 - docs/developer/how-to/static-analysis.rst | 8 - docs/developer/how-to/test-container.rst | 25 -- docs/developer/how-to/update-tools.rst | 16 -- docs/developer/index.rst | 64 ----- docs/developer/reference/standards.rst | 63 ----- docs/developer/tutorials/dev-install.rst | 68 ----- docs/explanations.md | 10 + docs/explanations/decisions.md | 12 + .../0001-record-architecture-decisions.md | 18 ++ ...0002-switched-to-python-copier-template.md | 28 ++ docs/explanations/decisions/COPYME | 19 ++ docs/genindex.md | 3 + docs/genindex.rst | 5 - docs/how-to.md | 10 + docs/how-to/contribute.md | 2 + docs/images/dls-favicon.ico | Bin 99678 -> 0 bytes docs/images/dls-logo.svg | 20 +- docs/index.md | 56 ++++ docs/index.rst | 29 --- docs/reference.md | 12 + docs/tutorials.md | 10 + docs/tutorials/installation.md | 42 +++ docs/user/explanations/docs-structure.rst | 18 -- docs/user/how-to/run-container.rst | 15 -- docs/user/index.rst | 57 ---- docs/user/reference/api.rst | 14 - docs/user/tutorials/installation.rst | 38 --- pyproject.toml | 49 ++-- src/event_model/__init__.py | 14 +- src/event_model/__main__.py | 16 +- tests/conftest.py | 21 ++ 74 files changed, 864 insertions(+), 1309 deletions(-) delete mode 100644 .github/CONTRIBUTING.rst create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/issue.md create mode 100644 .github/PULL_REQUEST_TEMPLATE/pull_request_template.md create mode 100644 .github/workflows/_check.yml create mode 100644 .github/workflows/_dist.yml rename .github/workflows/{docs.yml => _docs.yml} (69%) create mode 100644 .github/workflows/_pypi.yml create mode 100644 .github/workflows/_release.yml create mode 100644 .github/workflows/_test.yml create mode 100644 .github/workflows/_tox.yml create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/code.yml delete mode 100644 .github/workflows/docs_clean.yml delete mode 100644 .github/workflows/linkcheck.yml create mode 100644 .github/workflows/periodic.yml create mode 100644 Dockerfile create mode 100644 README.md delete mode 100644 README.rst delete mode 100644 catalog-info.yaml create mode 100644 docs/_api.rst create mode 100644 docs/_templates/custom-module-template.rst delete mode 100644 docs/developer/explanations/decisions.rst delete mode 100644 docs/developer/explanations/decisions/0001-record-architecture-decisions.rst delete mode 100644 docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst delete mode 100644 docs/developer/how-to/build-docs.rst delete mode 100644 docs/developer/how-to/contribute.rst delete mode 100644 docs/developer/how-to/lint.rst delete mode 100644 docs/developer/how-to/make-release.rst delete mode 100644 docs/developer/how-to/pin-requirements.rst delete mode 100644 docs/developer/how-to/run-tests.rst delete mode 100644 docs/developer/how-to/static-analysis.rst delete mode 100644 docs/developer/how-to/test-container.rst delete mode 100644 docs/developer/how-to/update-tools.rst delete mode 100644 docs/developer/index.rst delete mode 100644 docs/developer/reference/standards.rst delete 
mode 100644 docs/developer/tutorials/dev-install.rst create mode 100644 docs/explanations.md create mode 100644 docs/explanations/decisions.md create mode 100644 docs/explanations/decisions/0001-record-architecture-decisions.md create mode 100644 docs/explanations/decisions/0002-switched-to-python-copier-template.md create mode 100644 docs/explanations/decisions/COPYME create mode 100644 docs/genindex.md delete mode 100644 docs/genindex.rst create mode 100644 docs/how-to.md create mode 100644 docs/how-to/contribute.md delete mode 100644 docs/images/dls-favicon.ico create mode 100644 docs/index.md delete mode 100644 docs/index.rst create mode 100644 docs/reference.md create mode 100644 docs/tutorials.md create mode 100644 docs/tutorials/installation.md delete mode 100644 docs/user/explanations/docs-structure.rst delete mode 100644 docs/user/how-to/run-container.rst delete mode 100644 docs/user/index.rst delete mode 100644 docs/user/reference/api.rst delete mode 100644 docs/user/tutorials/installation.rst create mode 100644 tests/conftest.py diff --git a/.copier-answers.yml b/.copier-answers.yml index 9b32e88e..8b43cc29 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,9 +1,8 @@ # Changes here will be overwritten by Copier -_commit: 1.0.0 +_commit: 2.3.0 _src_path: gh:DiamondLightSource/python-copier-template author_email: eva.lott@diamond.ac.uk author_name: Eva Lott -component_owner: '' description: '' distribution_name: event-model docker: false @@ -11,4 +10,6 @@ docs_type: sphinx git_platform: github.com github_org: bluesky package_name: event_model +pypi: true repo_name: event-model +type_checker: mypy diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 44de8d36..d3d639a5 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,52 +3,44 @@ "name": "Python 3 Developer Container", "build": { "dockerfile": "../Dockerfile", - "target": "build", - // Only upgrade pip, we will install the project below - "args": { - "PIP_OPTIONS": "--upgrade pip" - } + "target": "developer" }, "remoteEnv": { + // Allow X11 apps to run inside the container "DISPLAY": "${localEnv:DISPLAY}" }, - // Add the URLs of features you want added when the container is built. - "features": { - "ghcr.io/devcontainers/features/common-utils:1": { - "username": "none", - "upgradePackages": false - } - }, - // Set *default* container specific settings.json values on container create. - "settings": { - "python.defaultInterpreterPath": "/venv/bin/python" - }, "customizations": { "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python" + }, // Add the IDs of extensions you want installed when the container is created. 
"extensions": [ "ms-python.python", + "github.vscode-github-actions", "tamasfe.even-better-toml", "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters" + "ryanluker.vscode-coverage-gutters", + "charliermarsh.ruff", + "ms-azuretools.vscode-docker" ] } }, - // Make sure the files we are mapping into the container exist on the host - "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", + "features": { + // Some default things like git config + "ghcr.io/devcontainers/features/common-utils:2": { + "upgradePackages": false + } + }, "runArgs": [ + // Allow the container to access the host X11 display and EPICS CA "--net=host", - "--security-opt=label=type:container_runtime_t" - ], - "mounts": [ - "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", - "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", - // map in home directory - not strictly necessary but useful - "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" + // Make sure SELinux does not disable with access to host filesystems like tmp + "--security-opt=label=disable" ], - // make the workspace folder the same inside and outside of the container - "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", - "workspaceFolder": "${localWorkspaceFolder}", + // Mount the parent as /workspaces so we can pip install peers as editable + "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind", // After the container is created, install the python project in editable form - "postCreateCommand": "pip install -e '.[dev]'" + "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install" } diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 17bc44cf..a4432289 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -1,3 +1,4 @@ +<<<<<<< before updating # Contributing ## Getting Started @@ -72,3 +73,32 @@ Hints to make the integration of your changes easy (and happen faster): - Don't forget your unit tests - All algorithms need documentation, don't forget the .rst file - Don't take changes requests to change your code personally +======= +# Contribute to the project + +Contributions and issues are most welcome! All issues and pull requests are +handled through [GitHub](https://github.com/bluesky/event-model/issues). Also, please check for any existing issues before +filing a new one. If you have a great idea but it involves big changes, please +file a ticket before making a pull request! We want to make sure you don't spend +your time coding something that might not fit the scope of the project. + +## Issue or Discussion? + +Github also offers [discussions](https://github.com/bluesky/event-model/discussions) as a place to ask questions and share ideas. If +your issue is open ended and it is not obvious when it can be "closed", please +raise it as a discussion instead. + +## Code Coverage + +While 100% code coverage does not make a library bug-free, it significantly +reduces the number of easily caught bugs! Please make sure coverage remains the +same or is improved by a pull request! + +## Developer Information + +It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits its own needs. 
+ +This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects. + +For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/2.3.0/how-to.html). +>>>>>>> after updating diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst deleted file mode 100644 index f30c7e92..00000000 --- a/.github/CONTRIBUTING.rst +++ /dev/null @@ -1,35 +0,0 @@ -Contributing to the project -=========================== - -Contributions and issues are most welcome! All issues and pull requests are -handled through GitHub_. Also, please check for any existing issues before -filing a new one. If you have a great idea but it involves big changes, please -file a ticket before making a pull request! We want to make sure you don't spend -your time coding something that might not fit the scope of the project. - -.. _GitHub: https://github.com/bluesky/event-model/issues - -Issue or Discussion? --------------------- - -Github also offers discussions_ as a place to ask questions and share ideas. If -your issue is open ended and it is not obvious when it can be "closed", please -raise it as a discussion instead. - -.. _discussions: https://github.com/bluesky/event-model/discussions - -Code coverage -------------- - -While 100% code coverage does not make a library bug-free, it significantly -reduces the number of easily caught bugs! Please make sure coverage remains the -same or is improved by a pull request! - -Developer guide ---------------- - -The `Developer Guide`_ contains information on setting up a development -environment, running the tests and what standards the code and documentation -should follow. - -.. _Developer Guide: https://bluesky.github.io/event-model/main/developer/how-to/contribute.html diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..aa65892f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,21 @@ +--- +name: Bug Report +about: The template to use for reporting bugs and usability issues +title: " " +labels: 'bug' +assignees: '' + +--- + +Describe the bug, including a clear and concise description of the expected behavior, the actual behavior and the context in which you encountered it (ideally include details of your environment). + +## Steps To Reproduce +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. 
See error + + +## Acceptance Criteria +- Specific criteria that will be used to judge if the issue is fixed diff --git a/.github/ISSUE_TEMPLATE/issue.md b/.github/ISSUE_TEMPLATE/issue.md new file mode 100644 index 00000000..52c84dd8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/issue.md @@ -0,0 +1,13 @@ +--- +name: Issue +about: The standard template to use for feature requests, design discussions and tasks +title: " " +labels: '' +assignees: '' + +--- + +A brief description of the issue, including specific stakeholders and the business case where appropriate + +## Acceptance Criteria +- Specific criteria that will be used to judge if the issue is fixed diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md new file mode 100644 index 00000000..8200afe5 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -0,0 +1,8 @@ +Fixes #ISSUE + +### Instructions to reviewer on how to test: +1. Do thing x +2. Confirm thing y happens + +### Checks for reviewer +- [ ] Would the PR title make sense to a user on a set of release notes diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml index 79d1a71e..d33e0805 100644 --- a/.github/actions/install_requirements/action.yml +++ b/.github/actions/install_requirements/action.yml @@ -1,60 +1,34 @@ name: Install requirements -description: Run pip install with requirements and upload resulting requirements +description: Install a version of python then call pip install and report what was installed inputs: - requirements_file: - description: Name of requirements file to use and upload - required: true - install_options: + python-version: + description: Python version to install, default is from Dockerfile + default: "dev" + pip-install: description: Parameters to pass to pip install - required: true - artifact_name: - description: A user friendly name to give the produced artifacts - required: true - python_version: - description: Python version to install - default: "3.x" + default: "$([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e .[dev]" runs: using: composite - steps: + - name: Get version of python + run: | + PYTHON_VERSION="${{ inputs.python-version }}" + if [ $PYTHON_VERSION == "dev" ]; then + PYTHON_VERSION=$(sed -n "s/ARG PYTHON_VERSION=//p" Dockerfile) + fi + echo "PYTHON_VERSION=$PYTHON_VERSION" >> "$GITHUB_ENV" + shell: bash + - name: Setup python uses: actions/setup-python@v5 with: - python-version: ${{ inputs.python_version }} + python-version: ${{ env.PYTHON_VERSION }} - - name: Pip install - run: | - touch ${{ inputs.requirements_file }} - # -c uses requirements.txt as constraints, see 'Validate requirements file' - pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} + - name: Install packages + run: pip install ${{ inputs.pip-install }} shell: bash - - name: Create lockfile - run: | - mkdir -p lockfiles - pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} - # delete the self referencing line and make sure it isn't blank - sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} - shell: bash - - - name: Upload lockfiles - uses: actions/upload-artifact@v4.0.0 - with: - name: lockfiles-${{ inputs.python_version }}-${{ inputs.artifact_name }}-${{ github.sha }} - path: lockfiles - - # This eliminates the class of problems where the requirements being given no - # longer match what the packages themselves dictate. E.g. 
In the rare instance
-  # where I install some-package which used to depend on vulnerable-dependency
-  # but now uses good-dependency (despite being nominally the same version)
-  # pip will install both if given a requirements file with -r
-  - name: If requirements file exists, check it matches pip installed packages
-    run: |
-      if [ -s ${{ inputs.requirements_file }} ]; then
-        if ! diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then
-          echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive"
-          exit 1
-        fi
-      fi
+  - name: Report what was installed
+    run: pip freeze
     shell: bash
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 2d1af873..184ba363 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -10,11 +10,15 @@ updates:
     schedule:
       interval: "weekly"
     groups:
-      github-artifacts:
+      actions:
         patterns:
-          - actions/*-artifact
+          - "*"
 
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
       interval: "weekly"
+    groups:
+      dev-dependencies:
+        patterns:
+          - "*"
diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py
index ae227ab7..c06813af 100755
--- a/.github/pages/make_switcher.py
+++ b/.github/pages/make_switcher.py
@@ -1,30 +1,32 @@
+"""Make switcher.json to allow docs to switch between different versions."""
+
 import json
 import logging
 from argparse import ArgumentParser
 from pathlib import Path
 from subprocess import CalledProcessError, check_output
-from typing import List, Optional
 
 
-def report_output(stdout: bytes, label: str) -> List[str]:
+def report_output(stdout: bytes, label: str) -> list[str]:
+    """Print and return something received from stdout."""
     ret = stdout.decode().strip().split("\n")
     print(f"{label}: {ret}")
     return ret
 
 
-def get_branch_contents(ref: str) -> List[str]:
+def get_branch_contents(ref: str) -> list[str]:
     """Get the list of directories in a branch."""
     stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref])
     return report_output(stdout, "Branch contents")
 
 
-def get_sorted_tags_list() -> List[str]:
+def get_sorted_tags_list() -> list[str]:
     """Get a list of sorted tags in descending order from the repository."""
     stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"])
     return report_output(stdout, "Tags list")
 
 
-def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]:
+def get_versions(ref: str, add: str | None) -> list[str]:
     """Generate the file containing the list of all GitHub Pages builds."""
     # Get the directories (i.e. 
builds) from the GitHub Pages branch try: @@ -36,15 +38,12 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st # Add and remove from the list of builds if add: builds.add(add) - if remove: - assert remove in builds, f"Build '{remove}' not in {sorted(builds)}" - builds.remove(remove) # Get a sorted list of tags tags = get_sorted_tags_list() # Make the sorted versions list from main branches and tags - versions: List[str] = [] + versions: list[str] = [] for version in ["master", "main"] + tags: if version in builds: versions.append(version) @@ -56,7 +55,8 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st return versions -def write_json(path: Path, repository: str, versions: str): +def write_json(path: Path, repository: str, versions: list[str]): + """Write the JSON switcher to path.""" org, repo_name = repository.split("/") struct = [ {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} @@ -68,17 +68,14 @@ def write_json(path: Path, repository: str, versions: str): def main(args=None): + """Parse args and write switcher.""" parser = ArgumentParser( - description="Make a versions.txt file from gh-pages directories" + description="Make a versions.json file from gh-pages directories" ) parser.add_argument( "--add", help="Add this directory to the list of existing directories", ) - parser.add_argument( - "--remove", - help="Remove this directory from the list of existing directories", - ) parser.add_argument( "repository", help="The GitHub org and repository name: ORG/REPO", @@ -91,7 +88,7 @@ def main(args=None): args = parser.parse_args(args) # Write the versions file - versions = get_versions("origin/gh-pages", args.add, args.remove) + versions = get_versions("origin/gh-pages", args.add) write_json(args.output, args.repository, versions) diff --git a/.github/workflows/_check.yml b/.github/workflows/_check.yml new file mode 100644 index 00000000..a6139c19 --- /dev/null +++ b/.github/workflows/_check.yml @@ -0,0 +1,27 @@ +on: + workflow_call: + outputs: + branch-pr: + description: The PR number if the branch is in one + value: ${{ jobs.pr.outputs.branch-pr }} + +jobs: + pr: + runs-on: "ubuntu-latest" + outputs: + branch-pr: ${{ steps.script.outputs.result }} + steps: + - uses: actions/github-script@v7 + id: script + if: github.event_name == 'push' + with: + script: | + const prs = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + head: context.repo.owner + ':${{ github.ref_name }}' + }) + if (prs.data.length) { + console.log(`::notice ::Skipping CI on branch push as it is already run in PR #${prs.data[0]["number"]}`) + return prs.data[0]["number"] + } diff --git a/.github/workflows/_dist.yml b/.github/workflows/_dist.yml new file mode 100644 index 00000000..b1c4c93c --- /dev/null +++ b/.github/workflows/_dist.yml @@ -0,0 +1,36 @@ +on: + workflow_call: + +jobs: + build: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Build sdist and wheel + run: > + export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && + pipx run build + + - name: Upload sdist and wheel as artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist + + - name: Check for packaging errors + run: pipx run twine check --strict dist/* + + - name: Install produced wheel + uses: ./.github/actions/install_requirements + with: + pip-install: dist/*.whl + + - name: Test 
module --version works using the installed wheel + # If more than one module in src/ replace with module name to test + run: python -m $(ls --hide='*.egg-info' src | head -1) --version diff --git a/.github/workflows/docs.yml b/.github/workflows/_docs.yml similarity index 69% rename from .github/workflows/docs.yml rename to .github/workflows/_docs.yml index 3c29ff94..a1cafcae 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/_docs.yml @@ -1,17 +1,13 @@ -name: Docs CI - on: - push: - pull_request: + workflow_call: jobs: - docs: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + build: runs-on: ubuntu-latest steps: - name: Avoid git conflicts when tag and branch pushed at same time - if: startsWith(github.ref, 'refs/tags') + if: github.ref_type == 'tag' run: sleep 60 - name: Checkout @@ -21,19 +17,23 @@ jobs: fetch-depth: 0 - name: Install system packages - # Can delete this if you don't use graphviz in your docs run: sudo apt-get install graphviz - name: Install python packages uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - artifact_name: docs - name: Build docs run: tox -e docs + - name: Remove environment.pickle + run: rm build/html/.doctrees/environment.pickle + + - name: Upload built docs artifact + uses: actions/upload-artifact@v4 + with: + name: docs + path: build + - name: Sanitize ref name for docs version run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV @@ -44,10 +44,10 @@ jobs: run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json - name: Publish Docs to gh-pages - if: github.event_name == 'push' && github.actor != 'dependabot[bot]' + if: github.ref_type == 'tag' || github.ref_name == 'main' # We pin to the SHA, not the tag, for security reasons. # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1 + uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages diff --git a/.github/workflows/_pypi.yml b/.github/workflows/_pypi.yml new file mode 100644 index 00000000..0c5258db --- /dev/null +++ b/.github/workflows/_pypi.yml @@ -0,0 +1,17 @@ +on: + workflow_call: + +jobs: + upload: + runs-on: ubuntu-latest + environment: release + + steps: + - name: Download dist artifact + uses: actions/download-artifact@v4 + with: + name: dist + path: dist + + - name: Publish to PyPI using trusted publishing + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml new file mode 100644 index 00000000..10d8ed87 --- /dev/null +++ b/.github/workflows/_release.yml @@ -0,0 +1,32 @@ +on: + workflow_call: + +jobs: + artifacts: + runs-on: ubuntu-latest + + steps: + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + merge-multiple: true + + - name: Zip up docs + run: | + set -vxeuo pipefail + if [ -d html ]; then + mv html $GITHUB_REF_NAME + zip -r docs.zip $GITHUB_REF_NAME + rm -rf $GITHUB_REF_NAME + fi + + - name: Create GitHub Release + # We pin to the SHA, not the tag, for security reasons. 
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: softprops/action-gh-release@c062e08bd532815e2082a85e87e3ef29c3e6d191 # v2.0.8 + with: + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} + files: "*" + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml new file mode 100644 index 00000000..f652d414 --- /dev/null +++ b/.github/workflows/_test.yml @@ -0,0 +1,62 @@ +on: + workflow_call: + inputs: + python-version: + type: string + description: The version of python to install + required: true + runs-on: + type: string + description: The runner to run this job on + required: true + secrets: + CODECOV_TOKEN: + required: true + +env: + # https://github.com/pytest-dev/pytest/issues/2042 + PY_IGNORE_IMPORTMISMATCH: "1" + +jobs: + run: + runs-on: ${{ inputs.runs-on }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - if: inputs.python-version == 'dev' + name: Install dev versions of python packages + uses: ./.github/actions/install_requirements + + - if: inputs.python-version == 'dev' + name: Write the requirements as an artifact + run: pip freeze --exclude-editable > /tmp/dev-requirements.txt + + - if: inputs.python-version == 'dev' + name: Upload dev-requirements.txt + uses: actions/upload-artifact@v4 + with: + name: dev-requirements + path: /tmp/dev-requirements.txt + + - if: inputs.python-version != 'dev' + name: Install latest versions of python packages + uses: ./.github/actions/install_requirements + with: + python-version: ${{ inputs.python-version }} + pip-install: ".[dev]" + + - name: Run tests + run: tox -e tests + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + name: ${{ inputs.python-version }}/${{ inputs.runs-on }} + files: cov.xml + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/_tox.yml b/.github/workflows/_tox.yml new file mode 100644 index 00000000..a13536d3 --- /dev/null +++ b/.github/workflows/_tox.yml @@ -0,0 +1,22 @@ +on: + workflow_call: + inputs: + tox: + type: string + description: What to run under tox + required: true + + +jobs: + run: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install python packages + uses: ./.github/actions/install_requirements + + - name: Run tox + run: tox -e ${{ inputs.tox }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..1df64d84 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,59 @@ +name: CI + +on: + push: + pull_request: + +jobs: + check: + uses: ./.github/workflows/_check.yml + + lint: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_tox.yml + with: + tox: pre-commit,type-checking + + test: + needs: check + if: needs.check.outputs.branch-pr == '' + strategy: + matrix: + runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest + python-version: ["3.10", "3.11", "3.12"] + include: + # Include one that runs in the dev environment + - runs-on: "ubuntu-latest" + python-version: "dev" + fail-fast: false + uses: ./.github/workflows/_test.yml + with: + runs-on: ${{ matrix.runs-on }} + python-version: ${{ matrix.python-version }} + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + 
docs: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_docs.yml + + dist: + needs: check + if: needs.check.outputs.branch-pr == '' + uses: ./.github/workflows/_dist.yml + + pypi: + if: github.ref_type == 'tag' + needs: dist + uses: ./.github/workflows/_pypi.yml + permissions: + id-token: write + + release: + if: github.ref_type == 'tag' + needs: [dist, docs] + uses: ./.github/workflows/_release.yml + permissions: + contents: write diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml deleted file mode 100644 index 364ff43d..00000000 --- a/.github/workflows/code.yml +++ /dev/null @@ -1,243 +0,0 @@ -name: Code CI - -on: - push: - pull_request: -env: - # The target python version, which must match the Dockerfile version - CONTAINER_PYTHON: "3.11" - DIST_WHEEL_PATH: dist-${{ github.sha }} - -jobs: - lint: - # pull requests are a duplicate of a branch push if within the same repo. - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - artifact_name: lint - - - name: Lint - run: tox -e pre-commit,mypy - - test: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.8", "3.9", "3.10", "3.11"] - install: ["-e .[dev]"] - # Make one version be non-editable to test both paths of version code - include: - - os: "ubuntu-latest" - python: "3.7" - install: ".[dev]" - - runs-on: ${{ matrix.os }} - env: - # https://github.com/pytest-dev/pytest/issues/2042 - PY_IGNORE_IMPORTMISMATCH: "1" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{ matrix.python }} - requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt - install_options: ${{ matrix.install }} - artifact_name: tests - - - name: List dependency tree - run: pipdeptree - - - name: Run tests - run: tox -e pytest - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - name: ${{ matrix.python }}/${{ matrix.os }} - files: cov.xml - - dist: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: "ubuntu-latest" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Build sdist and wheel - run: | - export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ - pipx run build - - - name: Upload sdist and wheel as artifacts - uses: actions/upload-artifact@v4.0.0 - with: - name: ${{ env.DIST_WHEEL_PATH }} - path: dist - - - name: Check for packaging errors - run: pipx run twine check --strict dist/* - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{env.CONTAINER_PYTHON}} - requirements_file: requirements.txt - install_options: dist/*.whl - artifact_name: dist - - - name: Test module --version works using the installed wheel - # If more than one module in src/ replace with module name 
to test - run: python -m $(ls --hide='*.egg-info' src | head -1) --version - - container: - needs: [lint, dist, test] - runs-on: ubuntu-latest - - permissions: - contents: read - packages: write - - env: - TEST_TAG: "testing" - - steps: - - name: Checkout - uses: actions/checkout@v4 - - # image names must be all lower case - - name: Generate image repo name - run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV - - - name: Set lockfile location in environment - run: | - echo "DIST_LOCKFILE_PATH=lockfiles-${{ env.CONTAINER_PYTHON }}-dist-${{ github.sha }}" >> $GITHUB_ENV - - - name: Download wheel and lockfiles - uses: actions/download-artifact@v4.1.0 - with: - path: artifacts/ - pattern: "*dist*" - - - name: Log in to GitHub Docker Registry - if: github.event_name != 'pull_request' - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v3 - - - name: Build and export to Docker local cache - uses: docker/build-push-action@v5 - with: - # Note build-args, context, file, and target must all match between this - # step and the later build-push-action, otherwise the second build-push-action - # will attempt to build the image again - build-args: | - PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl - context: artifacts/ - file: ./Dockerfile - target: runtime - load: true - tags: ${{ env.TEST_TAG }} - # If you have a long docker build (2+ minutes), uncomment the - # following to turn on caching. For short build times this - # makes it a little slower - #cache-from: type=gha - #cache-to: type=gha,mode=max - - - name: Test cli works in cached runtime image - run: docker run docker.io/library/${{ env.TEST_TAG }} --version - - - name: Create tags for publishing image - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.IMAGE_REPOSITORY }} - tags: | - type=ref,event=tag - type=raw,value=latest, enable=${{ github.ref_type == 'tag' }} - # type=edge,branch=main - # Add line above to generate image for every commit to given branch, - # and uncomment the end of if clause in next step - - - name: Push cached image to container registry - if: github.ref_type == 'tag' # || github.ref_name == 'main' - uses: docker/build-push-action@v5 - # This does not build the image again, it will find the image in the - # Docker cache and publish it - with: - # Note build-args, context, file, and target must all match between this - # step and the previous build-push-action, otherwise this step will - # attempt to build the image again - build-args: | - PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl - context: artifacts/ - file: ./Dockerfile - target: runtime - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - - release: - # upload to PyPI and make a release on every tag - needs: [lint, dist, test] - if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }} - runs-on: ubuntu-latest - env: - HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} - - steps: - - name: Download wheel and lockfiles - uses: actions/download-artifact@v4.1.0 - with: - path: artifacts/ - pattern: "*dist*" - - - name: Fixup blank lockfiles - # Github release artifacts can't be blank - run: for f in ${{ env.DIST_LOCKFILE_PATH }}/*; do [ -s $f ] || echo '# No requirements' >> $f; done - 
- - name: Github Release - # We pin to the SHA, not the tag, for security reasons. - # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 - with: - prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} - files: | - ${{ env.DIST_WHEEL_PATH }}/* - ${{ env.DIST_LOCKFILE_PATH }}/* - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Publish to PyPI - if: ${{ env.HAS_PYPI_TOKEN }} - uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml deleted file mode 100644 index e324640e..00000000 --- a/.github/workflows/docs_clean.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Docs Cleanup CI - -# delete branch documentation when a branch is deleted -# also allow manually deleting a documentation version -on: - delete: - workflow_dispatch: - inputs: - version: - description: "documentation version to DELETE" - required: true - type: string - -jobs: - remove: - if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch' - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: gh-pages - - - name: removing documentation for branch ${{ github.event.ref }} - if: ${{ github.event_name != 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV - - - name: manually removing documentation version ${{ github.event.inputs.version }} - if: ${{ github.event_name == 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV - - - name: Sanitize ref name for docs version - run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV - - - name: update index and push changes - run: | - rm -r $DOCS_VERSION - python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json - git config --global user.name 'GitHub Actions Docs Cleanup CI' - git config --global user.email 'GithubActionsCleanup@noreply.github.com' - git commit -am "Removing redundant docs version $DOCS_VERSION" - git push diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml deleted file mode 100644 index 7f651a27..00000000 --- a/.github/workflows/linkcheck.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Link Check - -on: - workflow_dispatch: - schedule: - # Run weekly to check URL links still resolve - - cron: "0 8 * * WED" - -jobs: - docs: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - artifact_name: link_check - - - name: Check links - run: tox -e docs build -- -b linkcheck - - - name: Keepalive Workflow - uses: gautamkrishnar/keepalive-workflow@v1 diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml new file mode 100644 index 00000000..e2a0fd1b --- /dev/null +++ b/.github/workflows/periodic.yml @@ -0,0 +1,13 @@ +name: Periodic + +on: + workflow_dispatch: + schedule: + # Run weekly to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + linkcheck: + uses: ./.github/workflows/_tox.yml + with: + tox: docs build -- -b linkcheck diff --git a/.gitignore b/.gitignore index 
a37be99b..0f33bf29 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ __pycache__/ # Distribution / packaging .Python env/ -.venv build/ develop-eggs/ dist/ @@ -56,6 +55,7 @@ cov.xml # Sphinx documentation docs/_build/ +docs/_api # PyBuilder target/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5bc9f001..60fc23f9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,24 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-added-large-files - id: check-yaml - id: check-merge-conflict + - id: end-of-file-fixer - repo: local hooks: - - id: black - name: Run black - stages: [commit] + - id: ruff + name: lint with ruff language: system - entry: black --check --diff + entry: ruff check --force-exclude types: [python] + require_serial: true - - id: ruff - name: Run ruff - stages: [commit] + - id: ruff-format + name: format with ruff language: system - entry: ruff + entry: ruff format --force-exclude types: [python] + require_serial: true diff --git a/.vscode/extensions.json b/.vscode/extensions.json index a1227b34..933c580c 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,10 +1,5 @@ { "recommendations": [ "ms-vscode-remote.remote-containers", - "ms-python.python", - "tamasfe.even-better-toml", - "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters", - "charliermarsh.Ruff" ] } diff --git a/.vscode/launch.json b/.vscode/launch.json index 3cda7432..36d8f503 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,7 +6,7 @@ "configurations": [ { "name": "Debug Unit Test", - "type": "python", + "type": "debugpy", "request": "launch", "justMyCode": false, "program": "${file}", @@ -15,10 +15,8 @@ ], "console": "integratedTerminal", "env": { - // The default config in pyproject.toml's "[tool.pytest.ini_options]" adds coverage. - // Cannot have coverage and debugging at the same time. - // https://github.com/microsoft/vscode-python/issues/693 - "PYTEST_ADDOPTS": "--no-cov" + // Enable break on exception when debugging tests (see: tests/conftest.py) + "PYTEST_RAISE": "1", }, } ] diff --git a/.vscode/settings.json b/.vscode/settings.json index 72259a62..101c75fa 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,22 +1,12 @@ { - "python.linting.pylintEnabled": false, - "python.linting.flake8Enabled": false, - "python.linting.mypyEnabled": true, - "python.linting.enabled": true, - "python.testing.pytestArgs": [ - "--cov=event_model", - "--cov-report", - "xml:cov.xml" - ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, - "python.formatting.provider": "black", - "python.languageServer": "Pylance", "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit" + }, + "files.insertFinalNewline": true, "[python]": { - "editor.codeActionsOnSave": { - "source.fixAll.ruff": false, - "source.organizeImports.ruff": true - } - } + "editor.defaultFormatter": "charliermarsh.ruff", + }, } diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..c4404eca --- /dev/null +++ b/Dockerfile @@ -0,0 +1,13 @@ +# The devcontainer should use the developer target and run as root with podman +# or docker with user namespaces. 
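+# NOTE: CI reuses this file: the install_requirements action reads the
+# "ARG PYTHON_VERSION=" line below when its python-version input is "dev",
+# so changing the value here also changes the default Python used in CI.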
+ARG PYTHON_VERSION=3.11
+FROM python:${PYTHON_VERSION} as developer
+
+# Add any system dependencies for the developer/build environment here
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    graphviz \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set up a virtual environment and put it in PATH
+RUN python -m venv /venv
+ENV PATH=/venv/bin:$PATH
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..3381b43d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,37 @@
+[![CI](https://github.com/bluesky/event-model/actions/workflows/ci.yml/badge.svg)](https://github.com/bluesky/event-model/actions/workflows/ci.yml)
+[![Coverage](https://codecov.io/gh/bluesky/event-model/branch/main/graph/badge.svg)](https://codecov.io/gh/bluesky/event-model)
+[![PyPI](https://img.shields.io/pypi/v/event-model.svg)](https://pypi.org/project/event-model)
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+
+# event_model
+
+
+
+This is where you should write a short paragraph that describes what your module does,
+how it does it, and why people should use it.
+
+Source | <https://github.com/bluesky/event-model>
+:---: | :---:
+PyPI | `pip install event-model`
+Documentation | <https://bluesky.github.io/event-model>
+Releases | <https://github.com/bluesky/event-model/releases>
+
+This is where you should put some images or code snippets that illustrate
+some relevant examples. If it is a library then you might put some
+introductory code here:
+
+```python
+from event_model import __version__
+
+print(f"Hello event_model {__version__}")
+```
+
+Or if it is a commandline tool then you might put some example commands here:
+
+```
+python -m event_model --version
+```
+
+
+
+See https://bluesky.github.io/event-model for more detailed documentation.
diff --git a/README.rst b/README.rst
deleted file mode 100644
index eba650fe..00000000
--- a/README.rst
+++ /dev/null
@@ -1,55 +0,0 @@
-event_model
-=============================================================================
-
-|code_ci| |docs_ci| |coverage| |pypi_version| |license|
-
-This is where you should write a short paragraph that describes what your module does,
-how it does it, and why people should use it.
-
-============== ==============================================================
-PyPI ``pip install event-model``
-Source code https://github.com/bluesky/event-model
-Documentation https://bluesky.github.io/event-model
-Releases https://github.com/bluesky/event-model/releases
-============== ==============================================================
-
-This is where you should put some images or code snippets that illustrate
-some relevant examples. If it is a library then you might put some
-introductory code here:
-
-.. code-block:: python
-
-    from event_model import __version__
-
-    print(f"Hello event_model {__version__}")
-
-Or if it is a commandline tool then you might put some example commands here::
-
-    $ python -m event_model --version
-
-.. |code_ci| image:: https://github.com/bluesky/event-model/actions/workflows/code.yml/badge.svg?branch=main
-    :target: https://github.com/bluesky/event-model/actions/workflows/code.yml
-    :alt: Code CI
-
-.. |docs_ci| image:: https://github.com/bluesky/event-model/actions/workflows/docs.yml/badge.svg?branch=main
-    :target: https://github.com/bluesky/event-model/actions/workflows/docs.yml
-    :alt: Docs CI
-
-.. |coverage| image:: https://codecov.io/gh/bluesky/event-model/branch/main/graph/badge.svg
-    :target: https://codecov.io/gh/bluesky/event-model
-    :alt: Test Coverage
-
-..
|pypi_version| image:: https://img.shields.io/pypi/v/event-model.svg - :target: https://pypi.org/project/event-model - :alt: Latest PyPI version - -.. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg - :target: https://opensource.org/licenses/Apache-2.0 - :alt: Apache License - - -.. - Anything below this line is used when viewing README.rst and will be replaced - when included in index.rst - -See https://bluesky.github.io/event-model for more detailed documentation. diff --git a/catalog-info.yaml b/catalog-info.yaml deleted file mode 100644 index e4206bb5..00000000 --- a/catalog-info.yaml +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: backstage.io/v1alpha1 -kind: Component -metadata: - name: event-model - title: event-model - description: -spec: - type: documentation - lifecycle: experimental - owner: \ No newline at end of file diff --git a/docs/_api.rst b/docs/_api.rst new file mode 100644 index 00000000..c2f22557 --- /dev/null +++ b/docs/_api.rst @@ -0,0 +1,16 @@ +:orphan: + +.. + This page is not included in the TOC tree, but must exist so that the + autosummary pages are generated for event_model and all its + subpackages + +API +=== + +.. autosummary:: + :toctree: _api + :template: custom-module-template.rst + :recursive: + + event_model diff --git a/docs/_templates/custom-module-template.rst b/docs/_templates/custom-module-template.rst new file mode 100644 index 00000000..9aeca540 --- /dev/null +++ b/docs/_templates/custom-module-template.rst @@ -0,0 +1,37 @@ +{{ ('``' + fullname + '``') | underline }} + +{%- set filtered_members = [] %} +{%- for item in members %} + {%- if item in functions + classes + exceptions + attributes %} + {% set _ = filtered_members.append(item) %} + {%- endif %} +{%- endfor %} + +.. automodule:: {{ fullname }} + :members: + + {% block modules %} + {% if modules %} + .. rubric:: Submodules + + .. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: + {% for item in modules %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block members %} + {% if filtered_members %} + .. rubric:: Members + + .. autosummary:: + :nosignatures: + {% for item in filtered_members %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/conf.py b/docs/conf.py index a0b50a24..bf839aba 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,8 +1,9 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html +"""Configuration file for the Sphinx documentation builder. + +This file only contains a selection of the most common options. For a full +list see the documentation: +https://www.sphinx-doc.org/en/master/usage/configuration.html +""" import sys from pathlib import Path @@ -32,6 +33,8 @@ extensions = [ # Use this for generating API docs "sphinx.ext.autodoc", + # and making summary tables at the top of API docs + "sphinx.ext.autosummary", # This can parse google style docstrings "sphinx.ext.napoleon", # For linking to external sphinx documentation @@ -44,8 +47,13 @@ "sphinx_copybutton", # For the card element "sphinx_design", + # So we can write markdown files + "myst_parser", ] +# So we can use the ::: syntax +myst_enable_extensions = ["colon_fence"] + # If true, Sphinx will warn about all references where the target cannot # be found. 
nitpicky = True @@ -75,6 +83,12 @@ # Don't inherit docstrings from baseclasses autodoc_inherit_docstrings = False +# Document only what is in __all__ +autosummary_ignore_module_all = False + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + # Output graphviz directive produced images in a scalable format graphviz_output_format = "svg" @@ -82,9 +96,6 @@ # role, that is, for text marked up `like this` default_role = "any" -# The suffix of source filenames. -source_suffix = ".rst" - # The master toctree document. master_doc = "index" @@ -103,15 +114,6 @@ # A dictionary of graphviz graph attributes for inheritance diagrams. inheritance_graph_attrs = {"rankdir": "TB"} -# Common links that should be available on every page -rst_epilog = """ -.. _Diamond Light Source: http://www.diamond.ac.uk -.. _black: https://github.com/psf/black -.. _ruff: https://beta.ruff.rs/docs/ -.. _mypy: http://mypy-lang.org/ -.. _pre-commit: https://pre-commit.com/ -""" - # Ignore localhost links for periodic check that links in docs are valid linkcheck_ignore = [r"http://localhost:\d+/"] @@ -142,10 +144,10 @@ # Theme options for pydata_sphinx_theme # We don't check switcher because there are 3 possible states for a repo: # 1. New project, docs are not published so there is no switcher -# 2. Existing project with latest skeleton, switcher exists and works -# 3. Existing project with old skeleton that makes broken switcher, +# 2. Existing project with latest copier template, switcher exists and works +# 3. Existing project with old copier template that makes broken switcher, # switcher exists but is broken -# Point 3 makes checking switcher difficult, because the updated skeleton +# Point 3 makes checking switcher difficult, because the updated copier template # will fix the switcher at the end of the docs workflow, but never gets a chance # to complete as the docs build warns and fails. html_theme_options = { @@ -167,19 +169,13 @@ }, "check_switcher": False, "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], - "external_links": [ - { - "name": "Release Notes", - "url": f"https://github.com/{github_user}/{github_repo}/releases", - } - ], "navigation_with_keys": False, } # A dictionary of values to pass into the template engine’s context for all pages html_context = { "github_user": github_user, - "github_repo": project, + "github_repo": github_repo, "github_version": version, "doc_path": "docs", } @@ -192,4 +188,4 @@ # Logo html_logo = "images/dls-logo.svg" -html_favicon = "images/dls-favicon.ico" +html_favicon = html_logo diff --git a/docs/developer/explanations/decisions.rst b/docs/developer/explanations/decisions.rst deleted file mode 100644 index 5841e6ea..00000000 --- a/docs/developer/explanations/decisions.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. This Source Code Form is subject to the terms of the Mozilla Public -.. License, v. 2.0. If a copy of the MPL was not distributed with this -.. file, You can obtain one at http://mozilla.org/MPL/2.0/. - -Architectural Decision Records -============================== - -We record major architectural decisions in Architecture Decision Records (ADRs), -as `described by Michael Nygard -`_. -Below is the list of our current ADRs. - -.. 
toctree:: - :maxdepth: 1 - :glob: - - decisions/* \ No newline at end of file diff --git a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst deleted file mode 100644 index b2d3d0fe..00000000 --- a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst +++ /dev/null @@ -1,26 +0,0 @@ -1. Record architecture decisions -================================ - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We need to record the architectural decisions made on this project. - -Decision --------- - -We will use Architecture Decision Records, as `described by Michael Nygard -`_. - -Consequences ------------- - -See Michael Nygard's article, linked above. To create new ADRs we will copy and -paste from existing ones. diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst deleted file mode 100644 index 33d56981..00000000 --- a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst +++ /dev/null @@ -1,35 +0,0 @@ -2. Adopt python_copier_template for project structure -===================================================== - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We should use the following `python_copier_template `_. -The template will ensure consistency in developer -environments and package management. - -Decision --------- - -We have switched to using the skeleton. - -Consequences ------------- - -This module will use a fixed set of tools as developed in python_copier_template -and can pull from this template to update the packaging to the latest techniques. - -As such, the developer environment may have changed, the following could be -different: - -- linting -- formatting -- pip venv setup -- CI/CD diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst deleted file mode 100644 index 11a5e638..00000000 --- a/docs/developer/how-to/build-docs.rst +++ /dev/null @@ -1,38 +0,0 @@ -Build the docs using sphinx -=========================== - -You can build the `sphinx`_ based docs from the project directory by running:: - - $ tox -e docs - -This will build the static docs on the ``docs`` directory, which includes API -docs that pull in docstrings from the code. - -.. seealso:: - - `documentation_standards` - -The docs will be built into the ``build/html`` directory, and can be opened -locally with a web browser:: - - $ firefox build/html/index.html - -Autobuild ---------- - -You can also run an autobuild process, which will watch your ``docs`` -directory for changes and rebuild whenever it sees changes, reloading any -browsers watching the pages:: - - $ tox -e docs autobuild - -You can view the pages at localhost:: - - $ firefox http://localhost:8000 - -If you are making changes to source code too, you can tell it to watch -changes in this directory too:: - - $ tox -e docs autobuild -- --watch src - -.. _sphinx: https://www.sphinx-doc.org/ diff --git a/docs/developer/how-to/contribute.rst b/docs/developer/how-to/contribute.rst deleted file mode 100644 index 65b992f0..00000000 --- a/docs/developer/how-to/contribute.rst +++ /dev/null @@ -1 +0,0 @@ -.. 
include:: ../../../.github/CONTRIBUTING.rst diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst deleted file mode 100644 index 2df258d8..00000000 --- a/docs/developer/how-to/lint.rst +++ /dev/null @@ -1,39 +0,0 @@ -Run linting using pre-commit -============================ - -Code linting is handled by black_ and ruff_ run under pre-commit_. - -Running pre-commit ------------------- - -You can run the above checks on all files with this command:: - - $ tox -e pre-commit - -Or you can install a pre-commit hook that will run each time you do a ``git -commit`` on just the files that have changed:: - - $ pre-commit install - -It is also possible to `automatically enable pre-commit on cloned repositories `_. -This will result in pre-commits being enabled on every repo your user clones from now on. - -Fixing issues -------------- - -If black reports an issue you can tell it to reformat all the files in the -repository:: - - $ black . - -Likewise with ruff:: - - $ ruff --fix . - -Ruff may not be able to automatically fix all issues; in this case, you will have to fix those manually. - -VSCode support --------------- - -The ``.vscode/settings.json`` will run black formatting as well as -ruff checking on save. Issues will be highlighted in the editor window. diff --git a/docs/developer/how-to/make-release.rst b/docs/developer/how-to/make-release.rst deleted file mode 100644 index 230025f0..00000000 --- a/docs/developer/how-to/make-release.rst +++ /dev/null @@ -1,16 +0,0 @@ -Make a release -============== - -To make a new release, please follow this checklist: - -- Choose a new PEP440 compliant release number (see https://peps.python.org/pep-0440/) -- Go to the GitHub release_ page -- Choose ``Draft New Release`` -- Click ``Choose Tag`` and supply the new tag you chose (click create new tag) -- Click ``Generate release notes``, review and edit these notes -- Choose a title and click ``Publish Release`` - -Note that tagging and pushing to the main branch has the same effect except that -you will not get the option to edit the release notes. - -.. _release: https://github.com/bluesky/event-model/releases diff --git a/docs/developer/how-to/pin-requirements.rst b/docs/developer/how-to/pin-requirements.rst deleted file mode 100644 index 89639623..00000000 --- a/docs/developer/how-to/pin-requirements.rst +++ /dev/null @@ -1,74 +0,0 @@ -Pinning Requirements -==================== - -Introduction ------------- - -By design this project only defines dependencies in one place, i.e. in -the ``requires`` table in ``pyproject.toml``. - -In the ``requires`` table it is possible to pin versions of some dependencies -as needed. For library projects it is best to leave pinning to a minimum so -that your library can be used by the widest range of applications. - -When CI builds the project it will use the latest compatible set of -dependencies available (after applying your pins and any dependencies' pins). - -This approach means that there is a possibility that a future build may -break because an updated release of a dependency has made a breaking change. - -The correct way to fix such an issue is to work out the minimum pinning in -``requires`` that will resolve the problem. However this can be quite hard to -do and may be time consuming when simply trying to release a minor update. - -For this reason we provide a mechanism for locking all dependencies to -the same version as a previous successful release. This is a quick fix that -should guarantee a successful CI build. 
- -Finding the lock files ----------------------- - -Every release of the project will have a set of requirements files published -as release assets. - -For example take a look at the release page for python3-pip-skeleton-cli here: -https://github.com/DiamondLightSource/python3-pip-skeleton-cli/releases/tag/3.3.0 - -There is a list of requirements*.txt files showing as assets on the release. - -There is one file for each time the CI installed the project into a virtual -environment. There are multiple of these as the CI creates a number of -different environments. - -The files are created using ``pip freeze`` and will contain a full list -of the dependencies and sub-dependencies with pinned versions. - -You can download any of these files by clicking on them. It is best to use -the one that ran with the lowest Python version as this is more likely to -be compatible with all the versions of Python in the test matrix. -i.e. ``requirements-test-ubuntu-latest-3.8.txt`` in this example. - -Applying the lock file ----------------------- - -To apply a lockfile: - -- copy the requirements file you have downloaded to the root of your - repository -- rename it to requirements.txt -- commit it into the repo -- push the changes - -The CI looks for a requirements.txt in the root and will pass it to pip -when installing each of the test environments. pip will then install exactly -the same set of packages as the previous release. - -Removing dependency locking from CI ------------------------------------ - -Once the reasons for locking the build have been resolved it is a good idea -to go back to an unlocked build. This is because you get an early indication -of any incoming problems. - -To restore unlocked builds in CI simply remove requirements.txt from the root -of the repo and push. diff --git a/docs/developer/how-to/run-tests.rst b/docs/developer/how-to/run-tests.rst deleted file mode 100644 index d2e03644..00000000 --- a/docs/developer/how-to/run-tests.rst +++ /dev/null @@ -1,12 +0,0 @@ -Run the tests using pytest -========================== - -Testing is done with pytest_. It will find functions in the project that `look -like tests`_, and run them to check for errors. You can run it with:: - - $ tox -e pytest - -It will also report coverage to the commandline and to ``cov.xml``. - -.. _pytest: https://pytest.org/ -.. _look like tests: https://docs.pytest.org/explanation/goodpractices.html#test-discovery diff --git a/docs/developer/how-to/static-analysis.rst b/docs/developer/how-to/static-analysis.rst deleted file mode 100644 index 065920e1..00000000 --- a/docs/developer/how-to/static-analysis.rst +++ /dev/null @@ -1,8 +0,0 @@ -Run static analysis using mypy -============================== - -Static type analysis is done with mypy_. It checks type definition in source -files without running them, and highlights potential issues where types do not -match. You can run it with:: - - $ tox -e mypy diff --git a/docs/developer/how-to/test-container.rst b/docs/developer/how-to/test-container.rst deleted file mode 100644 index a4a43a6f..00000000 --- a/docs/developer/how-to/test-container.rst +++ /dev/null @@ -1,25 +0,0 @@ -Container Local Build and Test -============================== - -CI builds a runtime container for the project. The local tests -checks available via ``tox -p`` do not verify this because not -all developers will have docker installed locally. - -If CI is failing to build the container, then it is best to fix and -test the problem locally. 
This would require that you have docker -or podman installed on your local workstation. - -In the following examples the command ``docker`` is interchangeable with -``podman`` depending on which container cli you have installed. - -To build the container and call it ``test``:: - - cd - docker build -t test . - -To verify that the container runs:: - - docker run -it test --help - -You can pass any other command line parameters to your application -instead of --help. diff --git a/docs/developer/how-to/update-tools.rst b/docs/developer/how-to/update-tools.rst deleted file mode 100644 index c1075ee8..00000000 --- a/docs/developer/how-to/update-tools.rst +++ /dev/null @@ -1,16 +0,0 @@ -Update the tools -================ - -This module is merged with the python3-pip-skeleton_. This is a generic -Python project structure which provides a means to keep tools and -techniques in sync between multiple Python projects. To update to the -latest version of the skeleton, run:: - - $ git pull --rebase=false https://github.com/DiamondLightSource/python3-pip-skeleton - -Any merge conflicts will indicate an area where something has changed that -conflicts with the setup of the current module. Check the `closed pull requests -`_ -of the skeleton module for more details. - -.. _python3-pip-skeleton: https://DiamondLightSource.github.io/python3-pip-skeleton diff --git a/docs/developer/index.rst b/docs/developer/index.rst deleted file mode 100644 index 8a6369b9..00000000 --- a/docs/developer/index.rst +++ /dev/null @@ -1,64 +0,0 @@ -Developer Guide -=============== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_run;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/dev-install - - +++ - - Tutorials for getting up and running as a developer. - - .. grid-item-card:: :material-regular:`task;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/contribute - how-to/build-docs - how-to/run-tests - how-to/static-analysis - how-to/lint - how-to/update-tools - how-to/make-release - how-to/pin-requirements - how-to/test-container - - +++ - - Practical step-by-step guides for day-to-day dev tasks. - - .. grid-item-card:: :material-regular:`apartment;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/decisions - - +++ - - Explanations of how and why the architecture is why it is. - - .. grid-item-card:: :material-regular:`description;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/standards - - +++ - - Technical reference material on standards in use. diff --git a/docs/developer/reference/standards.rst b/docs/developer/reference/standards.rst deleted file mode 100644 index 5a1fd478..00000000 --- a/docs/developer/reference/standards.rst +++ /dev/null @@ -1,63 +0,0 @@ -Standards -========= - -This document defines the code and documentation standards used in this -repository. - -Code Standards --------------- - -The code in this repository conforms to standards set by the following tools: - -- black_ for code formatting -- ruff_ for style checks -- mypy_ for static type checking - -.. seealso:: - - How-to guides `../how-to/lint` and `../how-to/static-analysis` - -.. _documentation_standards: - -Documentation Standards ------------------------ - -Docstrings are pre-processed using the Sphinx Napoleon extension. As such, -google-style_ is considered as standard for this repository. 
Please use type -hints in the function signature for types. For example: - -.. code:: python - - def func(arg1: str, arg2: int) -> bool: - """Summary line. - - Extended description of function. - - Args: - arg1: Description of arg1 - arg2: Description of arg2 - - Returns: - Description of return value - """ - return True - -.. _google-style: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/index.html#google-vs-numpy - -Documentation is contained in the ``docs`` directory and extracted from -docstrings of the API. - -Docs follow the underlining convention:: - - Headling 1 (page title) - ======================= - - Heading 2 - --------- - - Heading 3 - ~~~~~~~~~ - -.. seealso:: - - How-to guide `../how-to/build-docs` diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst deleted file mode 100644 index 0f467988..00000000 --- a/docs/developer/tutorials/dev-install.rst +++ /dev/null @@ -1,68 +0,0 @@ -Developer install -================= - -These instructions will take you through the minimal steps required to get a dev -environment setup, so you can run the tests locally. - -Clone the repository --------------------- - -First clone the repository locally using `Git -`_:: - - $ git clone git://github.com/bluesky/event-model.git - -Install dependencies --------------------- - -You can choose to either develop on the host machine using a `venv` (which -requires python 3.8 or later) or to run in a container under `VSCode -`_ - -.. tab-set:: - - .. tab-item:: Local virtualenv - - .. code:: - - $ cd event-model - $ python3 -m venv venv - $ source venv/bin/activate - $ pip install -e '.[dev]' - - .. tab-item:: VSCode devcontainer - - .. code:: - - $ code event-model - # Click on 'Reopen in Container' when prompted - # Open a new terminal - - .. note:: - - See the epics-containers_ documentation for more complex - use cases, such as integration with podman. - -See what was installed ----------------------- - -To see a graph of the python package dependency tree type:: - - $ pipdeptree - -Build and test --------------- - -Now you have a development environment you can run the tests in a terminal:: - - $ tox -p - -This will run in parallel the following checks: - -- `../how-to/build-docs` -- `../how-to/run-tests` -- `../how-to/static-analysis` -- `../how-to/lint` - - -.. _epics-containers: https://epics-containers.github.io/main/user/tutorials/devcontainer.html diff --git a/docs/explanations.md b/docs/explanations.md new file mode 100644 index 00000000..73ab289b --- /dev/null +++ b/docs/explanations.md @@ -0,0 +1,10 @@ +# Explanations + +Explanations of how it works and why it works that way. + +```{toctree} +:maxdepth: 1 +:glob: + +explanations/* +``` diff --git a/docs/explanations/decisions.md b/docs/explanations/decisions.md new file mode 100644 index 00000000..0533b98d --- /dev/null +++ b/docs/explanations/decisions.md @@ -0,0 +1,12 @@ +# Architectural Decision Records + +Architectural decisions are made throughout a project's lifetime. As a way of keeping track of these decisions, we record these decisions in Architecture Decision Records (ADRs) listed below. + +```{toctree} +:glob: true +:maxdepth: 1 + +decisions/* +``` + +For more information on ADRs see this [blog by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). 
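The deleted developer guide above funnels every check through `tox -p`. With the environment names this patch introduces in `pyproject.toml` (`pre-commit`, `type-checking`, `tests`, `docs`), the individual invocations would look like the following sketch (command names taken from the tox configuration later in this patch, not from the deleted guide):

```
$ tox -p                 # run all four checks in parallel
$ tox -e tests           # pytest with coverage
$ tox -e type-checking   # mypy on src and tests
$ tox -e pre-commit      # ruff lint and format via pre-commit
$ tox -e docs            # sphinx-build the documentation
```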
diff --git a/docs/explanations/decisions/0001-record-architecture-decisions.md b/docs/explanations/decisions/0001-record-architecture-decisions.md new file mode 100644 index 00000000..44d234ef --- /dev/null +++ b/docs/explanations/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,18 @@ +# 1. Record architecture decisions + +## Status + +Accepted + +## Context + +We need to record the architectural decisions made on this project. + +## Decision + +We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). + +## Consequences + +See Michael Nygard's article, linked above. To create new ADRs we will copy and +paste from existing ones. diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0002-switched-to-python-copier-template.md new file mode 100644 index 00000000..66fe5d8b --- /dev/null +++ b/docs/explanations/decisions/0002-switched-to-python-copier-template.md @@ -0,0 +1,28 @@ +# 2. Adopt python-copier-template for project structure + +## Status + +Accepted + +## Context + +We should use the following [python-copier-template](https://github.com/DiamondLightSource/python-copier-template). +The template will ensure consistency in developer +environments and package management. + +## Decision + +We have switched to using the template. + +## Consequences + +This module will use a fixed set of tools as developed in `python-copier-template` +and can pull from this template to update the packaging to the latest techniques. + +As such, the developer environment may have changed, the following could be +different: + +- linting +- formatting +- pip venv setup +- CI/CD diff --git a/docs/explanations/decisions/COPYME b/docs/explanations/decisions/COPYME new file mode 100644 index 00000000..b466c792 --- /dev/null +++ b/docs/explanations/decisions/COPYME @@ -0,0 +1,19 @@ +# 3. Short descriptive title + +Date: Today's date + +## Status + +Accepted + +## Context + +Background to allow us to make the decision, to show how we arrived at our conclusions. + +## Decision + +What decision we made. + +## Consequences + +What we will do as a result of this decision. diff --git a/docs/genindex.md b/docs/genindex.md new file mode 100644 index 00000000..73f1191b --- /dev/null +++ b/docs/genindex.md @@ -0,0 +1,3 @@ +# Index + + diff --git a/docs/genindex.rst b/docs/genindex.rst deleted file mode 100644 index 93eb8b29..00000000 --- a/docs/genindex.rst +++ /dev/null @@ -1,5 +0,0 @@ -API Index -========= - -.. - https://stackoverflow.com/a/42310803 diff --git a/docs/how-to.md b/docs/how-to.md new file mode 100644 index 00000000..6b161417 --- /dev/null +++ b/docs/how-to.md @@ -0,0 +1,10 @@ +# How-to Guides + +Practical step-by-step guides for the more experienced user. 
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+how-to/*
+```
diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md
new file mode 100644
index 00000000..6e419797
--- /dev/null
+++ b/docs/how-to/contribute.md
@@ -0,0 +1,2 @@
+```{include} ../../.github/CONTRIBUTING.md
+```
diff --git a/docs/images/dls-favicon.ico b/docs/images/dls-favicon.ico
deleted file mode 100644
index 9a11f508ef8aed28f14c5ce0d8408e1ec8b614a1..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 99678
[99678 bytes of base85-encoded favicon data omitted]
diff --git a/docs/images/dls-logo.svg b/docs/images/dls-logo.svg
index 0af1a177..4fcaa861 100644
--- a/docs/images/dls-logo.svg
+++ b/docs/images/dls-logo.svg
@@ -1,11 +1,11 @@
[11 lines of SVG markup not preserved in extraction; the logo is unchanged apart from reformatting and a final newline]
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 00000000..730b3fdc
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,56 @@
+---
+html_theme.sidebar_secondary.remove: true
+---
+
+```{include} ../README.md
+:end-before:
+```
+
+::::{grid} 2
+:gutter: 4
+
+:::{grid-item-card} {material-regular}`directions_walk;2em`
+```{toctree}
+:maxdepth: 2
+tutorials
+```
++++
+Tutorials for installation and typical usage. New users start here.
+:::
+
+:::{grid-item-card} {material-regular}`directions;2em`
+```{toctree}
+:maxdepth: 2
+how-to
+```
++++
+Practical step-by-step guides for the more experienced user.
+:::
+
+:::{grid-item-card} {material-regular}`info;2em`
+```{toctree}
+:maxdepth: 2
+explanations
+```
++++
+Explanations of how it works and why it works that way.
+:::
+
+:::{grid-item-card} {material-regular}`menu_book;2em`
+```{toctree}
+:maxdepth: 2
+reference
+```
++++
+Technical reference material including APIs and release notes.
+:::
+
+::::
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 75892944..00000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,29 +0,0 @@
-:html_theme.sidebar_secondary.remove:
-
-.. include:: ../README.rst
-    :end-before: when included in index.rst
-
-How the documentation is structured
------------------------------------
-
-The documentation is split into 2 sections:
-
-.. grid:: 2
-
-    .. grid-item-card:: :material-regular:`person;4em`
-        :link: user/index
-        :link-type: doc
-
-        The User Guide contains documentation on how to install and use event-model.
-
-    .. grid-item-card:: :material-regular:`code;4em`
-        :link: developer/index
-        :link-type: doc
-
-        The Developer Guide contains documentation on how to develop and contribute changes back to event-model.
-
-.. toctree::
-    :hidden:
-
-    user/index
-    developer/index
diff --git a/docs/reference.md b/docs/reference.md
new file mode 100644
index 00000000..65ab11cd
--- /dev/null
+++ b/docs/reference.md
@@ -0,0 +1,12 @@
+# Reference
+
+Technical reference material including APIs and release notes.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+API <_api/event_model>
+genindex
+Release Notes <https://github.com/bluesky/event-model/releases>
+```
diff --git a/docs/tutorials.md b/docs/tutorials.md
new file mode 100644
index 00000000..1fe66c54
--- /dev/null
+++ b/docs/tutorials.md
@@ -0,0 +1,10 @@
+# Tutorials
+
+Tutorials for installation and typical usage. New users start here.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+tutorials/*
+```
diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md
new file mode 100644
index 00000000..91722905
--- /dev/null
+++ b/docs/tutorials/installation.md
@@ -0,0 +1,42 @@
+# Installation
+
+## Check your version of python
+
+You will need python 3.10 or later.
You can check your version of python by +typing into a terminal: + +``` +$ python3 --version +``` + +## Create a virtual environment + +It is recommended that you install into a “virtual environment” so this +installation will not interfere with any existing Python software: + +``` +$ python3 -m venv /path/to/venv +$ source /path/to/venv/bin/activate +``` + +## Installing the library + +You can now use `pip` to install the library and its dependencies: + +``` +$ python3 -m pip install event-model +``` + +If you require a feature that is not currently released you can also install +from github: + +``` +$ python3 -m pip install git+https://github.com/bluesky/event-model.git +``` + +The library should now be installed and the commandline interface on your path. +You can check the version that has been installed by typing: + +``` +$ event-model --version +``` diff --git a/docs/user/explanations/docs-structure.rst b/docs/user/explanations/docs-structure.rst deleted file mode 100644 index f25a09ba..00000000 --- a/docs/user/explanations/docs-structure.rst +++ /dev/null @@ -1,18 +0,0 @@ -About the documentation ------------------------ - - :material-regular:`format_quote;2em` - - The Grand Unified Theory of Documentation - - -- David Laing - -There is a secret that needs to be understood in order to write good software -documentation: there isn't one thing called *documentation*, there are four. - -They are: *tutorials*, *how-to guides*, *technical reference* and *explanation*. -They represent four different purposes or functions, and require four different -approaches to their creation. Understanding the implications of this will help -improve most documentation - often immensely. - -`More information on this topic. `_ diff --git a/docs/user/how-to/run-container.rst b/docs/user/how-to/run-container.rst deleted file mode 100644 index 8c16ce8c..00000000 --- a/docs/user/how-to/run-container.rst +++ /dev/null @@ -1,15 +0,0 @@ -Run in a container -================== - -Pre-built containers with event-model and its dependencies already -installed are available on `Github Container Registry -`_. - -Starting the container ----------------------- - -To pull the container from github container registry and run:: - - $ docker run ghcr.io/bluesky/event-model:main --version - -To get a released version, use a numbered release instead of ``main``. diff --git a/docs/user/index.rst b/docs/user/index.rst deleted file mode 100644 index 2c94a0c0..00000000 --- a/docs/user/index.rst +++ /dev/null @@ -1,57 +0,0 @@ -User Guide -========== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_walk;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/installation - - +++ - - Tutorials for installation and typical usage. New users start here. - - .. grid-item-card:: :material-regular:`directions;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/run-container - - +++ - - Practical step-by-step guides for the more experienced user. - - .. grid-item-card:: :material-regular:`info;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/docs-structure - - +++ - - Explanations of how the library works and why it works that way. - - .. grid-item-card:: :material-regular:`menu_book;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/api - ../genindex - - +++ - - Technical reference material including APIs and release notes. 
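The installation tutorial above ends with `event-model --version`, but no `[project.scripts]` console entry point appears anywhere in this patch, so that command may not exist on a fresh install. A hedged fallback using the module entry point the patch does define in `__main__.py`:

```
$ python3 -m event_model --version
```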
diff --git a/docs/user/reference/api.rst b/docs/user/reference/api.rst deleted file mode 100644 index 491f7a21..00000000 --- a/docs/user/reference/api.rst +++ /dev/null @@ -1,14 +0,0 @@ -API -=== - -.. automodule:: event_model - - ``event_model`` - ----------------------------------- - -This is the internal API reference for event_model - -.. data:: event_model.__version__ - :type: str - - Version number as calculated by https://github.com/pypa/setuptools_scm diff --git a/docs/user/tutorials/installation.rst b/docs/user/tutorials/installation.rst deleted file mode 100644 index 7b95d051..00000000 --- a/docs/user/tutorials/installation.rst +++ /dev/null @@ -1,38 +0,0 @@ -Installation -============ - -Check your version of python ----------------------------- - -You will need python 3.8 or later. You can check your version of python by -typing into a terminal:: - - $ python3 --version - - -Create a virtual environment ----------------------------- - -It is recommended that you install into a “virtual environment” so this -installation will not interfere with any existing Python software:: - - $ python3 -m venv /path/to/venv - $ source /path/to/venv/bin/activate - - -Installing the library ----------------------- - -You can now use ``pip`` to install the library and its dependencies:: - - $ python3 -m pip install event-model - -If you require a feature that is not currently released you can also install -from github:: - - $ python3 -m pip install git+https://github.com/bluesky/event-model.git - -The library should now be installed and the commandline interface on your path. -You can check the version that has been installed by typing:: - - $ event-model --version diff --git a/pyproject.toml b/pyproject.toml index 33bfbe4f..9007023f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"] +requires = ["setuptools>=64", "setuptools_scm[toml]>=8"] build-backend = "setuptools.build_meta" [project] @@ -7,25 +7,22 @@ name = "event-model" classifiers = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] description = "" -dependencies = [ - "typing-extensions;python_version<'3.8'", -] # Add project dependencies here, e.g. ["click", "numpy"] +dependencies = [] # Add project dependencies here, e.g. 
["click", "numpy"] dynamic = ["version"] license.file = "LICENSE" -readme = "README.rst" -requires-python = ">=3.7" +readme = "README.md" +requires-python = ">=3.10" [project.optional-dependencies] dev = [ - "black", + "copier", "mypy", + "myst-parser", "pipdeptree", "pre-commit", "pydata-sphinx-theme>=0.12", @@ -51,7 +48,7 @@ name = "Eva Lott" [tool.setuptools_scm] -write_to = "src/event_model/_version.py" +version_file = "src/event_model/_version.py" [tool.mypy] ignore_missing_imports = true # Ignore missing stubs in imported modules @@ -80,7 +77,7 @@ legacy_tox_ini = """ [tox] skipsdist=True -[testenv:{pre-commit,mypy,pytest,docs}] +[testenv:{pre-commit,type-checking,tests,docs}] # Don't create a virtualenv for the command, requires tox-direct plugin direct = True passenv = * @@ -91,20 +88,28 @@ allowlist_externals = sphinx-build sphinx-autobuild commands = - pytest: pytest --cov=event_model --cov-report term --cov-report xml:cov.xml {posargs} - mypy: mypy src tests {posargs} - pre-commit: pre-commit run --all-files {posargs} + pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs} + type-checking: mypy src tests {posargs} + tests: pytest --cov=event_model --cov-report term --cov-report xml:cov.xml {posargs} docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html """ - [tool.ruff] src = ["src", "tests"] line-length = 88 -select = [ - "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 - "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e - "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f - "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w - "I001", # isort +lint.select = [ + "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b + "C4", # flake8-comprehensions - https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 + "E", # pycodestyle errors - https://docs.astral.sh/ruff/rules/#error-e + "F", # pyflakes rules - https://docs.astral.sh/ruff/rules/#pyflakes-f + "W", # pycodestyle warnings - https://docs.astral.sh/ruff/rules/#warning-w + "I", # isort - https://docs.astral.sh/ruff/rules/#isort-i + "UP", # pyupgrade - https://docs.astral.sh/ruff/rules/#pyupgrade-up + "SLF", # self - https://docs.astral.sh/ruff/settings/#lintflake8-self ] + +[tool.ruff.lint.per-file-ignores] +# By default, private member access is allowed in tests +# See https://github.com/DiamondLightSource/python-copier-template/issues/154 +# Remove this line to forbid private member access in tests +"tests/**/*" = ["SLF001"] diff --git a/src/event_model/__init__.py b/src/event_model/__init__.py index 33a32fd9..a2ffbf36 100644 --- a/src/event_model/__init__.py +++ b/src/event_model/__init__.py @@ -1,11 +1,11 @@ -import sys +"""Top level API. -if sys.version_info < (3, 8): - from importlib_metadata import version # noqa -else: - from importlib.metadata import version # noqa +.. data:: __version__ + :type: str -__version__ = version("event-model") -del version + Version number as calculated by https://github.com/pypa/setuptools_scm +""" + +from ._version import __version__ __all__ = ["__version__"] diff --git a/src/event_model/__main__.py b/src/event_model/__main__.py index 0564d154..e3e537c9 100644 --- a/src/event_model/__main__.py +++ b/src/event_model/__main__.py @@ -1,16 +1,24 @@ +"""Interface for ``python -m event_model``.""" + from argparse import ArgumentParser +from collections.abc import Sequence from . 
import __version__ __all__ = ["main"] -def main(args=None): +def main(args: Sequence[str] | None = None) -> None: + """Argument parser for the CLI.""" parser = ArgumentParser() - parser.add_argument("-v", "--version", action="version", version=__version__) - args = parser.parse_args(args) + parser.add_argument( + "-v", + "--version", + action="version", + version=__version__, + ) + parser.parse_args(args) -# test with: python -m event_model if __name__ == "__main__": main() diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..ebe9c10f --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,21 @@ +import os +from typing import Any + +import pytest + +# Prevent pytest from catching exceptions when debugging in vscode so that break on +# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) +if os.getenv("PYTEST_RAISE", "0") == "1": + + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call: pytest.CallInfo[Any]): + if call.excinfo is not None: + raise call.excinfo.value + else: + raise RuntimeError( + f"{call} has no exception data, an unknown error has occurred" + ) + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo: pytest.ExceptionInfo[Any]): + raise excinfo.value From c03c203f4bd840828004b174e18552027fb8f879 Mon Sep 17 00:00:00 2001 From: Eva Date: Fri, 18 Oct 2024 09:49:34 +0100 Subject: [PATCH 3/7] made `src` directory and fixed pyproject.toml --- .copier-answers.yml | 6 +- README.md | 2 +- event_model/__init__.py | 3109 ----------------- event_model/__main__.py | 16 - pyproject.toml | 43 +- src/event_model/__init__.py | 3106 +++++++++++++++- src/event_model/__main__.py | 15 +- .../event_model}/documents/__init__.py | 0 .../event_model}/documents/datum.py | 0 .../event_model}/documents/datum_page.py | 0 .../event_model}/documents/event.py | 0 .../documents/event_descriptor.py | 0 .../event_model}/documents/event_page.py | 0 .../documents/generate/__main__.py | 6 +- .../documents/generate/type_wrapper.py | 0 .../documents/generate/typeddict_to_schema.py | 0 .../event_model}/documents/resource.py | 0 .../event_model}/documents/run_start.py | 0 .../event_model}/documents/run_stop.py | 0 .../event_model}/documents/stream_datum.py | 0 .../event_model}/documents/stream_resource.py | 0 .../event_model}/schemas/bulk_datum.json | 0 .../event_model}/schemas/bulk_events.json | 0 .../event_model}/schemas/datum.json | 0 .../event_model}/schemas/datum_page.json | 0 .../event_model}/schemas/event.json | 0 .../schemas/event_descriptor.json | 0 .../event_model}/schemas/event_page.json | 0 .../event_model}/schemas/resource.json | 0 .../event_model}/schemas/run_start.json | 0 .../event_model}/schemas/run_stop.json | 0 .../event_model}/schemas/stream_datum.json | 0 .../event_model}/schemas/stream_resource.json | 0 .../event_model}/tests/__init__.py | 0 .../event_model}/tests/test_auth.py | 0 .../event_model}/tests/test_em.py | 0 .../event_model}/tests/test_emit.py | 0 .../event_model}/tests/test_filler.py | 0 .../event_model}/tests/test_projections.py | 0 .../event_model}/tests/test_run_router.py | 0 .../tests/test_schema_generation.py | 0 tests/conftest.py | 21 - tests/test_cli.py | 9 - 43 files changed, 3142 insertions(+), 3191 deletions(-) delete mode 100644 event_model/__init__.py delete mode 100644 event_model/__main__.py rename {event_model => src/event_model}/documents/__init__.py (100%) rename {event_model => src/event_model}/documents/datum.py (100%) rename {event_model => 
src/event_model}/documents/datum_page.py (100%) rename {event_model => src/event_model}/documents/event.py (100%) rename {event_model => src/event_model}/documents/event_descriptor.py (100%) rename {event_model => src/event_model}/documents/event_page.py (100%) rename {event_model => src/event_model}/documents/generate/__main__.py (93%) rename {event_model => src/event_model}/documents/generate/type_wrapper.py (100%) rename {event_model => src/event_model}/documents/generate/typeddict_to_schema.py (100%) rename {event_model => src/event_model}/documents/resource.py (100%) rename {event_model => src/event_model}/documents/run_start.py (100%) rename {event_model => src/event_model}/documents/run_stop.py (100%) rename {event_model => src/event_model}/documents/stream_datum.py (100%) rename {event_model => src/event_model}/documents/stream_resource.py (100%) rename {event_model => src/event_model}/schemas/bulk_datum.json (100%) rename {event_model => src/event_model}/schemas/bulk_events.json (100%) rename {event_model => src/event_model}/schemas/datum.json (100%) rename {event_model => src/event_model}/schemas/datum_page.json (100%) rename {event_model => src/event_model}/schemas/event.json (100%) rename {event_model => src/event_model}/schemas/event_descriptor.json (100%) rename {event_model => src/event_model}/schemas/event_page.json (100%) rename {event_model => src/event_model}/schemas/resource.json (100%) rename {event_model => src/event_model}/schemas/run_start.json (100%) rename {event_model => src/event_model}/schemas/run_stop.json (100%) rename {event_model => src/event_model}/schemas/stream_datum.json (100%) rename {event_model => src/event_model}/schemas/stream_resource.json (100%) rename {event_model => src/event_model}/tests/__init__.py (100%) rename {event_model => src/event_model}/tests/test_auth.py (100%) rename {event_model => src/event_model}/tests/test_em.py (100%) rename {event_model => src/event_model}/tests/test_emit.py (100%) rename {event_model => src/event_model}/tests/test_filler.py (100%) rename {event_model => src/event_model}/tests/test_projections.py (100%) rename {event_model => src/event_model}/tests/test_run_router.py (100%) rename {event_model => src/event_model}/tests/test_schema_generation.py (100%) delete mode 100644 tests/conftest.py delete mode 100644 tests/test_cli.py diff --git a/.copier-answers.yml b/.copier-answers.yml index 8b43cc29..3cac8f56 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,9 +1,9 @@ # Changes here will be overwritten by Copier _commit: 2.3.0 _src_path: gh:DiamondLightSource/python-copier-template -author_email: eva.lott@diamond.ac.uk -author_name: Eva Lott -description: '' +author_email: dallan@bnl.gov +author_name: Brookhaven National Lab +description: Data model used by the bluesky ecosystem. distribution_name: event-model docker: false docs_type: sphinx diff --git a/README.md b/README.md index 3381b43d..11e9d693 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ # event_model - +Data model used by the bluesky ecosystem. This is where you should write a short paragraph that describes what your module does, how it does it, and why people should use it. 
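As a sketch, the renames above leave the package laid out like this (directory roles inferred from the file names; nothing here is added by the patch itself):

```
src/
└── event_model/
    ├── __init__.py    # full public API, moved from event_model/
    ├── __main__.py    # entry point for python -m event_model
    ├── documents/     # TypedDict document definitions
    ├── schemas/       # JSON schemas, one per document type
    └── tests/         # tests now ship inside the package
```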
diff --git a/event_model/__init__.py b/event_model/__init__.py deleted file mode 100644 index ac3c466a..00000000 --- a/event_model/__init__.py +++ /dev/null @@ -1,3109 +0,0 @@ -import collections.abc -import copy -import inspect -import itertools -import json -import os -import sys -import threading -import time as ttime -import uuid -import warnings -import weakref -from collections import defaultdict, deque -from dataclasses import dataclass -from enum import Enum -from importlib.metadata import version as importlib_version -from typing import ( - Any, - Callable, - Dict, - Generator, - Iterable, - Iterator, - List, - Optional, - Tuple, - Type, - Union, - cast, - no_type_check, -) - -import jsonschema -import numpy -from typing_extensions import Literal - -from .documents.datum import Datum -from .documents.datum_page import DatumPage -from .documents.event import Event, PartialEvent -from .documents.event_descriptor import ( - Configuration, - DataKey, - Dtype, - EventDescriptor, - Limits, - LimitsRange, - PerObjectHint, -) -from .documents.event_page import EventPage, PartialEventPage -from .documents.resource import PartialResource, Resource -from .documents.run_start import Calculation, Hints, Projection, Projections, RunStart -from .documents.run_stop import RunStop -from .documents.stream_datum import StreamDatum, StreamRange -from .documents.stream_resource import StreamResource - -if sys.version_info < (3, 9): - import importlib_resources -else: - import importlib.resources as importlib_resources - -__version__ = importlib_version("event-model") - -del importlib_version - - -__all__ = [ - # Document types - "Datum", - "DatumPage", - "Event", - "PartialEvent", - "Configuration", - "DataKey", - "Dtype", - "EventDescriptor", - "Limits", - "LimitsRange", - "PerObjectHint", - "EventPage", - "PartialEventPage", - "PartialResource", - "Resource", - "Calculation", - "Hints", - "Projection", - "Projections", - "RunStart", - "RunStop", - "StreamDatum", - "StreamRange", - "StreamResource", - # Schema and version - "DocumentNames", - "schemas", - "schema_validators", - "compose_run", - "__version__", -] - - -class DocumentNames(Enum): - stop = "stop" - start = "start" - descriptor = "descriptor" - event = "event" - datum = "datum" - resource = "resource" - event_page = "event_page" - datum_page = "datum_page" - stream_resource = "stream_resource" - stream_datum = "stream_datum" - bulk_datum = "bulk_datum" # deprecated - bulk_events = "bulk_events" # deprecated - - -class DocumentRouter: - """ - Route each document by type to a corresponding method. - - When an instance is called with a document type and a document like:: - - router(name, doc) - - the document is passed to the method of the corresponding name, as in:: - - getattr(router, name)(doc) - - The method is expected to return ``None`` or a valid document of the same - type. It may be the original instance (passed through), a copy, or a - different dict altogether. - - Finally, the call to ``router(name, doc)`` returns:: - - (name, getattr(router, name)(doc)) - - Parameters - ---------- - emit: callable, optional - Expected signature ``f(name, doc)`` - """ - - def __init__(self, *, emit: Optional[Callable] = None) -> None: - # Put in some extra effort to validate `emit` carefully, because if - # this is used incorrectly the resultant errors can be confusing. 
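
Editor's note: a minimal sketch of the routing behaviour described in the DocumentRouter docstring above. A subclass overrides only the methods for the document types it cares about, and `router(name, doc)` returns the (possibly transformed) document. The `Printer` class name is made up for illustration:

from event_model import DocumentRouter


class Printer(DocumentRouter):
    def start(self, doc):
        print("run started:", doc["uid"])
        return doc

    def stop(self, doc):
        print("run ended:", doc["run_start"])
        return doc


router = Printer()
# Returns ("start", doc); the doc may be the same instance or a new dict.
name, out = router("start", {"uid": "abc", "time": 0.0})
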
- - self._emit_ref: Optional[Callable] = None - - if emit is not None: - if not callable(emit): - raise ValueError("emit must be a callable") - sig = inspect.signature(emit) - try: - # Does this function accept two positional arguments? - sig.bind(None, None) - except TypeError: - raise ValueError( - "emit must accept two positional arguments, name and doc" - ) - # Stash a weak reference to `emit`. - if inspect.ismethod(emit): - self._emit_ref = weakref.WeakMethod(emit) - else: - self._emit_ref = weakref.ref(emit) - - def emit(self, name: str, doc: dict) -> None: - """ - Emit to the callable provided an instantiation time, if any. - """ - if self._emit_ref is not None: - # Call the weakref. - emit = self._emit_ref() - if emit is not None: - emit(name, doc) - - def __call__( - self, name: str, doc: dict, validate: bool = False - ) -> Tuple[str, dict]: - """ - Process a document. - - Parameters - ---------- - name : string - doc : dict - validate : boolean - Apply jsonschema validation to the documents coming *out*. This is - False by default. - - Returns - ------- - name, output_doc : string, dict - The same name as what was passed in, and a doc that may be the same - instance as doc, a copy of doc, or a different dict altogether. - """ - return self._dispatch(name, doc, validate) - - def _dispatch(self, name: str, doc: dict, validate: bool) -> Tuple[str, dict]: - """ - Dispatch to the method corresponding to the `name`. - - Optionally validate that the result is still a valid document. - """ - output_doc = getattr(self, name)(doc) - - # If 'event' is not defined by the subclass but 'event_page' is, or - # vice versa, use that. And the same for 'datum_page' / 'datum. - if output_doc is NotImplemented: - if name == "event": - event_page = pack_event_page(cast(Event, doc)) - # Subclass' implementation of event_page may return a valid - # EventPage or None or NotImplemented. - output_event_page = self.event_page(event_page) - output_event_page = ( - output_event_page if output_event_page is not None else event_page - ) - if output_event_page is not NotImplemented: - (output_doc,) = unpack_event_page(output_event_page) - elif name == "datum": - datum_page = pack_datum_page(cast(Datum, doc)) - # Subclass' implementation of datum_page may return a valid - # DatumPage or None or NotImplemented. - output_datum_page = self.datum_page(datum_page) - output_datum_page = ( - output_datum_page if output_datum_page is not None else datum_page - ) - if output_datum_page is not NotImplemented: - (output_doc,) = unpack_datum_page(output_datum_page) - elif name == "event_page": - output_events = [] - for event in unpack_event_page(cast(EventPage, doc)): - # Subclass' implementation of event may return a valid - # Event or None or NotImplemented. - output_event = self.event(event) - output_event = output_event if output_event is not None else event - if output_event is NotImplemented: - break - output_events.append(output_event) - else: - output_doc = pack_event_page(*output_events) - elif name == "datum_page": - output_datums = [] - for datum in unpack_datum_page(cast(DatumPage, doc)): - # Subclass' implementation of datum may return a valid - # Datum or None or NotImplemented. 
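
Editor's note: a sketch of the fallback behaviour that `_dispatch` implements above. A router that only implements `event_page` still handles single `event` documents, because the dispatcher packs each event into a one-event page and unpacks the result. `CountingRouter` is a hypothetical name:

from event_model import DocumentRouter


class CountingRouter(DocumentRouter):
    def event_page(self, doc):
        # Called for both "event" and "event_page" documents; a single
        # event arrives packed as a one-event page.
        print("page holding", len(doc["seq_num"]), "event(s)")
        return doc
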
- output_datum = self.datum(datum) - output_datum = output_datum if output_datum is not None else datum - if output_datum is NotImplemented: - break - output_datums.append(output_datum) - else: - output_doc = pack_datum_page(*output_datums) - # If we still don't find an implemented method by here, then pass the - # original document through. - if output_doc is NotImplemented: - output_doc = doc - if validate: - schema_validators[getattr(DocumentNames, name)].validate(output_doc) - return (name, output_doc if output_doc is not None else doc) - - # The methods below return NotImplemented, a built-in Python constant. - # Note that it is not interchangeable with NotImplementedError. See docs at - # https://docs.python.org/3/library/constants.html#NotImplemented - # It is used here so that _dispatch, defined above, can detect whether a - # subclass implements event, event_page, both, or neither. This is similar - # to how Python uses NotImplemented in arithmetic operations, as described - # in the documentation. - - def start(self, doc: RunStart) -> Optional[RunStart]: - return NotImplemented - - def stop(self, doc: RunStop) -> Optional[RunStop]: - return NotImplemented - - def descriptor(self, doc: EventDescriptor) -> Optional[EventDescriptor]: - return NotImplemented - - def resource(self, doc: Resource) -> Optional[Resource]: - return NotImplemented - - def event(self, doc: Event) -> Event: - return NotImplemented - - def datum(self, doc: Datum) -> Datum: - return NotImplemented - - def event_page(self, doc: EventPage) -> EventPage: - return NotImplemented - - def datum_page(self, doc: DatumPage) -> Optional[DatumPage]: - return NotImplemented - - def stream_datum(self, doc: StreamDatum) -> Optional[StreamDatum]: - return NotImplemented - - def stream_resource(self, doc: StreamResource) -> Optional[StreamResource]: - return NotImplemented - - def bulk_events(self, doc: dict) -> None: - # Do not modify this in a subclass. Use event_page. - warnings.warn( - "The document type 'bulk_events' has been deprecated in favor of " - "'event_page', whose structure is a transpose of 'bulk_events'." - ) - for page in bulk_events_to_event_pages(doc): - self.event_page(page) - - def bulk_datum(self, doc: dict) -> None: - # Do not modify this in a subclass. Use event_page. - warnings.warn( - "The document type 'bulk_datum' has been deprecated in favor of " - "'datum_page', whose structure is a transpose of 'bulk_datum'." - ) - self.datum_page(bulk_datum_to_datum_page(doc)) - - -class SingleRunDocumentRouter(DocumentRouter): - """ - A DocumentRouter intended to process events from exactly one run. - """ - - def __init__(self) -> None: - super().__init__() - self._start_doc: Optional[dict] = None - self._descriptors: dict = dict() - - def __call__( - self, name: str, doc: dict, validate: bool = False - ) -> Tuple[str, dict]: - """ - Process a document. - - Also, track of the start document and descriptor documents - passed to this SingleRunDocumentRouter in caches. - - Parameters - ---------- - name : string - doc : dict - validate : boolean - Apply jsonschema validation to the documents coming *out*. This is - False by default. - - Returns - ------- - name, output_doc : string, dict - The same name as what was passed in, and a doc that may be the same - instance as doc, a copy of doc, or a different dict altogether. 
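
Editor's note: per the `validate` parameter documented above, validation is applied to the document coming *out* of the router. A minimal sketch using the base class, whose unimplemented methods pass documents through unchanged:

from event_model import DocumentRouter

router = DocumentRouter()
# The base class implements no handlers, so the start document passes
# through and is validated against the run_start schema on the way out.
name, out = router("start", {"uid": "abc", "time": 0.0}, validate=True)
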
- """ - if name == "start": - if self._start_doc is None: - self._start_doc = doc - else: - raise EventModelValueError( - "SingleRunDocumentRouter associated with start document " - f'{self._start_doc["uid"]} ' - f'received a second start document with uid {doc["uid"]}' - ) - elif name == "descriptor": - assert isinstance(self._start_doc, dict) - if doc["run_start"] == self._start_doc["uid"]: - self._descriptors[doc["uid"]] = doc - else: - raise EventModelValueError( - "SingleRunDocumentRouter associated with start document " - f'{self._start_doc["uid"]} ' - f'received a descriptor {doc["uid"]} associated with ' - f'start document {doc["run_start"]}' - ) - # Defer to superclass for dispatch/processing. - return super().__call__(name, doc, validate=validate) - - def get_start(self) -> dict: - """Convenience method returning the start document for the associated run. - - If no start document has been processed EventModelError will be raised. - - Returns - ------- - start document : dict - """ - if self._start_doc is None: - raise EventModelError( - "SingleRunDocumentRouter has not processed a start document yet" - ) - - return self._start_doc - - def get_descriptor(self, doc: dict) -> EventDescriptor: - """Convenience method returning the descriptor associated with the - specified document. - - Parameters - ---------- - doc : dict - event-model document - - Returns - ------- - descriptor document : EventDescriptor - """ - if "descriptor" not in doc: - raise EventModelValueError( - f"document is not associated with a descriptor:\n{doc}" - ) - elif doc["descriptor"] not in self._descriptors: - raise EventModelValueError( - "SingleRunDocumentRouter has not processed a descriptor with " - f'uid {doc["descriptor"]}' - ) - - return self._descriptors[doc["descriptor"]] - - def get_stream_name(self, doc: dict) -> str: - """Convenience method returning the name of the stream for the - specified document. - - Parameters - ---------- - doc : dict - event-model document - - Returns - ------- - stream name : str - """ - return str(self.get_descriptor(doc).get("name")) - - -class HandlerRegistryView(collections.abc.Mapping): - def __init__(self, handler_registry: dict) -> None: - self._handler_registry = handler_registry - - def __repr__(self) -> str: - return f"HandlerRegistryView({self._handler_registry!r})" - - def __getitem__(self, key: str) -> str: - return self._handler_registry[key] - - def __iter__(self) -> Generator: - yield from self._handler_registry - - def __len__(self) -> int: - return len(self._handler_registry) - - def __setitem__(self, key: str, val: Any) -> None: - raise EventModelTypeError( - "The handler registry cannot be edited directly. " - "Instead, use the method Filler.register_handler." - ) - - def __delitem__(self, key: str) -> None: - raise EventModelTypeError( - "The handler registry cannot be edited directly. " - "Instead, use the method Filler.deregister_handler." - ) - - -# A "coercion funcion" is a hook that Filler can use to, for example, ensure -# all the external data read in my handlers is an *actual* numpy array as -# opposed to some other array-like such as h5py.Dataset or dask.array.Array, -# or wrap every result is dask.array.from_array(...). -# -# It has access to the handler_class as it is registered and to some state -# provided by the Filler (more on that below). It is expected to return -# something that is API-compatible with handler_class. That might be -# handler_class itself (a no-op), a subclass, or an altogether different class -# with the same API. 
See example below. -# -# The "state provided by the Filler", mentioned above is passed into the -# coercion functions below as ``filler_state``. It is a namespace containing -# information that may be useful for the coercion functions. Currently, it has -# ``filler_state.descriptor`` and ``filler_state.key``. More may be added in -# the future if the need arises. Ultimately, this is necessary because Resource -# documents don't know the shape and dtype of the data that they reference. -# That situation could be improved in the future; to some degree this is a -# work-around. -# -# As an implementation detail, the ``filler_state`` is a ``threading.local`` -# object to ensure that filling is thread-safe. -# -# Third-party libraries can register custom coercion options via the -# register_coercion function below. For example, databroker uses this to -# register a 'delayed' option. This avoids introducing dependency on a specific -# delayed-computation framework (e.g. dask) in event-model itself. - - -def as_is(handler_class, filler_state) -> Type: - "A no-op coercion function that returns handler_class unchanged." - return handler_class - - -@no_type_check -def force_numpy(handler_class: Type, filler_state) -> Any: - "A coercion that makes handler_class.__call__ return actual numpy.ndarray." - - class Subclass(handler_class): - def __call__(self, *args, **kwargs): - raw_result = super().__call__(*args, **kwargs) - result_as_array = numpy.asarray(raw_result) - return result_as_array - - Subclass.__name__ = f"Subclassed{handler_class.__name__}" - Subclass.__qualname__ = f"Subclassed{handler_class.__qualname__}" - return Subclass - - -# maps coerce option to corresponding coercion function -_coercion_registry = {"as_is": as_is, "force_numpy": force_numpy} - - -def register_coercion(name: str, func: Callable, overwrite: bool = False) -> None: - """ - Register a new option for :class:`Filler`'s ``coerce`` argument. - - This is an advanced feature. See source code for comments and examples. - - Parameters - ---------- - name : string - The new value for ``coerce`` that will invoke this function. - func : callable - Expected signature:: - - func(filler, handler_class) -> handler_class - overwrite : boolean, optional - False by default. Name collissions will raise ``EventModelValueError`` - unless this is set to ``True``. - """ - - if name in _coercion_registry and not overwrite: - # If we are re-registering the same object, there is no problem. - original = _coercion_registry[name] - if original is func: - return - raise EventModelValueError( - f"The coercion function {func} could not be registered for the " - f"name {name} because {_coercion_registry[name]} is already " - f"registered. Use overwrite=True to force it." - ) - _coercion_registry[name] = func - - -register_coersion = register_coercion # back-compat for a spelling mistake - - -class Filler(DocumentRouter): - """Pass documents through, loading any externally-referenced data. - - It is recommended to use the Filler as a context manager. Because the - Filler manages caches of potentially expensive resources (e.g. large data - in memory) managing its lifecycle is important. If used as a context - manager, it will drop references to its caches upon exit from the - context. Unless the user holds additional references to those caches, they - will be garbage collected. 
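
Editor's note: a sketch of registering a custom coercion option, mirroring `force_numpy` above. The name `"as_list"` and the wrapper are made up for illustration; note that the coercion callables defined above take `(handler_class, filler_state)`:

import numpy

from event_model import register_coercion


def as_list(handler_class, filler_state):
    class Subclass(handler_class):
        def __call__(self, *args, **kwargs):
            # Coerce whatever the handler returns into a plain list.
            return numpy.asarray(super().__call__(*args, **kwargs)).tolist()

    return Subclass


register_coercion("as_list", as_list)
# Filler(handler_registry, coerce="as_list") will now wrap every handler.
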
- - But for some applications, such as taking multiple passes over the same - data, it may be useful to keep a longer-lived Filler instance and then - manually delete it when finished. - - See Examples below. - - Parameters - ---------- - handler_registry : dict - Maps each 'spec' (a string identifying a given type or external - resource) to a handler class. - - A 'handler class' may be any callable with the signature:: - - handler_class(full_path, **resource_kwargs) - - It is expected to return an object, a 'handler instance', which is also - callable and has the following signature:: - - handler_instance(**datum_kwargs) - - As the names 'handler class' and 'handler instance' suggest, this is - typically implemented using a class that implements ``__init__`` and - ``__call__``, with the respective signatures. But in general it may be - any callable-that-returns-a-callable. - include : Iterable - The set of fields to fill. By default all unfilled fields are filled. - This parameter is mutually incompatible with the ``exclude`` parameter. - exclude : Iterable - The set of fields to skip filling. By default all unfilled fields are - filled. This parameter is mutually incompatible with the ``include`` - parameter. - root_map: dict - str -> str mapping to account for temporarily moved/copied/remounted - files. Any resources which have a ``root`` in ``root_map`` will be - loaded using the mapped ``root``. - coerce : {'as_is', 'numpy'} - Default is 'as_is'. Other options (e.g. 'delayed') may be registered by - external packages at runtime. - handler_cache : dict, optional - A cache of handler instances. If None, a dict is used. - resource_cache : dict, optional - A cache of Resource documents. If None, a dict is used. - datum_cache : dict, optional - A cache of Datum documents. If None, a dict is used. - descriptor_cache : dict, optional - A cache of EventDescriptor documents. If None, a dict is used. - stream_resource_cache : dict, optional - A cache of StreamResource documents. If None, a dict is used. - stream_datum_cache : dict, optional - A cache of StreamDatum documents. If None, a dict is used. - retry_intervals : Iterable, optional - If data is not found on the first try, there may a race between the - I/O systems creating the external data and this stream of Documents - that reference it. If Filler encounters an ``IOError`` it will wait a - bit and retry. This list specifies how long to sleep (in seconds) - between subsequent attempts. Set to ``None`` to try only once before - raising ``DataNotAccessible``. A subclass may catch this exception and - implement a different retry mechanism --- for example using a different - implementation of sleep from an async framework. But by default, a - sequence of several retries with increasing sleep intervals is used. - The default sequence should not be considered stable; it may change at - any time as the authors tune it. - - Raises - ------ - DataNotAccessible - If an IOError is raised when loading the data after the configured - number of attempts. See the ``retry_intervals`` parameter for details. - - Examples - -------- - A Filler may be used as a context manager. - - >>> with Filler(handler_registry) as filler: - ... for name, doc in stream: - ... filler(name, doc) # mutates doc in place - ... # Do some analysis or export with name and doc. - - Or as a long-lived object. - - >>> f = Filler(handler_registry) - >>> for name, doc in stream: - ... filler(name, doc) # mutates doc in place - ... # Do some analysis or export with name and doc. 
- ... - >>> del filler # Free up memory from potentially large caches. - """ - - def __init__( - self, - handler_registry: dict, - *, - include: Optional[Iterable] = None, - exclude: Optional[Iterable] = None, - root_map: Optional[dict] = None, - coerce: str = "as_is", - handler_cache: Optional[dict] = None, - resource_cache: Optional[dict] = None, - datum_cache: Optional[dict] = None, - descriptor_cache: Optional[dict] = None, - stream_resource_cache: Optional[dict] = None, - stream_datum_cache: Optional[dict] = None, - inplace: Optional[bool] = None, - retry_intervals: List = [ - 0.001, - 0.002, - 0.004, - 0.008, - 0.016, - 0.032, - 0.064, - 0.128, - 0.256, - 0.512, - 1.024, - ], - ) -> None: - if inplace is None: - self._inplace = True - warnings.warn( - "'inplace' argument not specified. It is recommended to " - "specify True or False. In future releases, 'inplace' " - "will default to False." - ) - else: - self._inplace = inplace - - if include is not None and exclude is not None: - raise EventModelValueError( - "The parameters `include` and `exclude` are mutually " - "incompatible. At least one must be left as the default, " - "None." - ) - try: - self._coercion_func = _coercion_registry[coerce] - except KeyError: - raise EventModelKeyError( - f"The option coerce={coerce!r} was given to event_model.Filler. " - f"The valid options are {set(_coercion_registry)}." - ) - self._coerce = coerce - - # See comments on coerision functions above for the use of - # _current_state, which is passed to coercion functions' `filler_state` - # parameter. - self._current_state = threading.local() - self._unpatched_handler_registry: dict = {} - self._handler_registry: dict = {} - for spec, handler_class in handler_registry.items(): - self.register_handler(spec, handler_class) - self.handler_registry = HandlerRegistryView(self._handler_registry) - if include is not None: - warnings.warn( - "In a future release of event-model, the argument `include` " - "will be removed from Filler.", - DeprecationWarning, - ) - self.include = include - if exclude is not None: - warnings.warn( - "In a future release of event-model, the argument `exclude` " - "will be removed from Filler.", - DeprecationWarning, - ) - self.exclude = exclude - self.root_map = root_map or {} - if handler_cache is None: - handler_cache = self.get_default_handler_cache() - if resource_cache is None: - resource_cache = self.get_default_resource_cache() - if datum_cache is None: - datum_cache = self.get_default_datum_cache() - if descriptor_cache is None: - descriptor_cache = self.get_default_descriptor_cache() - if stream_resource_cache is None: - stream_resource_cache = self.get_default_stream_resource_cache() - if stream_datum_cache is None: - stream_datum_cache = self.get_default_stream_datum_cache() - self._handler_cache = handler_cache - self._resource_cache = resource_cache - self._datum_cache = datum_cache - self._descriptor_cache = descriptor_cache - self._stream_resource_cache = stream_resource_cache - self._stream_datum_cache = stream_datum_cache - if retry_intervals is None: - retry_intervals = [] - self.retry_intervals = retry_intervals - self._closed = False - - def __eq__(self, other: Any) -> bool: - return ( - type(self) is type(other) - and self.inplace == other.inplace - and self._coerce == other._coerce - and self.include == other.include - and self.exclude == other.exclude - and self.root_map == other.root_map - and type(self._handler_cache) is type(other._handler_cache) - and type(self._resource_cache) is 
type(other._resource_cache) - and type(self._datum_cache) is type(other._datum_cache) - and type(self._descriptor_cache) is type(other._descriptor_cache) - and type(self._stream_resource_cache) is type(other._stream_resource_cache) - and type(self._stream_datum_cache) is type(other._stream_datum_cache) - and self.retry_intervals == other.retry_intervals - ) - - def __getstate__(self) -> dict: - return dict( - inplace=self._inplace, - coercion_func=self._coerce, - handler_registry=self._unpatched_handler_registry, - include=self.include, - exclude=self.exclude, - root_map=self.root_map, - handler_cache=self._handler_cache, - resource_cache=self._resource_cache, - datum_cache=self._datum_cache, - descriptor_cache=self._descriptor_cache, - stream_resource_cache=self._stream_resource_cache, - stream_datum_cache=self._stream_datum_cache, - retry_intervals=self.retry_intervals, - ) - - def __setstate__(self, d: dict) -> None: - self._inplace = d["inplace"] - self._coerce = d["coercion_func"] - - # See comments on coerision functions above for the use of - # _current_state, which is passed to coercion functions' `filler_state` - # parameter. - self._current_state = threading.local() - self._unpatched_handler_registry = {} - self._handler_registry = {} - for spec, handler_class in d["handler_registry"].items(): - self.register_handler(spec, handler_class) - self.handler_registry = HandlerRegistryView(self._handler_registry) - self.include = d["include"] - self.exclude = d["exclude"] - self.root_map = d["root_map"] - self._handler_cache = d["handler_cache"] - self._resource_cache = d["resource_cache"] - self._datum_cache = d["datum_cache"] - self._descriptor_cache = d["descriptor_cache"] - self._stream_resource_cache = d["stream_resource_cache"] - self._stream_datum_cache = d["stream_datum_cache"] - retry_intervals = d["retry_intervals"] - if retry_intervals is None: - retry_intervals = [] - self._retry_intervals = retry_intervals - self._closed = False - - @property - def retry_intervals(self) -> List: - return self._retry_intervals - - @retry_intervals.setter - def retry_intervals(self, value: Any) -> None: - self._retry_intervals = list(value) - - def __repr__(self) -> str: - return "" if not self._closed else "" - - @staticmethod - def get_default_resource_cache() -> dict: - return {} - - @staticmethod - def get_default_descriptor_cache() -> dict: - return {} - - @staticmethod - def get_default_datum_cache() -> dict: - return {} - - @staticmethod - def get_default_handler_cache() -> dict: - return {} - - @staticmethod - def get_default_stream_datum_cache() -> dict: - return {} - - @staticmethod - def get_default_stream_resource_cache() -> dict: - return {} - - @property - def inplace(self) -> bool: - return self._inplace - - def clone( - self, - handler_registry: Optional[dict] = None, - *, - root_map: Optional[dict] = None, - coerce: Optional[str] = None, - handler_cache: Optional[dict] = None, - resource_cache: Optional[dict] = None, - datum_cache: Optional[dict] = None, - descriptor_cache: Optional[dict] = None, - stream_resource_cache: Optional[dict] = None, - stream_datum_cache: Optional[dict] = None, - inplace: Optional[bool] = None, - retry_intervals: Optional[List] = None, - ) -> "Filler": - """ - Create a new Filler instance from this one. - - By default it will be created with the same settings that this Filler - has. Individual settings may be overridden here. - - The clone does *not* share any caches or internal state with the - original. 
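
Editor's note: a sketch of `clone()` as described in the docstring above, which copies settings but not caches. `ToyHandler` is a hypothetical handler class used only for illustration:

from event_model import Filler


class ToyHandler:  # hypothetical handler, for illustration only
    def __init__(self, resource_path, **resource_kwargs): ...
    def __call__(self, **datum_kwargs): ...


f1 = Filler({"TOY": ToyHandler}, inplace=False)
f2 = f1.clone()              # same settings, but fresh, empty caches
f3 = f1.clone(inplace=True)  # any individual setting can be overridden
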
- """ - if handler_registry is None: - handler_registry = self._unpatched_handler_registry - if root_map is None: - root_map = self.root_map - if coerce is None: - coerce = self._coerce - if inplace is None: - inplace = self.inplace - if retry_intervals is None: - retry_intervals = self.retry_intervals - return Filler( - handler_registry, - root_map=root_map, - coerce=coerce, - handler_cache=handler_cache, - resource_cache=resource_cache, - datum_cache=datum_cache, - descriptor_cache=descriptor_cache, - stream_resource_cache=stream_resource_cache, - stream_datum_cache=stream_datum_cache, - inplace=inplace, - retry_intervals=retry_intervals, - ) - - def register_handler( - self, spec: str, handler: Any, overwrite: bool = False - ) -> None: - """ - Register a handler. - - Parameters - ---------- - spec: str - handler: Handler - overwrite: boolean, optional - False by default - - Raises - ------ - DuplicateHandler - If a handler is already registered for spec and overwrite is False - - See https://blueskyproject.io/event-model/external.html - """ - if (not overwrite) and (spec in self._handler_registry): - original = self._unpatched_handler_registry[spec] - if original is handler: - return - raise DuplicateHandler( - f"There is already a handler registered for the spec {spec!r}. " - f"Use overwrite=True to deregister the original.\n" - f"Original: {original}\n" - f"New: {handler}" - ) - - self.deregister_handler(spec) - # Keep a raw copy, unused above for identifying redundant registration. - self._unpatched_handler_registry[spec] = handler - # Let the 'coerce' argument to Filler.__init__ modify the handler if it - # wants to. - self._handler_registry[spec] = self._coercion_func(handler, self._current_state) - - def deregister_handler(self, spec: str) -> Any: - """ - Deregister a handler. - - If no handler is registered for this spec, it is no-op and returns - None. - - Parameters - ---------- - spec: str - - Returns - ------- - handler: Handler or None - - See https://blueskyproject.io/event-model/external.html - """ - handler = self._handler_registry.pop(spec, None) - if handler is not None: - self._unpatched_handler_registry.pop(spec) - for key in list(self._handler_cache): - resource_uid, spec_ = key - if spec == spec_: - del self._handler_cache[key] - return handler - - def resource(self, doc: Resource) -> Resource: - # Defer creating the handler instance until we actually need it, when - # we fill the first Event field that requires this Resource. - self._resource_cache[doc["uid"]] = doc - return doc - - # Handlers operate document-wise, so we'll explode pages into individual - # documents. - - def datum_page(self, doc: DatumPage) -> DatumPage: - datum = self.datum # Avoid attribute lookup in hot loop. - for datum_doc in unpack_datum_page(doc): - datum(datum_doc) - return doc - - def datum(self, doc: Datum) -> Datum: - self._datum_cache[doc["datum_id"]] = doc - return doc - - def stream_resource(self, doc: StreamResource) -> StreamResource: - self._stream_resource_cache[doc["uid"]] = doc - return doc - - def stream_datum(self, doc: StreamDatum) -> StreamDatum: - self._stream_datum_cache[doc["uid"]] = doc - return doc - - def event_page(self, doc: EventPage) -> EventPage: - # TODO We may be able to fill a page in place, and that may be more - # efficient than unpacking the page in to Events, filling them, and the - # re-packing a new page. But that seems tricky in general since the - # page may be implemented as a DataFrame or dict, etc. 
- filled_doc = self.fill_event_page( - doc, include=self.include, exclude=self.exclude - ) - return filled_doc - - def event(self, doc: Event) -> Event: - filled_doc = self.fill_event(doc, include=self.include, exclude=self.exclude) - return filled_doc - - def fill_event_page( - self, - doc: EventPage, - include: Optional[Iterable] = None, - exclude: Optional[Iterable] = None, - inplace: Optional[bool] = None, - ) -> EventPage: - filled_events = [] - for event_doc in unpack_event_page(doc): - filled_events.append( - self.fill_event( - event_doc, include=include, exclude=exclude, inplace=True - ) - ) - filled_doc = pack_event_page(*filled_events) - if inplace is None: - inplace = self._inplace - if inplace: - doc["data"] = filled_doc["data"] - doc["filled"] = filled_doc["filled"] - return doc - else: - return filled_doc - - def get_handler(self, resource: Resource) -> Any: - """ - Return a new Handler instance for this Resource. - - Parameters - ---------- - resource: Resource - - Returns - ------- - handler: Handler - """ - if self._closed: - raise EventModelRuntimeError( - "This Filler has been closed and is no longer usable." - ) - try: - handler_class = self.handler_registry[resource["spec"]] - except KeyError as err: - raise UndefinedAssetSpecification( - f"Resource document with uid {resource['uid']} " - f"refers to spec {resource['spec']!r} which is " - f"not defined in the Filler's " - f"handler registry." - ) from err - # Apply root_map. - resource_path = resource["resource_path"] - original_root = resource.get("root", "") - root = self.root_map.get(original_root, original_root) - if root: - resource_path = os.path.join(root, resource_path) - msg = ( - f"Error instantiating handler " - f"class {handler_class} " - f"with Resource document {resource}. " - ) - if root != original_root: - msg += ( - f"Its 'root' field was " - f"mapped from {original_root} to {root} by root_map." - ) - else: - msg += ( - f"Its 'root' field {original_root} was " f"*not* modified by root_map." - ) - error_to_raise = EventModelError(msg) - handler = _attempt_with_retries( - func=handler_class, - args=(resource_path,), - kwargs=resource["resource_kwargs"], - intervals=[0] + self.retry_intervals, - error_to_catch=IOError, - error_to_raise=error_to_raise, - ) - return handler - - def _get_handler_maybe_cached(self, resource: Resource) -> Any: - "Get a cached handler for this resource or make one and cache it." - key = (resource["uid"], resource["spec"]) - try: - handler = self._handler_cache[key] - except KeyError: - handler = self.get_handler(resource) - self._handler_cache[key] = handler - return handler - - def fill_event( - self, - doc, - include: Optional[Iterable] = None, - exclude: Optional[Iterable] = None, - inplace: Optional[bool] = None, - ) -> Any: - if inplace is None: - inplace = self._inplace - if inplace: - filled_doc = doc - else: - filled_doc = copy.deepcopy(doc) - descriptor = self._descriptor_cache[doc["descriptor"]] - from_datakeys = False - self._current_state.descriptor = descriptor - try: - needs_filling = {key for key, val in doc["filled"].items() if val is False} - except KeyError: - # This document is not telling us which, if any, keys are filled. - # Infer that none of the external data is filled. 
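
Editor's note: a sketch of the `root_map` behaviour applied in `get_handler` above. Resources whose `root` matches a key are opened from the mapped location instead, and the retry loop covers files that are slow to appear on disk. The paths and handler here are made up for illustration:

from event_model import Filler


class ToyHandler:  # hypothetical handler, for illustration only
    def __init__(self, resource_path, **resource_kwargs): ...
    def __call__(self, **datum_kwargs): ...


filler = Filler(
    {"TOY": ToyHandler},
    root_map={"/original/root": "/mnt/copied/root"},  # hypothetical paths
    inplace=False,
)
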
- needs_filling = { - key for key, val in descriptor["data_keys"].items() if "external" in val - } - from_datakeys = True - for key in needs_filling: - self._current_state.key = key - if exclude is not None and key in exclude: - continue - if include is not None and key not in include: - continue - try: - datum_id = doc["data"][key] - except KeyError as err: - if from_datakeys: - raise MismatchedDataKeys( - "The documents are not valid. Either because they " - "were recorded incorrectly in the first place, " - "corrupted since, or exercising a yet-undiscovered " - "bug in a reader. event['data'].keys() " - "must equal descriptor['data_keys'].keys(). " - f"event['data'].keys(): {doc['data'].keys()}, " - "descriptor['data_keys'].keys(): " - f"{descriptor['data_keys'].keys()}" - ) from err - else: - raise MismatchedDataKeys( - "The documents are not valid. Either because they " - "were recorded incorrectly in the first place, " - "corrupted since, or exercising a yet-undiscovered " - "bug in a reader. event['filled'].keys() " - "must be a subset of event['data'].keys(). " - f"event['data'].keys(): {doc['data'].keys()}, " - "event['filled'].keys(): " - f"{doc['filled'].keys()}" - ) from err - # Look up the cached Datum doc. - try: - datum_doc = self._datum_cache[datum_id] - except KeyError as err: - raise UnresolvableForeignKeyError( - datum_id, - f"Event with uid {doc['uid']} refers to unknown Datum " - f"datum_id {datum_id}", - ) from err - resource_uid = datum_doc["resource"] - # Look up the cached Resource. - try: - resource = self._resource_cache[resource_uid] - except KeyError as err: - raise UnresolvableForeignKeyError( - resource_uid, - f"Datum with id {datum_id} refers to unknown Resource " - f"uid {resource_uid}", - ) from err - self._current_state.resource = resource - self._current_state.datum = datum_doc - handler = self._get_handler_maybe_cached(resource) - error_to_raise = DataNotAccessible( - f"Filler was unable to load the data referenced by " - f"the Datum document {datum_doc} and the Resource " - f"document {resource}." - ) - payload = _attempt_with_retries( - func=handler, - args=(), - kwargs=datum_doc["datum_kwargs"], - intervals=[0] + self.retry_intervals, - error_to_catch=IOError, - error_to_raise=error_to_raise, - ) - # Here we are intentionally modifying doc in place. - filled_doc["data"][key] = payload - filled_doc["filled"][key] = datum_id - self._current_state.key = None - self._current_state.descriptor = None - self._current_state.resource = None - self._current_state.datum = None - return filled_doc - - def descriptor(self, doc: EventDescriptor) -> EventDescriptor: - self._descriptor_cache[doc["uid"]] = doc - return doc - - def __enter__(self): - return self - - @no_type_check - def close(self) -> None: - """ - Drop cached documents and handlers. - - They are *not* explicitly cleared, so if there are other references to - these caches they will remain. - """ - # Drop references to the caches. If the user holds another reference to - # them it's the user's problem to manage their lifecycle. If the user - # does not (e.g. they are the default caches) the gc will look after - # them. - self._closed = True - self._handler_cache = None - self._resource_cache = None - self._datum_cache = None - self._descriptor_cache = None - - @property - def closed(self) -> bool: - return self._closed - - def clear_handler_cache(self) -> None: - """ - Clear any cached handler instances. - - This operation may free significant memory, depending on the - implementation of the handlers. 
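
Editor's note: the lifecycle methods above (`close`, `closed`, and the cache-clearing helpers) are what make the context-manager pattern from the class docstring work. A minimal sketch:

from event_model import Filler

with Filler({}, inplace=False) as filler:
    pass  # process documents here

assert filler.closed
# Calling the closed filler again raises EventModelRuntimeError.
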
- """ - self._handler_cache.clear() - - def clear_document_caches(self) -> None: - """ - Clear any cached documents. - """ - self._resource_cache.clear() - self._descriptor_cache.clear() - self._datum_cache.clear() - - def __exit__(self, *exc_details) -> None: - self.close() - - def __call__( - self, name: str, doc: dict, validate: bool = False - ) -> Tuple[str, dict]: - if self._closed: - raise EventModelRuntimeError( - "This Filler has been closed and is no longer usable." - ) - return super().__call__(name, doc, validate) - - -class EventModelError(Exception): ... - - -def _attempt_with_retries( - func, - args, - kwargs, - intervals: Iterable, - error_to_catch: Type[OSError], - error_to_raise: EventModelError, -) -> Any: - """ - Return func(*args, **kwargs), using a retry loop. - - func, args, kwargs: self-explanatory - intervals: list - How long to wait (seconds) between each attempt including the first. - error_to_catch: Exception class - If this is raised, retry. - error_to_raise: Exception instance or class - If we run out of retries, raise this from the proximate error. - """ - error = None - for interval in intervals: - ttime.sleep(interval) - try: - return func(*args, **kwargs) - except error_to_catch as error_: - # The file may not be visible on the filesystem yet. - # Wait and try again. Stash the error in a variable - # that we can access later if we run out of attempts. - error = error_ - else: - break - else: - # We have used up all our attempts. There seems to be an - # actual problem. Raise specified error from the error stashed above. - raise error_to_raise from error - - -class NoFiller(Filler): - """ - This does not fill the documents; it merely validates them. - - It checks that all the references between the documents are resolvable and - *could* be filled. This is useful when the filling will be done later, as - a delayed computation, but we want to make sure in advance that we have all - the information that we will need when that computation occurs. - """ - - def __init__(self, *args, **kwargs) -> None: - # Do not make Filler make copies because we are not going to alter the - # documents anyway. - kwargs.setdefault("inplace", True) - super().__init__(*args, **kwargs) - - def fill_event_page( - self, - doc: EventPage, - include: Optional[Iterable] = None, - exclude: Optional[Iterable] = None, - *kwargs, - ) -> EventPage: - filled_events = [] - for event_doc in unpack_event_page(doc): - filled_events.append( - self.fill_event( - event_doc, include=include, exclude=exclude, inplace=True - ) - ) - filled_doc = pack_event_page(*filled_events) - return filled_doc - - def fill_event( - self, - doc: Event, - include: Optional[Iterable] = None, - exclude: Optional[Iterable] = None, - inplace: Optional[bool] = None, - ) -> Event: - descriptor = self._descriptor_cache[doc["descriptor"]] - from_datakeys = False - try: - needs_filling = {key for key, val in doc["filled"].items() if val is False} - except KeyError: - # This document is not telling us which, if any, keys are filled. - # Infer that none of the external data is filled. - needs_filling = { - key for key, val in descriptor["data_keys"].items() if "external" in val - } - from_datakeys = True - for key in needs_filling: - if exclude is not None and key in exclude: - continue - if include is not None and key not in include: - continue - try: - datum_id = doc["data"][key] - except KeyError as err: - if from_datakeys: - raise MismatchedDataKeys( - "The documents are not valid. 
Either because they " - "were recorded incorrectly in the first place, " - "corrupted since, or exercising a yet-undiscovered " - "bug in a reader. event['data'].keys() " - "must equal descriptor['data_keys'].keys(). " - f"event['data'].keys(): {doc['data'].keys()}, " - "descriptor['data_keys'].keys(): " - f"{descriptor['data_keys'].keys()}" - ) from err - else: - raise MismatchedDataKeys( - "The documents are not valid. Either because they " - "were recorded incorrectly in the first place, " - "corrupted since, or exercising a yet-undiscovered " - "bug in a reader. event['filled'].keys() " - "must be a subset of event['data'].keys(). " - f"event['data'].keys(): {doc['data'].keys()}, " - "event['filled'].keys(): " - f"{doc['filled'].keys()}" - ) from err - # Look up the cached Datum doc. - try: - datum_doc = self._datum_cache[datum_id] - except KeyError as err: - err_with_key = UnresolvableForeignKeyError( - datum_id, - f"Event with uid {doc['uid']} refers to unknown Datum " - f"datum_id {datum_id}", - ) - err_with_key.key = datum_id - raise err_with_key from err - resource_uid = datum_doc["resource"] - # Look up the cached Resource. - try: - self._resource_cache[resource_uid] - except KeyError as err: - raise UnresolvableForeignKeyError( - datum_id, - f"Datum with id {datum_id} refers to unknown Resource " - f"uid {resource_uid}", - ) from err - return doc - - -DOCS_PASSED_IN_1_14_0_WARNING = ( - "The callback {callback!r} raised {err!r} when " - "RunRouter passed it a {name!r} document. This is " - "probably because in earlier releases the RunRouter " - "expected its factory functions to forward the 'start' " - "document, but starting in event-model 1.14.0 the " - "RunRouter passes in the document, causing the " - "callback to receive it twice and potentially raise " - "an error. Update the factory function. In a future " - "release this warning will become an error." -) - - -class RunRouter(DocumentRouter): - """ - Routes documents, by run, to callbacks it creates from factory functions. - - A RunRouter is callable, and it has the signature ``router(name, doc)``, - suitable for subscribing to the RunEngine. - - It is configured with a list of factory functions that produce callbacks in - a two-layered scheme, described below. - - .. warning:: - - This is experimental. In a future release, it may be changed in a - backward-incompatible way or fully removed. - - Parameters - ---------- - factories : list - A list of callables with the signature:: - - factory('start', start_doc) -> List[Callbacks], List[SubFactories] - - which should return two lists, which may be empty. All items in the - first list should be callbacks --- callables with the signature:: - - callback(name, doc) - - that will receive that RunStart document and all subsequent documents - from the run including the RunStop document. All items in the second - list should be "subfactories" with the signature:: - - subfactory('descriptor', descriptor_doc) -> List[Callbacks] - - These will receive each of the EventDescriptor documents for the run, - as they arrive. They must return one list, which may be empty, - containing callbacks that will receive the RunStart document, that - EventDescriptor, all Events that reference that EventDescriptor and - finally the RunStop document for the run. - handler_registry : dict, optional - This is passed to the Filler or whatever class is given in the - filler_class parametr below. - - Maps each 'spec' (a string identifying a given type or external - resource) to a handler class. 
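
Editor's note: a sketch of the two-layer factory scheme described above; `print_factory` is a made-up name. The factory is called with each 'start' document and returns a list of callbacks plus a list of subfactories:

from event_model import RunRouter


def print_factory(name, start_doc):
    def callback(name, doc):
        print(name, doc.get("uid", ""))

    return [callback], []  # callbacks, subfactories


router = RunRouter([print_factory])
# router("start", start_doc); router("descriptor", descriptor_doc); ...
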
- - A 'handler class' may be any callable with the signature:: - - handler_class(full_path, **resource_kwargs) - - It is expected to return an object, a 'handler instance', which is also - callable and has the following signature:: - - handler_instance(**datum_kwargs) - - As the names 'handler class' and 'handler instance' suggest, this is - typically implemented using a class that implements ``__init__`` and - ``__call__``, with the respective signatures. But in general it may be - any callable-that-returns-a-callable. - root_map: dict, optional - This is passed to Filler or whatever class is given in the filler_class - parameter below. - - str -> str mapping to account for temporarily moved/copied/remounted - files. Any resources which have a ``root`` in ``root_map`` will be - loaded using the mapped ``root``. - filler_class: type - This is Filler by default. It can be a Filler subclass, - ``functools.partial(Filler, ...)``, or any class that provides the same - methods as ``DocumentRouter``. - fill_or_fail: boolean, optional - By default (False), if a document with a spec not in - ``handler_registry`` is encountered, let it pass through unfilled. But - if set to True, fill everything and raise - ``UndefinedAssetSpecification`` if some unknown spec is encountered. - """ - - def __init__( - self, - factories, - handler_registry: Optional[dict] = None, - *, - root_map: Optional[dict] = None, - filler_class: Type[Filler] = Filler, - fill_or_fail: bool = False, - ) -> None: - self.factories = factories - self.handler_registry = handler_registry or {} - self.filler_class = filler_class - self.fill_or_fail = fill_or_fail - self.root_map = root_map - - # Map RunStart UID to "subfactory" functions that want all - # EventDescriptors from that run. - self._subfactories: defaultdict = defaultdict(list) - - # Callbacks that want all the documents from a given run, keyed on - # RunStart UID. - self._factory_cbs_by_start: defaultdict = defaultdict(list) - - # Callbacks that want all the documents from a given run, keyed on - # each EventDescriptor UID in the run. - self._factory_cbs_by_descriptor: defaultdict = defaultdict(list) - - # Callbacks that want documents related to a given EventDescriptor, - # keyed on EventDescriptor UID. - self._subfactory_cbs_by_descriptor: defaultdict = defaultdict(list) - - # Callbacks that want documents related to a given EventDescriptor, - # keyed on the RunStart UID referenced by that EventDescriptor. - self._subfactory_cbs_by_start: defaultdict = defaultdict(list) - - # Map RunStart UID to RunStart document. This is used to send - # RunStart documents to subfactory callbacks. - self._start_to_start_doc: dict = dict() - - # Map RunStart UID to the list EventDescriptor. This is used to - # facilitate efficient cleanup of the caches above. - self._start_to_descriptors: defaultdict = defaultdict(list) - - # Map EventDescriptor UID to RunStart UID. This is used for looking up - # Fillers. - self._descriptor_to_start: dict = {} - - # Map Resource UID to RunStart UID. - self._resources: dict = {} - self._stream_resources: dict = {} - - # Old-style Resources that do not have a RunStart UID - self._unlabeled_resources: deque = deque(maxlen=10000) - - # Map Runstart UID to instances of self.filler_class. 
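
Editor's note: complementing the caches set up above, a sketch of a subfactory that subscribes callbacks only for a particular stream. The stream name "primary" is illustrative:

def subfactory(name, descriptor_doc):
    if descriptor_doc.get("name") == "primary":
        def callback(name, doc):
            print("primary stream:", name)

        return [callback]
    return []


def factory(name, start_doc):
    # No per-run callbacks; route per-descriptor via the subfactory.
    return [], [subfactory]
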
- self._fillers: dict = {} - - def __repr__(self): - return ( - "RunRouter([\n" - + "\n".join(f" {factory}" for factory in self.factories) - + "])" - ) - - def start(self, start_doc: RunStart) -> None: - uid = start_doc["uid"] - # If we get the same uid twice, weird things will happen, so check for - # that and give a nice error message. - if uid in self._start_to_start_doc: - if self._start_to_start_doc[uid] == start_doc: - raise ValueError( - "RunRouter received the same 'start' document twice:\n" - "{start_doc!r}" - ) - else: - raise ValueError( - "RunRouter received two 'start' documents with different " - "contents but the same uid:\n" - "First: {self._start_to_start_doc[uid]!r}\n" - "Second: {start_doc!r}" - ) - self._start_to_start_doc[uid] = start_doc - filler = self.filler_class( - self.handler_registry, root_map=self.root_map, inplace=False - ) - self._fillers[uid] = filler - # No need to pass the document to filler - # because Fillers do nothing with 'start'. - for factory in self.factories: - callbacks, subfactories = factory("start", start_doc) - for callback in callbacks: - try: - callback("start", start_doc) - except Exception as err: - warnings.warn( - DOCS_PASSED_IN_1_14_0_WARNING.format( - callback=callback, name="start", err=err - ) - ) - raise err - self._factory_cbs_by_start[uid].extend(callbacks) - self._subfactories[uid].extend(subfactories) - - def descriptor(self, descriptor_doc: EventDescriptor) -> None: - descriptor_uid = descriptor_doc["uid"] - start_uid = descriptor_doc["run_start"] - - # Keep track of the RunStart UID -> [EventDescriptor UIDs] mapping for - # purposes of cleanup in stop(). - self._start_to_descriptors[start_uid].append(descriptor_uid) - # Keep track of the EventDescriptor UID -> RunStartUID for filling - # purposes. - self._descriptor_to_start[descriptor_uid] = start_uid - - self._fillers[start_uid].descriptor(descriptor_doc) - # Apply all factory cbs for this run to this descriptor, and run them. - factory_cbs = self._factory_cbs_by_start[start_uid] - self._factory_cbs_by_descriptor[descriptor_uid].extend(factory_cbs) - for callback in factory_cbs: - callback("descriptor", descriptor_doc) - # Let all the subfactories add any relevant callbacks. 
- for subfactory in self._subfactories[start_uid]: - callbacks = subfactory("descriptor", descriptor_doc) - self._subfactory_cbs_by_start[start_uid].extend(callbacks) - self._subfactory_cbs_by_descriptor[descriptor_uid].extend(callbacks) - for callback in callbacks: - try: - start_doc = self._start_to_start_doc[start_uid] - callback("start", start_doc) - except Exception as err: - warnings.warn( - DOCS_PASSED_IN_1_14_0_WARNING.format( - callback=callback, name="start", err=err - ) - ) - raise err - try: - callback("descriptor", descriptor_doc) - except Exception as err: - warnings.warn( - DOCS_PASSED_IN_1_14_0_WARNING.format( - callback=callback, name="descriptor", err=err - ) - ) - raise err - - def event_page(self, doc: EventPage): - descriptor_uid = doc["descriptor"] - start_uid = self._descriptor_to_start[descriptor_uid] - try: - doc = self._fillers[start_uid].event_page(doc) - except UndefinedAssetSpecification: - if self.fill_or_fail: - raise - for callback in self._factory_cbs_by_descriptor[descriptor_uid]: - callback("event_page", doc) - for callback in self._subfactory_cbs_by_descriptor[descriptor_uid]: - callback("event_page", doc) - - def datum_page(self, doc: DatumPage) -> None: - resource_uid = doc["resource"] - try: - start_uid = self._resources[resource_uid] - except KeyError: - if resource_uid not in self._unlabeled_resources: - raise UnresolvableForeignKeyError( - resource_uid, - f"DatumPage refers to unknown Resource uid {resource_uid}", - ) - # Old Resources do not have a reference to a RunStart document, - # so in turn we cannot immediately tell which run these datum - # documents belong to. - # Fan them out to every run currently flowing through RunRouter. If - # they are not applicable they will do no harm, and this is - # expected to be an increasingly rare case. - for callbacks in self._factory_cbs_by_start.values(): - for callback in callbacks: - callback("datum_page", doc) - for callbacks in self._subfactory_cbs_by_start.values(): - for callback in callbacks: - callback("datum_page", doc) - for filler in self._fillers.values(): - filler.datum_page(doc) - else: - self._fillers[start_uid].datum_page(doc) - for callback in self._factory_cbs_by_start[start_uid]: - callback("datum_page", doc) - for callback in self._subfactory_cbs_by_start[start_uid]: - callback("datum_page", doc) - - def stream_datum(self, doc: StreamDatum) -> None: - resource_uid = doc["stream_resource"] - start_uid = self._stream_resources[resource_uid] - self._fillers[start_uid].stream_datum(doc) - for callback in self._factory_cbs_by_start[start_uid]: - callback("stream_datum", doc) - for callback in self._subfactory_cbs_by_start[start_uid]: - callback("stream_datum", doc) - - def resource(self, doc: Resource) -> None: - try: - start_uid = doc["run_start"] - except KeyError: - # Old Resources do not have a reference to a RunStart document. - # Fan them out to every run currently flowing through RunRouter. If - # they are not applicable they will do no harm, and this is - # expected to be an increasingly rare case. 
- self._unlabeled_resources.append(doc["uid"]) - for callbacks in self._factory_cbs_by_start.values(): - for callback in callbacks: - callback("resource", doc) - for callbacks in self._subfactory_cbs_by_start.values(): - for callback in callbacks: - callback("resource", doc) - for filler in self._fillers.values(): - filler.resource(doc) - else: - self._fillers[start_uid].resource(doc) - self._resources[doc["uid"]] = doc["run_start"] - for callback in self._factory_cbs_by_start[start_uid]: - callback("resource", doc) - for callback in self._subfactory_cbs_by_start[start_uid]: - callback("resource", doc) - - def stream_resource(self, doc: StreamResource) -> None: - start_uid = doc["run_start"] # No need for Try - self._fillers[start_uid].stream_resource(doc) - self._stream_resources[doc["uid"]] = doc["run_start"] - for callback in self._factory_cbs_by_start[start_uid]: - callback("stream_resource", doc) - for callback in self._subfactory_cbs_by_start[start_uid]: - callback("stream_resource", doc) - - def stop(self, doc: RunStop) -> None: - start_uid = doc["run_start"] - for callback in self._factory_cbs_by_start[start_uid]: - callback("stop", doc) - for callback in self._subfactory_cbs_by_start[start_uid]: - callback("stop", doc) - # Clean up references. - self._fillers.pop(start_uid, None) - self._subfactories.pop(start_uid, None) - self._factory_cbs_by_start.pop(start_uid, None) - self._subfactory_cbs_by_start.pop(start_uid, None) - for descriptor_uid in self._start_to_descriptors.pop(start_uid, ()): - self._descriptor_to_start.pop(descriptor_uid, None) - self._factory_cbs_by_descriptor.pop(descriptor_uid, None) - self._subfactory_cbs_by_descriptor.pop(descriptor_uid, None) - self._resources.pop(start_uid, None) - self._start_to_start_doc.pop(start_uid, None) - - -# Here we define subclasses of all of the built-in Python exception types (as -# needed, not a comprehensive list) so that all errors raised *directly* by -# event_model also inhereit from EventModelError as well as the appropriate -# built-in type. This means, for example, that `EventModelValueError` can be -# caught by `except ValueError:` or by `except EventModelError:`. This can be -# useful for higher-level libraries and for debugging. - - -class EventModelKeyError(EventModelError, KeyError): ... - - -class EventModelValueError(EventModelError, ValueError): ... - - -class EventModelRuntimeError(EventModelError, RuntimeError): ... - - -class EventModelTypeError(EventModelError, TypeError): ... - - -class EventModelValidationError(EventModelError): ... - - -class UnfilledData(EventModelError): - """raised when unfilled data is found""" - - ... - - -class UndefinedAssetSpecification(EventModelKeyError): - """raised when a resource spec is missing from the handler registry""" - - ... - - -class DataNotAccessible(EventModelError, IOError): - """raised when attempts to load data referenced by Datum document fail""" - - ... - - -class UnresolvableForeignKeyError(EventModelValueError): - """when we see a foreign before we see the thing to which it refers""" - - def __init__(self, key: Any, message: str) -> None: - self.key = key - self.message = message - - -class DuplicateHandler(EventModelRuntimeError): - """raised when a handler is already registered for a given spec""" - - ... - - -class InvalidData(EventModelError): - """raised when the data is invalid""" - - ... - - -class MismatchedDataKeys(InvalidData): - """ - Raised when any data keys structures are out of sync. 
-
-
-SCHEMA_PATH = "schemas"
-SCHEMA_NAMES = {
-    DocumentNames.start: "schemas/run_start.json",
-    DocumentNames.stop: "schemas/run_stop.json",
-    DocumentNames.event: "schemas/event.json",
-    DocumentNames.event_page: "schemas/event_page.json",
-    DocumentNames.descriptor: "schemas/event_descriptor.json",
-    DocumentNames.datum: "schemas/datum.json",
-    DocumentNames.datum_page: "schemas/datum_page.json",
-    DocumentNames.resource: "schemas/resource.json",
-    DocumentNames.stream_datum: "schemas/stream_datum.json",
-    DocumentNames.stream_resource: "schemas/stream_resource.json",
-    # DEPRECATED:
-    DocumentNames.bulk_events: "schemas/bulk_events.json",
-    DocumentNames.bulk_datum: "schemas/bulk_datum.json",
-}
-schemas = {}
-for name, filename in SCHEMA_NAMES.items():
-    ref = importlib_resources.files("event_model") / filename
-    with ref.open() as f:
-        schemas[name] = json.load(f)
-
-
-def _is_array(checker, instance):
-    return (
-        jsonschema.validators.Draft202012Validator.TYPE_CHECKER.is_type(
-            instance, "array"
-        )
-        or isinstance(instance, tuple)
-        or hasattr(instance, "__array__")
-    )
-
-
-_array_type_checker = jsonschema.validators.Draft202012Validator.TYPE_CHECKER.redefine(
-    "array", _is_array
-)
-
-_Validator = jsonschema.validators.extend(
-    jsonschema.validators.Draft202012Validator, type_checker=_array_type_checker
-)
-
-schema_validators = {
-    name: _Validator(schema=schema) for name, schema in schemas.items()
-}
-
-
-@dataclass
-class ComposeDatum:
-    resource: Resource
-    counter: Iterator
-
-    def __call__(self, datum_kwargs: Dict[str, Any], validate: bool = True) -> Datum:
-        resource_uid = self.resource["uid"]
-        doc = Datum(
-            resource=resource_uid,
-            datum_kwargs=datum_kwargs,
-            datum_id="{}/{}".format(resource_uid, next(self.counter)),
-        )
-        if validate:
-            schema_validators[DocumentNames.datum].validate(doc)
-        return doc
-
-
-def compose_datum(
-    *,
-    resource: Resource,
-    counter: Iterator,
-    datum_kwargs: Dict[str, Any],
-    validate: bool = True,
-) -> Datum:
-    """
-    Here for backwards compatibility; the Compose class is preferred.
-    """
-    return ComposeDatum(resource, counter)(datum_kwargs, validate=validate)
-
-
-@dataclass
-class ComposeDatumPage:
-    resource: Resource
-    counter: Iterator
-
-    def __call__(self, datum_kwargs: dict, validate: bool = True) -> DatumPage:
-        resource_uid = self.resource["uid"]
-        any_column, *_ = datum_kwargs.values()
-        N = len(any_column)
-        doc = DatumPage(
-            resource=resource_uid,
-            datum_kwargs=datum_kwargs,
-            datum_id=[
-                "{}/{}".format(resource_uid, next(self.counter)) for _ in range(N)
-            ],
-        )
-        if validate:
-            schema_validators[DocumentNames.datum_page].validate(doc)
-        return doc
-
-
-def compose_datum_page(
-    *,
-    resource: Resource,
-    counter: Iterator,
-    datum_kwargs: Dict[str, List[Any]],
-    validate: bool = True,
-) -> DatumPage:
-    """
-    Here for backwards compatibility; the Compose class is preferred.
-    """
-    return ComposeDatumPage(resource, counter)(datum_kwargs, validate=validate)
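To illustrate the pattern above (a sketch only; the Resource dict is a stand-in and schema validation is skipped), note how the two composers share one counter, so datum_ids stay unique across single datums and pages:

    import itertools

    from event_model import ComposeDatum, ComposeDatumPage

    resource = {"uid": "res-1"}  # hypothetical Resource; only "uid" is used here
    counter = itertools.count()
    datum = ComposeDatum(resource, counter)({"index": 0}, validate=False)
    page = ComposeDatumPage(resource, counter)({"index": [1, 2]}, validate=False)
    assert datum["datum_id"] == "res-1/0"
    assert page["datum_id"] == ["res-1/1", "res-1/2"]
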
- """ - return ComposeDatumPage(resource, counter)(datum_kwargs, validate=validate) - - -@dataclass -class ComposeResourceBundle: - resource_doc: Resource - compose_datum: ComposeDatum - compose_datum_page: ComposeDatumPage - - # iter for backwards compatibility - def __iter__(self) -> Iterator: - return iter( - ( - self.resource_doc, - self.compose_datum, - self.compose_datum_page, - ) - ) - - -PATH_SEMANTICS: Dict[str, Literal["posix", "windows"]] = { - "posix": "posix", - "nt": "windows", -} -default_path_semantics: Literal["posix", "windows"] = PATH_SEMANTICS[os.name] - - -@dataclass -class ComposeResource: - start: Optional[RunStart] - - def __call__( - self, - spec: str, - root: str, - resource_path: str, - resource_kwargs: Dict[str, Any], - path_semantics: Literal["posix", "windows"] = default_path_semantics, - uid: Optional[str] = None, - validate: bool = True, - ) -> ComposeResourceBundle: - if uid is None: - uid = str(uuid.uuid4()) - - doc = Resource( - path_semantics=path_semantics, - uid=uid, - spec=spec, - root=root, - resource_kwargs=resource_kwargs, - resource_path=resource_path, - ) - - if self.start: - doc["run_start"] = self.start["uid"] - - if validate: - schema_validators[DocumentNames.resource].validate(doc) - - counter = itertools.count() - return ComposeResourceBundle( - doc, - ComposeDatum(resource=doc, counter=counter), - ComposeDatumPage(resource=doc, counter=counter), - ) - - -def compose_resource( - *, - spec: str, - root: str, - resource_path: str, - resource_kwargs: Dict[str, Any], - path_semantics: Literal["posix", "windows"] = default_path_semantics, - start: Optional[RunStart] = None, - uid: Optional[str] = None, - validate: bool = True, -) -> ComposeResourceBundle: - """ - Here for backwards compatibility, the Compose class is prefered. - """ - return ComposeResource(start)( - spec, - root, - resource_path, - resource_kwargs, - path_semantics=path_semantics, - uid=uid, - validate=validate, - ) - - -@dataclass -class ComposeStreamDatum: - stream_resource: StreamResource - counter: Iterator - - def __call__( - self, - indices: StreamRange, - seq_nums: Optional[StreamRange] = None, - descriptor: Optional[EventDescriptor] = None, - validate: bool = True, - ) -> StreamDatum: - resource_uid = self.stream_resource["uid"] - - # If the seq_nums aren't passed in then the bluesky - # bundler will keep track of them - if not seq_nums: - seq_nums = StreamRange(start=0, stop=0) - - doc = StreamDatum( - stream_resource=resource_uid, - uid=f"{resource_uid}/{next(self.counter)}", - seq_nums=seq_nums, - indices=indices, - descriptor=descriptor["uid"] if descriptor else "", - ) - - if validate: - schema_validators[DocumentNames.stream_datum].validate(doc) - - return doc - - -def compose_stream_datum( - *, - stream_resource: StreamResource, - counter: Iterator, - seq_nums: StreamRange, - indices: StreamRange, - validate: bool = True, -) -> StreamDatum: - """ - Here for backwards compatibility, the Compose class is prefered. 
- """ - warnings.warn( - "compose_stream_datum() will be removed in the minor version.", - DeprecationWarning, - ) - return ComposeStreamDatum(stream_resource, counter)( - seq_nums, - indices, - validate=validate, - ) - - -@dataclass -class ComposeStreamResourceBundle: - stream_resource_doc: StreamResource - compose_stream_datum: ComposeStreamDatum - - # iter for backwards compatibility - def __iter__(self) -> Iterator: - return iter( - ( - self.stream_resource_doc, - self.compose_stream_datum, - ) - ) - - -@dataclass -class ComposeStreamResource: - start: Optional[RunStart] = None - - def __call__( - self, - mimetype: str, - uri: str, - data_key: str, - parameters: Dict[str, Any], - uid: Optional[str] = None, - validate: bool = True, - ) -> ComposeStreamResourceBundle: - if uid is None: - uid = str(uuid.uuid4()) - - doc = StreamResource( - uid=uid, - data_key=data_key, - mimetype=mimetype, - uri=uri, - parameters=parameters, - ) - - if self.start: - doc["run_start"] = self.start["uid"] - - if validate: - schema_validators[DocumentNames.stream_resource].validate(doc) - - return ComposeStreamResourceBundle( - doc, - ComposeStreamDatum( - stream_resource=doc, - counter=itertools.count(), - ), - ) - - -def compose_stream_resource( - *, - mimetype: str, - uri: str, - data_key: str, - parameters: Dict[str, Any], - start: Optional[RunStart] = None, - uid: Optional[str] = None, - validate: bool = True, -) -> ComposeStreamResourceBundle: - """ - Here for backwards compatibility, the Compose class is prefered. - """ - return ComposeStreamResource(start=start)( - mimetype, - uri, - data_key, - parameters, - uid=uid, - validate=validate, - ) - - -@dataclass -class ComposeStop: - start: RunStart - event_counters: Dict[str, int] - poison_pill: List - - def __call__( - self, - exit_status: Literal["success", "abort", "fail"] = "success", - reason: str = "", - uid: Optional[str] = None, - time: Optional[float] = None, - validate: bool = True, - ) -> RunStop: - if self.poison_pill: - raise EventModelError( - "Already composed a RunStop document for run " - "{!r}.".format(self.start["uid"]) - ) - self.poison_pill.append(object()) - if uid is None: - uid = str(uuid.uuid4()) - if time is None: - time = ttime.time() - doc = RunStop( - uid=uid, - time=time, - run_start=self.start["uid"], - exit_status=exit_status, - reason=reason, - num_events={k: v - 1 for k, v in self.event_counters.items()}, - ) - if validate: - schema_validators[DocumentNames.stop].validate(doc) - return doc - - -def compose_stop( - *, - start: RunStart, - event_counters: Dict[str, int], - poison_pill: List, - exit_status: Literal["success", "abort", "fail"] = "success", - reason: str = "", - uid: Optional[str] = None, - time: Optional[float] = None, - validate: bool = True, -) -> RunStop: - """ - Here for backwards compatibility, the Compose class is prefered. 
- """ - return ComposeStop( - start=start, - event_counters=event_counters, - poison_pill=poison_pill, - )(exit_status=exit_status, reason=reason, uid=uid, time=time, validate=validate) - - -def length_of_value(dictionary: Dict[str, List], error_msg: str) -> Optional[int]: - length = None - for k, v in dictionary.items(): - v_len = len(v) - if length is not None: - if v_len != length: - raise EventModelError(error_msg) - length = v_len - return length - - -@dataclass -class ComposeEventPage: - descriptor: EventDescriptor - event_counters: Dict[str, int] - - def __call__( - self, - data: Dict[str, List], - timestamps: Dict[str, Any], - seq_num: Optional[List[int]] = None, - filled: Optional[Dict[str, List[Union[bool, str]]]] = None, - uid: Optional[List] = None, - time: Optional[List] = None, - validate: bool = True, - ) -> EventPage: - timestamps_length = length_of_value( - timestamps, - "Cannot compose event_page: event_page contains `timestamps` " - "list values of different lengths", - ) - data_length = length_of_value( - data, - "Cannot compose event_page: event_page contains `data` " - "lists of different lengths", - ) - assert timestamps_length == data_length, ( - "Cannot compose event_page: the lists in `timestamps` are of a different " - "length to those in `data`" - ) - - if seq_num is None: - last_seq_num = self.event_counters[self.descriptor["name"]] - seq_num = list( - range(last_seq_num, len(next(iter(data.values()))) + last_seq_num) - ) - N = len(seq_num) - if uid is None: - uid = [str(uuid.uuid4()) for _ in range(N)] - if time is None: - time = [ttime.time()] * N - if filled is None: - filled = {} - doc = EventPage( - uid=uid, - time=time, - data=data, - timestamps=timestamps, - seq_num=seq_num, - filled=filled, - descriptor=self.descriptor["uid"], - ) - if validate: - schema_validators[DocumentNames.event_page].validate(doc) - - if not ( - set( - keys_without_stream_keys( - self.descriptor["data_keys"], self.descriptor["data_keys"] - ) - ) - == set(keys_without_stream_keys(data, self.descriptor["data_keys"])) - == set( - keys_without_stream_keys(timestamps, self.descriptor["data_keys"]) - ) - ): - raise EventModelValidationError( - 'These sets of keys must match (other than "STREAM:" keys):\n' - "event['data'].keys(): {}\n" - "event['timestamps'].keys(): {}\n" - "descriptor['data_keys'].keys(): {}\n".format( - data.keys(), - timestamps.keys(), - self.descriptor["data_keys"].keys(), - ) - ) - if set(filled) - set(data): - raise EventModelValidationError( - "Keys in event['filled'] {} must be a subset of those in " - "event['data'] {}".format(filled.keys(), data.keys()) - ) - self.event_counters[self.descriptor["name"]] += len(seq_num) - return doc - - -def compose_event_page( - *, - descriptor: EventDescriptor, - event_counters: Dict[str, int], - data: Dict[str, List], - timestamps: Dict[str, Any], - seq_num: List[int], - filled: Optional[Dict[str, List[Union[bool, str]]]] = None, - uid: Optional[List] = None, - time: Optional[List] = None, - validate: bool = True, -) -> EventPage: - """ - Here for backwards compatibility, the Compose class is prefered. 
- """ - return ComposeEventPage(descriptor, event_counters)( - data, - timestamps, - seq_num=seq_num, - filled=filled, - uid=uid, - time=time, - validate=validate, - ) - - -def keys_without_stream_keys(dictionary, descriptor_data_keys): - return [ - key - for key in dictionary.keys() - if ( - "external" not in descriptor_data_keys[key] - or descriptor_data_keys[key]["external"] != "STREAM:" - ) - ] - - -@dataclass -class ComposeEvent: - descriptor: EventDescriptor - event_counters: Dict[str, int] - - def __call__( - self, - data: dict, - timestamps: dict, - seq_num: Optional[int] = None, - filled: Optional[Dict[str, Union[bool, str]]] = None, - uid: Optional[str] = None, - time: Optional[float] = None, - validate: bool = True, - ) -> Event: - if seq_num is None: - seq_num = self.event_counters[self.descriptor["name"]] - if uid is None: - uid = str(uuid.uuid4()) - if time is None: - time = ttime.time() - if filled is None: - filled = {} - doc = Event( - uid=uid, - time=time, - data=data, - timestamps=timestamps, - seq_num=seq_num, - filled=filled, - descriptor=self.descriptor["uid"], - ) - if validate: - schema_validators[DocumentNames.event].validate(doc) - - if not ( - set( - keys_without_stream_keys( - self.descriptor["data_keys"], self.descriptor["data_keys"] - ) - ) - == set(keys_without_stream_keys(data, self.descriptor["data_keys"])) - == set( - keys_without_stream_keys(timestamps, self.descriptor["data_keys"]) - ) - ): - raise EventModelValidationError( - 'These sets of keys must match (other than "STREAM:" keys):\n' - "event['data'].keys(): {}\n" - "event['timestamps'].keys(): {}\n" - "descriptor['data_keys'].keys(): {}\n".format( - data.keys(), - timestamps.keys(), - self.descriptor["data_keys"].keys(), - ) - ) - if set(filled) - set(data): - raise EventModelValidationError( - "Keys in event['filled'] {} must be a subset of those in " - "event['data'] {}".format(filled.keys(), data.keys()) - ) - self.event_counters[self.descriptor["name"]] = seq_num + 1 - return doc - - -def compose_event( - *, - descriptor: EventDescriptor, - event_counters: Dict[str, int], - data: Dict[str, Any], - timestamps: Dict[str, Any], - seq_num: int, - filled: Optional[Dict[str, Union[bool, str]]] = None, - uid: Optional[str] = None, - time: Optional[float] = None, - validate: bool = True, -) -> Event: - """ - Here for backwards compatibility, the Compose class is prefered. 
- """ - return ComposeEvent(descriptor, event_counters)( - data, - timestamps, - seq_num=seq_num, - filled=filled, - uid=uid, - time=time, - validate=validate, - ) - - -@dataclass -class ComposeDescriptorBundle: - descriptor_doc: EventDescriptor - compose_event: ComposeEvent - compose_event_page: ComposeEventPage - - def __iter__(self) -> Iterator: - return iter( - ( - self.descriptor_doc, - self.compose_event, - self.compose_event_page, - ) - ) - - -@dataclass -class ComposeDescriptor: - start: RunStart - streams: dict - event_counters: Dict[str, int] - - def __call__( - self, - name, - data_keys, - hints=None, - configuration=None, - object_keys=None, - time=None, - uid=None, - validate=True, - ) -> ComposeDescriptorBundle: - if time is None: - time = ttime.time() - if uid is None: - uid = str(uuid.uuid4()) - if hints is None: - hints = {} - if configuration is None: - configuration = {} - if object_keys is None: - object_keys = {} - - doc = EventDescriptor( - configuration=configuration, - data_keys=data_keys, - name=name, - object_keys=object_keys, - run_start=self.start["uid"], - time=time, - uid=uid, - hints=hints, - ) - if validate: - if name in self.streams and self.streams[name] != set(data_keys): - raise EventModelValidationError( - "A descriptor with the name {} has already been composed with " - "data_keys {}. The requested data_keys were {}. All " - "descriptors in a given stream must have the same " - "data_keys.".format(name, self.streams[name], set(data_keys)) - ) - schema_validators[DocumentNames.descriptor].validate(doc) - - if name not in self.streams: - self.streams[name] = set(data_keys) - self.event_counters[name] = 1 - - return ComposeDescriptorBundle( - descriptor_doc=doc, - compose_event=ComposeEvent( - descriptor=doc, event_counters=self.event_counters - ), - compose_event_page=ComposeEventPage( - descriptor=doc, event_counters=self.event_counters - ), - ) - - -def compose_descriptor( - *, - start: RunStart, - streams: Dict[str, Iterable], - event_counters: Dict[str, int], - name: str, - data_keys: Dict[str, DataKey], - uid: Optional[str] = None, - time: Optional[float] = None, - object_keys: Optional[Dict[str, Any]] = None, - configuration: Optional[Dict[str, Configuration]] = None, - hints: Optional[PerObjectHint] = None, - validate: bool = True, -) -> ComposeDescriptorBundle: - """ - Here for backwards compatibility, the Compose class is prefered. - """ - return ComposeDescriptor(start, streams, event_counters)( - name, - data_keys, - hints=hints, - configuration=configuration, - object_keys=object_keys, - time=time, - uid=uid, - validate=validate, - ) - - -@dataclass -class ComposeRunBundle: - """Extensible compose run bundle. This maintains backward compatibility - by unpacking into a basic run bundle - (start, compose_descriptor, compose_resource, stop). - Further extensions are optional and require keyword referencing - (i.e. compose_stream_resource). 
- """ - - start_doc: RunStart - compose_descriptor: ComposeDescriptor - compose_resource: ComposeResource - compose_stop: ComposeStop - compose_stream_resource: Optional[ComposeStreamResource] = None - - # iter for backwards compatibility - def __iter__(self) -> Iterator: - return iter( - ( - self.start_doc, - self.compose_descriptor, - self.compose_resource, - self.compose_stop, - ) - ) - - -def compose_run( - *, - uid: Optional[str] = None, - time: Optional[float] = None, - metadata: Optional[Dict] = None, - validate: bool = True, - event_counters: Optional[Dict[str, int]] = None, -) -> ComposeRunBundle: - """ - Compose a RunStart document and factory functions for related documents. - - Parameters - ---------- - uid : string, optional - Unique identifier for this run, conventionally a UUID4. If None is - given, a UUID4 will be generated. - time : float, optional - UNIX epoch time of start of this run. If None is given, the current - time will be used. - metadata : dict, optional - Additional metadata include the document - validate : boolean, optional - Validate this document conforms to the schema. - event_counters : dict, optional - A dict for counting events, when an event is composed by any of the - descriptors composed by this run, the element in this dict with the key of the - descriptor name will be increased by 1. - - Returns - ------- - ComposeRunBundle - """ - if uid is None: - uid = str(uuid.uuid4()) - if time is None: - time = ttime.time() - if metadata is None: - metadata = {} - - # Define some mutable state to be shared internally by the closures composed - # below. - streams: Dict[str, Iterable] = {} - if event_counters is None: - event_counters = {} - poison_pill: list = [] - - doc = dict(uid=uid, time=time, **metadata) - - if validate: - schema_validators[DocumentNames.start].validate(doc) - - return ComposeRunBundle( - cast(RunStart, doc), - ComposeDescriptor( - start=cast(RunStart, doc), streams=streams, event_counters=event_counters - ), - ComposeResource(start=cast(RunStart, doc)), - ComposeStop( - start=cast(RunStart, doc), - event_counters=event_counters, - poison_pill=poison_pill, - ), - compose_stream_resource=ComposeStreamResource(start=cast(RunStart, doc)), - ) - - -def pack_event_page(*events: Event) -> EventPage: - """ - Transform one or more Event documents into an EventPage document. - - Parameters - ---------- - *event : dicts - any number of Event documents - - Returns - ------- - event_page : dict - """ - if not events: - raise ValueError( - "The pack_event_page() function was called with empty *args. " - "Cannot create an EventPage from an empty collection of Events " - "because the 'descriptor' field in an EventPage cannot be NULL." - ) - time_list = [] - uid_list = [] - seq_num_list = [] - data_list = [] - filled_list = [] - timestamps_list = [] - for event in events: - time_list.append(event["time"]) - uid_list.append(event["uid"]) - seq_num_list.append(event["seq_num"]) - filled_list.append(event.get("filled", {})) - data_list.append(event["data"]) - timestamps_list.append(event["timestamps"]) - event_page = EventPage( - time=time_list, - uid=uid_list, - seq_num=seq_num_list, - descriptor=event["descriptor"], - filled=_transpose_list_of_dicts(filled_list), - data=_transpose_list_of_dicts(data_list), - timestamps=_transpose_list_of_dicts(timestamps_list), - ) - return event_page - - -def unpack_event_page(event_page: EventPage) -> Generator: - """ - Transform an EventPage document into individual Event documents. 
-
-
-def pack_datum_page(*datum: Datum) -> DatumPage:
-    """
-    Transform one or more Datum documents into a DatumPage document.
-
-    Parameters
-    ----------
-    *datum : dicts
-        any number of Datum documents
-
-    Returns
-    -------
-    datum_page : dict
-    """
-    if not datum:
-        raise ValueError(
-            "The pack_datum_page() function was called with empty *args. "
-            "Cannot create a DatumPage from an empty collection of Datum "
-            "because the 'resource' field in a DatumPage cannot be NULL."
-        )
-    datum_id_list = []
-    datum_kwarg_list = []
-    for datum_ in datum:
-        datum_id_list.append(datum_["datum_id"])
-        datum_kwarg_list.append(datum_["datum_kwargs"])
-    datum_page = DatumPage(
-        resource=datum_["resource"],
-        datum_id=datum_id_list,
-        datum_kwargs=_transpose_list_of_dicts(datum_kwarg_list),
-    )
-    return datum_page
-
-
-def unpack_datum_page(datum_page: DatumPage) -> Generator:
-    """
-    Transform a DatumPage document into individual Datum documents.
-
-    Parameters
-    ----------
-    datum_page : DatumPage
-
-    Yields
-    ------
-    datum : Datum
-    """
-    resource = datum_page["resource"]
-    datum_kwarg_list = _transpose_dict_of_lists(datum_page["datum_kwargs"])
-    datum_id: Any
-    datum_kwargs: Any
-    for datum_id, datum_kwargs in itertools.zip_longest(
-        datum_page["datum_id"], datum_kwarg_list, fillvalue={}
-    ):
-        yield Datum(datum_id=datum_id, datum_kwargs=datum_kwargs, resource=resource)
-
-
-def rechunk_event_pages(event_pages: Iterable, chunk_size: int) -> Generator:
-    """
-    Resizes the event_pages in an iterable of event_pages.
-
-    Parameters
-    ----------
-    event_pages: Iterable
-        An iterable of event_pages
-    chunk_size: integer
-        Size of pages to yield
-
-    Yields
-    ------
-    event_page : dict
-    """
-    remainder = chunk_size
-    chunk_list = []
-
-    def page_chunks(page: dict, chunk_size: int, remainder: int) -> Generator:
-        """
-        Yields chunks of an event_page.
-        The first chunk will be of size remainder, the following chunks will be
-        of size chunk_size. The last chunk will be whatever is left over.
-        """
-        array_keys = ["seq_num", "time", "uid"]
-        page_size = len(page["uid"])  # Number of events in the page.
-
-        # Make a list of the chunk indexes.
-        chunks = [(0, remainder)]
-        chunks.extend(
-            [(i, i + chunk_size) for i in range(remainder, page_size, chunk_size)]
-        )
-
-        for start, stop in chunks:
-            yield {
-                "descriptor": page["descriptor"],
-                **{key: page[key][start:stop] for key in array_keys},
-                "data": {
-                    key: page["data"][key][start:stop] for key in page["data"].keys()
-                },
-                "timestamps": {
-                    key: page["timestamps"][key][start:stop]
-                    for key in page["timestamps"].keys()
-                },
-                "filled": {
-                    key: page["filled"][key][start:stop]
-                    for key in page["filled"].keys()
-                },
-            }
-
-    for page in event_pages:
-        new_chunks = page_chunks(page, chunk_size, remainder)
-        for chunk in new_chunks:
-            remainder -= len(chunk["uid"])  # Subtract the size of the chunk.
-            chunk_list.append(chunk)
-            if remainder == 0:
-                yield merge_event_pages(chunk_list)
-                remainder = chunk_size
-                chunk_list = []
-    if chunk_list:
-        yield merge_event_pages(chunk_list)
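For example, a single three-event page rechunked to pages of at most two events (a sketch; the page dict is hand-written):

    from event_model import rechunk_event_pages

    pages = [{
        "descriptor": "d1", "uid": ["a", "b", "c"], "time": [0, 1, 2],
        "seq_num": [1, 2, 3], "data": {"x": [1, 2, 3]},
        "timestamps": {"x": [0, 1, 2]}, "filled": {},
    }]
    repaged = list(rechunk_event_pages(pages, chunk_size=2))
    assert [len(p["uid"]) for p in repaged] == [2, 1]
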
-
-
-def merge_event_pages(event_pages: Iterable[EventPage]) -> EventPage:
-    """
-    Combines an iterable of event_pages into a single event_page.
-
-    Parameters
-    ----------
-    event_pages: Iterable
-        An iterable of event_pages
-
-    Returns
-    -------
-    event_page : dict
-    """
-    pages = list(event_pages)
-    if len(pages) == 1:
-        return pages[0]
-
-    doc = dict(
-        descriptor=pages[0]["descriptor"],
-        seq_num=list(
-            itertools.chain.from_iterable([page["seq_num"] for page in pages])
-        ),
-        time=list(itertools.chain.from_iterable([page["time"] for page in pages])),
-        uid=list(itertools.chain.from_iterable([page["uid"] for page in pages])),
-        data={
-            key: list(
-                itertools.chain.from_iterable([page["data"][key] for page in pages])
-            )
-            for key in pages[0]["data"].keys()
-        },
-        timestamps={
-            key: list(
-                itertools.chain.from_iterable(
-                    [page["timestamps"][key] for page in pages]
-                )
-            )
-            for key in pages[0]["timestamps"].keys()
-        },
-        filled={
-            key: list(
-                itertools.chain.from_iterable([page["filled"][key] for page in pages])
-            )
-            for key in pages[0]["filled"].keys()
-        },
-    )
-    return cast(EventPage, doc)
-
-
-def rechunk_datum_pages(datum_pages: Iterable, chunk_size: int) -> Generator:
-    """
-    Resizes the datum_pages in an iterable of datum_pages.
-
-    Parameters
-    ----------
-    datum_pages: Iterable
-        An iterable of datum_pages
-    chunk_size: integer
-        Size of pages to yield
-
-    Yields
-    ------
-    datum_page : dict
-    """
-    remainder = chunk_size
-    chunk_list = []
-
-    def page_chunks(page: dict, chunk_size: int, remainder: int) -> Generator:
-        """
-        Yields chunks of a datum_page.
-        The first chunk will be of size remainder, the following chunks will be
-        of size chunk_size. The last chunk will be whatever is left over.
-        """
-
-        array_keys = ["datum_id"]
-        page_size = len(page["datum_id"])  # Number of datum in the page.
-
-        # Make a list of the chunk indexes.
-        chunks = [(0, remainder)]
-        chunks.extend(
-            [(i, i + chunk_size) for i in range(remainder, page_size, chunk_size)]
-        )
-
-        for start, stop in chunks:
-            yield {
-                "resource": page["resource"],
-                **{key: page[key][start:stop] for key in array_keys},
-                "datum_kwargs": {
-                    key: page["datum_kwargs"][key][start:stop]
-                    for key in page["datum_kwargs"].keys()
-                },
-            }
-
-    for page in datum_pages:
-        new_chunks = page_chunks(page, chunk_size, remainder)
-        for chunk in new_chunks:
-            remainder -= len(chunk["datum_id"])  # Subtract the size of the chunk.
-            chunk_list.append(chunk)
-            if remainder == 0:
-                yield merge_datum_pages(chunk_list)
-                remainder = chunk_size
-                chunk_list = []
-    if chunk_list:
-        yield merge_datum_pages(chunk_list)
-
-
-def merge_datum_pages(datum_pages: Iterable) -> DatumPage:
-    """
-    Combines an iterable of datum_pages into a single datum_page.
-
-    Parameters
-    ----------
-    datum_pages: Iterable
-        An iterable of datum_pages
-
-    Returns
-    -------
-    datum_page : dict
-    """
-    pages = list(datum_pages)
-    if len(pages) == 1:
-        return pages[0]
-
-    array_keys = ["datum_id"]
-
-    doc = dict(
-        resource=pages[0]["resource"],
-        **{
-            key: list(itertools.chain.from_iterable([page[key] for page in pages]))
-            for key in array_keys
-        },
-        datum_kwargs={
-            key: list(
-                itertools.chain.from_iterable(
-                    [page["datum_kwargs"][key] for page in pages]
-                )
-            )
-            for key in pages[0]["datum_kwargs"].keys()
-        },
-    )
-    return cast(DatumPage, doc)
-
-
-def bulk_events_to_event_pages(bulk_events: dict) -> list:
-    """
-    Transform a BulkEvents document into a list of EventPage documents.
-
-    Note: The BulkEvents layout has been deprecated in favor of EventPage.
-
-    Parameters
-    ----------
-    bulk_events : dict
-
-    Returns
-    -------
-    event_pages : list
-    """
-    # This is for a deprecated document type, so we are not being fussy
-    # about efficiency/laziness here.
-    event_pages: dict = {}  # descriptor uid mapped to page
-    for events in bulk_events.values():
-        for event in events:
-            descriptor = event["descriptor"]
-            try:
-                page = event_pages[descriptor]
-            except KeyError:
-                page = {"time": [], "uid": [], "seq_num": [], "descriptor": descriptor}
-                page["data"] = {k: [] for k in event["data"]}
-                page["timestamps"] = {k: [] for k in event["timestamps"]}
-                page["filled"] = {k: [] for k in event.get("filled", {})}
-                event_pages[descriptor] = page
-            page["uid"].append(event["uid"])
-            page["time"].append(event["time"])
-            page["seq_num"].append(event["seq_num"])
-            page_data = page["data"]
-            for k, v in event["data"].items():
-                page_data[k].append(v)
-            page_timestamps = page["timestamps"]
-            for k, v in event["timestamps"].items():
-                page_timestamps[k].append(v)
-            page_filled = page["filled"]
-            for k, v in event.get("filled", {}).items():
-                page_filled[k].append(v)
-    return list(event_pages.values())
-
-
-def bulk_datum_to_datum_page(bulk_datum: dict) -> DatumPage:
-    """
-    Transform one BulkDatum into one DatumPage.
-
-    Note: There is only one known usage of BulkDatum "in the wild", and the
-    BulkDatum layout has been deprecated in favor of DatumPage.
-    """
-    datum_page = DatumPage(
-        datum_id=bulk_datum["datum_ids"],
-        resource=bulk_datum["resource"],
-        datum_kwargs=_transpose_list_of_dicts(bulk_datum["datum_kwarg_list"]),
-    )
-    return datum_page
-
-
-def _transpose_list_of_dicts(list_of_dicts: list) -> dict:
-    "Transform list-of-dicts into dict-of-lists (i.e. DataFrame-like)."
-    dict_of_lists = defaultdict(list)
-    for row in list_of_dicts:
-        for k, v in row.items():
-            dict_of_lists[k].append(v)
-    return dict(dict_of_lists)
-
-
-def _transpose_dict_of_lists(dict_of_lists: dict) -> list:
-    "Transform dict-of-lists (i.e. DataFrame-like) into list-of-dicts."
-    list_of_dicts = []
-    keys = list(dict_of_lists)
-    for row in zip(*(dict_of_lists[k] for k in keys)):
-        list_of_dicts.append(dict(zip(keys, row)))
-    return list_of_dicts
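The two helpers are inverses of each other; note they are private (underscore-prefixed), so this sketch is illustrative rather than supported API:

    from event_model import _transpose_dict_of_lists, _transpose_list_of_dicts

    rows = [{"x": 1, "y": 10}, {"x": 2, "y": 20}]
    cols = _transpose_list_of_dicts(rows)
    assert cols == {"x": [1, 2], "y": [10, 20]}
    assert _transpose_dict_of_lists(cols) == rows
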
-
-
-def verify_filled(event_page: dict) -> None:
-    """Take an event_page document and verify that it is completely filled.
-
-    Parameters
-    ----------
-    event_page : event_page document
-        The event page document to check
-
-    Raises
-    ------
-    UnfilledData
-        Raised if any of the data in the event_page is unfilled; the
-        exception message includes a list of the unfilled fields.
-    """
-    if not all(map(all, event_page["filled"].values())):
-        # check that all event_page data is filled.
-        unfilled_data = []
-        for field, filled in event_page["filled"].items():
-            if not all(filled):
-                unfilled_data.append(field)
-        raise UnfilledData(
-            f"Unfilled data found in fields "
-            f"{unfilled_data!r}. Use "
-            f"`event_model.Filler`."
-        )
-
-
-def sanitize_doc(doc: dict) -> dict:
-    """Return a copy with any numpy objects converted to built-in Python types.
-
-    This function takes in an event-model document and returns a copy with any
-    numpy objects converted to built-in Python types. It is useful for
-    sanitizing documents prior to sending to any consumer that does not
-    recognize numpy types, such as a MongoDB database or a JSON encoder.
-
-    Parameters
-    ----------
-    doc : dict
-        The event-model document to be sanitized
-
-    Returns
-    -------
-    sanitized_doc : event-model document
-        The event-model document with numpy objects converted to built-in
-        Python types.
-    """
-    return json.loads(json.dumps(doc, cls=NumpyEncoder))
-
-
-class NumpyEncoder(json.JSONEncoder):
-    """
-    A json.JSONEncoder for encoding numpy objects using built-in Python types.
-
-    Examples
-    --------
-
-    Encode a Python object that includes an arbitrarily-nested numpy object.
-
-    >>> json.dumps({'a': {'b': numpy.array([1, 2, 3])}}, cls=NumpyEncoder)
-    """
-
-    # Credit: https://stackoverflow.com/a/47626762/1221924
-    @no_type_check
-    def default(self, obj: object) -> Any:
-        try:
-            import dask.array
-
-            if isinstance(obj, dask.array.Array):
-                obj = numpy.asarray(obj)
-        except ImportError:
-            pass
-        if isinstance(obj, (numpy.generic, numpy.ndarray)):
-            if numpy.isscalar(obj):
-                return obj.item()
-            return obj.tolist()
-        return json.JSONEncoder.default(self, obj)
diff --git a/event_model/__main__.py b/event_model/__main__.py
deleted file mode 100644
index cc35ce8a..00000000
--- a/event_model/__main__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from argparse import ArgumentParser
-
-from event_model import __version__
-
-__all__ = ["main"]
-
-
-def main(args=None):
-    parser = ArgumentParser()
-    parser.add_argument("-v", "--version", action="version", version=__version__)
-    args = parser.parse_args(args)
-
-
-# test with: python -m python3_pip_skeleton
-if __name__ == "__main__":
-    main()
diff --git a/pyproject.toml b/pyproject.toml
index 9007023f..aec267e4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,21 +6,27 @@ build-backend = "setuptools.build_meta"
 name = "event-model"
 classifiers = [
     "Development Status :: 3 - Alpha",
-    "License :: OSI Approved :: Apache Software License",
+    "License :: OSI Approved :: BSD License",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
 ]
-description = ""
-dependencies = []  # Add project dependencies here, e.g. ["click", "numpy"]
+description = "Data model used by the bluesky ecosystem."
+dependencies = [ + "importlib-resources", + "jsonschema>=4", + "numpy", + "typing_extensions" +] dynamic = ["version"] license.file = "LICENSE" readme = "README.md" -requires-python = ">=3.10" +requires-python = ">=3.8" [project.optional-dependencies] dev = [ - "copier", "mypy", "myst-parser", "pipdeptree", @@ -34,18 +40,28 @@ dev = [ "sphinx-design", "tox-direct", "types-mock", + + # Until https://github.com/copier-org/copier/issues/1819 is released. + "copier==9.3.1", + + # These are dependencies of various sphinx extensions for documentation. + "ipython", + "matplotlib", + "numpydoc", + + # For schema generation. + "pydantic>=2.6", ] [project.scripts] -event-model = "event_model.__main__:main" +regenerate-schema = "event_model.documents.generate.__main__:main" [project.urls] GitHub = "https://github.com/bluesky/event-model" [[project.authors]] # Further authors may be added by duplicating this section -email = "eva.lott@diamond.ac.uk" -name = "Eva Lott" - +email = "dallan@bnl.gov" +name = "Brookhaven National Lab" [tool.setuptools_scm] version_file = "src/event_model/_version.py" @@ -56,12 +72,13 @@ ignore_missing_imports = true # Ignore missing stubs in imported modules [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error addopts = """ - --tb=native -vv --doctest-modules --doctest-glob="*.rst" + --tb=native -vv + --cov=event_model --cov-report term --cov-report xml:cov.xml """ # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings filterwarnings = "error" # Doctest python code in docs, python code in src docstrings, test functions in tests -testpaths = "docs src tests" +testpaths = "src/event_model/tests" [tool.coverage.run] data_file = "/tmp/event_model.coverage" @@ -89,13 +106,13 @@ allowlist_externals = sphinx-autobuild commands = pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs} - type-checking: mypy src tests {posargs} + type-checking: mypy event_model {posargs} tests: pytest --cov=event_model --cov-report term --cov-report xml:cov.xml {posargs} docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html """ [tool.ruff] -src = ["src", "tests"] +src = ["src"] line-length = 88 lint.select = [ "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b diff --git a/src/event_model/__init__.py b/src/event_model/__init__.py index a2ffbf36..7392cc88 100644 --- a/src/event_model/__init__.py +++ b/src/event_model/__init__.py @@ -1,11 +1,3105 @@ -"""Top level API. +import collections.abc +import copy +import inspect +import itertools +import json +import os +import sys +import threading +import time as ttime +import uuid +import warnings +import weakref +from collections import defaultdict, deque +from dataclasses import dataclass +from enum import Enum +from typing import ( + Any, + Callable, + Dict, + Generator, + Iterable, + Iterator, + List, + Optional, + Tuple, + Type, + Union, + cast, + no_type_check, +) -.. 
data:: __version__
-    :type: str
+import jsonschema
+import numpy
+from typing_extensions import Literal
-
-    Version number as calculated by https://github.com/pypa/setuptools_scm
-"""
+from .documents.datum import Datum
+from .documents.datum_page import DatumPage
+from .documents.event import Event, PartialEvent
+from .documents.event_descriptor import (
+    Configuration,
+    DataKey,
+    Dtype,
+    EventDescriptor,
+    Limits,
+    LimitsRange,
+    PerObjectHint,
+)
+from .documents.event_page import EventPage, PartialEventPage
+from .documents.resource import PartialResource, Resource
+from .documents.run_start import Calculation, Hints, Projection, Projections, RunStart
+from .documents.run_stop import RunStop
+from .documents.stream_datum import StreamDatum, StreamRange
+from .documents.stream_resource import StreamResource
+
+if sys.version_info < (3, 9):
+    import importlib_resources
+else:
+    import importlib.resources as importlib_resources
 
 from ._version import __version__
 
-__all__ = ["__version__"]
+__all__ = [
+    # Document types
+    "Datum",
+    "DatumPage",
+    "Event",
+    "PartialEvent",
+    "Configuration",
+    "DataKey",
+    "Dtype",
+    "EventDescriptor",
+    "Limits",
+    "LimitsRange",
+    "PerObjectHint",
+    "EventPage",
+    "PartialEventPage",
+    "PartialResource",
+    "Resource",
+    "Calculation",
+    "Hints",
+    "Projection",
+    "Projections",
+    "RunStart",
+    "RunStop",
+    "StreamDatum",
+    "StreamRange",
+    "StreamResource",
+    # Schema and version
+    "DocumentNames",
+    "schemas",
+    "schema_validators",
+    "compose_run",
+    "__version__",
+]
+
+
+class DocumentNames(Enum):
+    stop = "stop"
+    start = "start"
+    descriptor = "descriptor"
+    event = "event"
+    datum = "datum"
+    resource = "resource"
+    event_page = "event_page"
+    datum_page = "datum_page"
+    stream_resource = "stream_resource"
+    stream_datum = "stream_datum"
+    bulk_datum = "bulk_datum"  # deprecated
+    bulk_events = "bulk_events"  # deprecated
+
+
+class DocumentRouter:
+    """
+    Route each document by type to a corresponding method.
+
+    When an instance is called with a document type and a document like::
+
+        router(name, doc)
+
+    the document is passed to the method of the corresponding name, as in::
+
+        getattr(router, name)(doc)
+
+    The method is expected to return ``None`` or a valid document of the same
+    type. It may be the original instance (passed through), a copy, or a
+    different dict altogether.
+
+    Finally, the call to ``router(name, doc)`` returns::
+
+        (name, getattr(router, name)(doc))
+
+    Parameters
+    ----------
+    emit: callable, optional
+        Expected signature ``f(name, doc)``
+    """
+
+    def __init__(self, *, emit: Optional[Callable] = None) -> None:
+        # Put in some extra effort to validate `emit` carefully, because if
+        # this is used incorrectly the resultant errors can be confusing.
+
+        self._emit_ref: Optional[Callable] = None
+
+        if emit is not None:
+            if not callable(emit):
+                raise ValueError("emit must be a callable")
+            sig = inspect.signature(emit)
+            try:
+                # Does this function accept two positional arguments?
+                sig.bind(None, None)
+            except TypeError:
+                raise ValueError(
+                    "emit must accept two positional arguments, name and doc"
+                )
+            # Stash a weak reference to `emit`.
+            if inspect.ismethod(emit):
+                self._emit_ref = weakref.WeakMethod(emit)
+            else:
+                self._emit_ref = weakref.ref(emit)
+
+    def emit(self, name: str, doc: dict) -> None:
+        """
+        Emit to the callable provided at instantiation time, if any.
+        """
+        if self._emit_ref is not None:
+            # Call the weakref.
+            emit = self._emit_ref()
+            if emit is not None:
+                emit(name, doc)
+
+    def __call__(
+        self, name: str, doc: dict, validate: bool = False
+    ) -> Tuple[str, dict]:
+        """
+        Process a document.
+
+        Parameters
+        ----------
+        name : string
+        doc : dict
+        validate : boolean
+            Apply jsonschema validation to the documents coming *out*. This is
+            False by default.
+
+        Returns
+        -------
+        name, output_doc : string, dict
+            The same name as what was passed in, and a doc that may be the same
+            instance as doc, a copy of doc, or a different dict altogether.
+        """
+        return self._dispatch(name, doc, validate)
+
+    def _dispatch(self, name: str, doc: dict, validate: bool) -> Tuple[str, dict]:
+        """
+        Dispatch to the method corresponding to the `name`.
+
+        Optionally validate that the result is still a valid document.
+        """
+        output_doc = getattr(self, name)(doc)
+
+        # If 'event' is not defined by the subclass but 'event_page' is, or
+        # vice versa, use that. And the same for 'datum' / 'datum_page'.
+        if output_doc is NotImplemented:
+            if name == "event":
+                event_page = pack_event_page(cast(Event, doc))
+                # Subclass' implementation of event_page may return a valid
+                # EventPage or None or NotImplemented.
+                output_event_page = self.event_page(event_page)
+                output_event_page = (
+                    output_event_page if output_event_page is not None else event_page
+                )
+                if output_event_page is not NotImplemented:
+                    (output_doc,) = unpack_event_page(output_event_page)
+            elif name == "datum":
+                datum_page = pack_datum_page(cast(Datum, doc))
+                # Subclass' implementation of datum_page may return a valid
+                # DatumPage or None or NotImplemented.
+                output_datum_page = self.datum_page(datum_page)
+                output_datum_page = (
+                    output_datum_page if output_datum_page is not None else datum_page
+                )
+                if output_datum_page is not NotImplemented:
+                    (output_doc,) = unpack_datum_page(output_datum_page)
+            elif name == "event_page":
+                output_events = []
+                for event in unpack_event_page(cast(EventPage, doc)):
+                    # Subclass' implementation of event may return a valid
+                    # Event or None or NotImplemented.
+                    output_event = self.event(event)
+                    output_event = output_event if output_event is not None else event
+                    if output_event is NotImplemented:
+                        break
+                    output_events.append(output_event)
+                else:
+                    output_doc = pack_event_page(*output_events)
+            elif name == "datum_page":
+                output_datums = []
+                for datum in unpack_datum_page(cast(DatumPage, doc)):
+                    # Subclass' implementation of datum may return a valid
+                    # Datum or None or NotImplemented.
+                    output_datum = self.datum(datum)
+                    output_datum = output_datum if output_datum is not None else datum
+                    if output_datum is NotImplemented:
+                        break
+                    output_datums.append(output_datum)
+                else:
+                    output_doc = pack_datum_page(*output_datums)
+        # If we still don't find an implemented method by here, then pass the
+        # original document through.
+        if output_doc is NotImplemented:
+            output_doc = doc
+        if validate:
+            schema_validators[getattr(DocumentNames, name)].validate(output_doc)
+        return (name, output_doc if output_doc is not None else doc)
+
+    # The methods below return NotImplemented, a built-in Python constant.
+    # Note that it is not interchangeable with NotImplementedError. See docs at
+    # https://docs.python.org/3/library/constants.html#NotImplemented
+    # It is used here so that _dispatch, defined above, can detect whether a
+    # subclass implements event, event_page, both, or neither. This is similar
+    # to how Python uses NotImplemented in arithmetic operations, as described
+    # in the documentation.
+
+    def start(self, doc: RunStart) -> Optional[RunStart]:
+        return NotImplemented
+
+    def stop(self, doc: RunStop) -> Optional[RunStop]:
+        return NotImplemented
+
+    def descriptor(self, doc: EventDescriptor) -> Optional[EventDescriptor]:
+        return NotImplemented
+
+    def resource(self, doc: Resource) -> Optional[Resource]:
+        return NotImplemented
+
+    def event(self, doc: Event) -> Event:
+        return NotImplemented
+
+    def datum(self, doc: Datum) -> Datum:
+        return NotImplemented
+
+    def event_page(self, doc: EventPage) -> EventPage:
+        return NotImplemented
+
+    def datum_page(self, doc: DatumPage) -> Optional[DatumPage]:
+        return NotImplemented
+
+    def stream_datum(self, doc: StreamDatum) -> Optional[StreamDatum]:
+        return NotImplemented
+
+    def stream_resource(self, doc: StreamResource) -> Optional[StreamResource]:
+        return NotImplemented
+
+    def bulk_events(self, doc: dict) -> None:
+        # Do not modify this in a subclass. Use event_page.
+        warnings.warn(
+            "The document type 'bulk_events' has been deprecated in favor of "
+            "'event_page', whose structure is a transpose of 'bulk_events'."
+        )
+        for page in bulk_events_to_event_pages(doc):
+            self.event_page(page)
+
+    def bulk_datum(self, doc: dict) -> None:
+        # Do not modify this in a subclass. Use datum_page.
+        warnings.warn(
+            "The document type 'bulk_datum' has been deprecated in favor of "
+            "'datum_page', whose structure is a transpose of 'bulk_datum'."
+        )
+        self.datum_page(bulk_datum_to_datum_page(doc))
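Because of the fallback dispatch above, a subclass may implement only the granular or only the paged form. A sketch in which events arriving as an event_page are unpacked automatically:

    from event_model import DocumentRouter

    class Printer(DocumentRouter):
        def event(self, doc):
            print(doc["seq_num"], doc["data"])
            return doc

    router = Printer()
    router("event_page", {
        "descriptor": "d1", "uid": ["e1"], "time": [0.0], "seq_num": [1],
        "data": {"x": [1]}, "timestamps": {"x": [0.0]}, "filled": {},
    })
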
+
+
+class SingleRunDocumentRouter(DocumentRouter):
+    """
+    A DocumentRouter intended to process events from exactly one run.
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._start_doc: Optional[dict] = None
+        self._descriptors: dict = dict()
+
+    def __call__(
+        self, name: str, doc: dict, validate: bool = False
+    ) -> Tuple[str, dict]:
+        """
+        Process a document.
+
+        Also, keep track of the start document and descriptor documents
+        passed to this SingleRunDocumentRouter in caches.
+
+        Parameters
+        ----------
+        name : string
+        doc : dict
+        validate : boolean
+            Apply jsonschema validation to the documents coming *out*. This is
+            False by default.
+
+        Returns
+        -------
+        name, output_doc : string, dict
+            The same name as what was passed in, and a doc that may be the same
+            instance as doc, a copy of doc, or a different dict altogether.
+        """
+        if name == "start":
+            if self._start_doc is None:
+                self._start_doc = doc
+            else:
+                raise EventModelValueError(
+                    "SingleRunDocumentRouter associated with start document "
+                    f'{self._start_doc["uid"]} '
+                    f'received a second start document with uid {doc["uid"]}'
+                )
+        elif name == "descriptor":
+            assert isinstance(self._start_doc, dict)
+            if doc["run_start"] == self._start_doc["uid"]:
+                self._descriptors[doc["uid"]] = doc
+            else:
+                raise EventModelValueError(
+                    "SingleRunDocumentRouter associated with start document "
+                    f'{self._start_doc["uid"]} '
+                    f'received a descriptor {doc["uid"]} associated with '
+                    f'start document {doc["run_start"]}'
+                )
+        # Defer to superclass for dispatch/processing.
+        return super().__call__(name, doc, validate=validate)
+
+    def get_start(self) -> dict:
+        """Convenience method returning the start document for the associated run.
+
+        If no start document has been processed EventModelError will be raised.
+
+        Returns
+        -------
+        start document : dict
+        """
+        if self._start_doc is None:
+            raise EventModelError(
+                "SingleRunDocumentRouter has not processed a start document yet"
+            )
+
+        return self._start_doc
+
+    def get_descriptor(self, doc: dict) -> EventDescriptor:
+        """Convenience method returning the descriptor associated with the
+        specified document.
+
+        Parameters
+        ----------
+        doc : dict
+            event-model document
+
+        Returns
+        -------
+        descriptor document : EventDescriptor
+        """
+        if "descriptor" not in doc:
+            raise EventModelValueError(
+                f"document is not associated with a descriptor:\n{doc}"
+            )
+        elif doc["descriptor"] not in self._descriptors:
+            raise EventModelValueError(
+                "SingleRunDocumentRouter has not processed a descriptor with "
+                f'uid {doc["descriptor"]}'
+            )
+
+        return self._descriptors[doc["descriptor"]]
+
+    def get_stream_name(self, doc: dict) -> str:
+        """Convenience method returning the name of the stream for the
+        specified document.
+
+        Parameters
+        ----------
+        doc : dict
+            event-model document
+
+        Returns
+        -------
+        stream name : str
+        """
+        return str(self.get_descriptor(doc).get("name"))
+
+
+class HandlerRegistryView(collections.abc.Mapping):
+    def __init__(self, handler_registry: dict) -> None:
+        self._handler_registry = handler_registry
+
+    def __repr__(self) -> str:
+        return f"HandlerRegistryView({self._handler_registry!r})"
+
+    def __getitem__(self, key: str) -> str:
+        return self._handler_registry[key]
+
+    def __iter__(self) -> Generator:
+        yield from self._handler_registry
+
+    def __len__(self) -> int:
+        return len(self._handler_registry)
+
+    def __setitem__(self, key: str, val: Any) -> None:
+        raise EventModelTypeError(
+            "The handler registry cannot be edited directly. "
+            "Instead, use the method Filler.register_handler."
+        )
+
+    def __delitem__(self, key: str) -> None:
+        raise EventModelTypeError(
+            "The handler registry cannot be edited directly. "
+            "Instead, use the method Filler.deregister_handler."
+        )
+
+
+# A "coercion function" is a hook that Filler can use to, for example, ensure
+# all the external data read in by handlers is an *actual* numpy array as
+# opposed to some other array-like such as h5py.Dataset or dask.array.Array,
+# or wrap every result in dask.array.from_array(...).
+#
+# It has access to the handler_class as it is registered and to some state
+# provided by the Filler (more on that below). It is expected to return
+# something that is API-compatible with handler_class. That might be
+# handler_class itself (a no-op), a subclass, or an altogether different class
+# with the same API. See example below.
+#
+# The "state provided by the Filler", mentioned above is passed into the
+# coercion functions below as ``filler_state``. It is a namespace containing
+# information that may be useful for the coercion functions. Currently, it has
+# ``filler_state.descriptor`` and ``filler_state.key``. More may be added in
+# the future if the need arises. Ultimately, this is necessary because Resource
+# documents don't know the shape and dtype of the data that they reference.
+# That situation could be improved in the future; to some degree this is a
+# work-around.
+#
+# As an implementation detail, the ``filler_state`` is a ``threading.local``
+# object to ensure that filling is thread-safe.
+#
+# Third-party libraries can register custom coercion options via the
+# register_coercion function below. For example, databroker uses this to
+# register a 'delayed' option. This avoids introducing dependency on a specific
+# delayed-computation framework (e.g. dask) in event-model itself.
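As a sketch of such a registration (the "as_float32" option here is hypothetical, modeled on the built-in force_numpy defined below):

    import numpy

    from event_model import register_coercion

    def as_float32(handler_class, filler_state):
        class Float32Handler(handler_class):
            def __call__(self, *args, **kwargs):
                # Coerce whatever the wrapped handler returns to float32.
                return numpy.asarray(super().__call__(*args, **kwargs),
                                     dtype=numpy.float32)

        return Float32Handler

    register_coercion("as_float32", as_float32)
    # Filler(handler_registry, coerce="as_float32") would now use it.
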
This avoids introducing dependency on a specific +# delayed-computation framework (e.g. dask) in event-model itself. + + +def as_is(handler_class, filler_state) -> Type: + "A no-op coercion function that returns handler_class unchanged." + return handler_class + + +@no_type_check +def force_numpy(handler_class: Type, filler_state) -> Any: + "A coercion that makes handler_class.__call__ return actual numpy.ndarray." + + class Subclass(handler_class): + def __call__(self, *args, **kwargs): + raw_result = super().__call__(*args, **kwargs) + result_as_array = numpy.asarray(raw_result) + return result_as_array + + Subclass.__name__ = f"Subclassed{handler_class.__name__}" + Subclass.__qualname__ = f"Subclassed{handler_class.__qualname__}" + return Subclass + + +# maps coerce option to corresponding coercion function +_coercion_registry = {"as_is": as_is, "force_numpy": force_numpy} + + +def register_coercion(name: str, func: Callable, overwrite: bool = False) -> None: + """ + Register a new option for :class:`Filler`'s ``coerce`` argument. + + This is an advanced feature. See source code for comments and examples. + + Parameters + ---------- + name : string + The new value for ``coerce`` that will invoke this function. + func : callable + Expected signature:: + + func(filler, handler_class) -> handler_class + overwrite : boolean, optional + False by default. Name collissions will raise ``EventModelValueError`` + unless this is set to ``True``. + """ + + if name in _coercion_registry and not overwrite: + # If we are re-registering the same object, there is no problem. + original = _coercion_registry[name] + if original is func: + return + raise EventModelValueError( + f"The coercion function {func} could not be registered for the " + f"name {name} because {_coercion_registry[name]} is already " + f"registered. Use overwrite=True to force it." + ) + _coercion_registry[name] = func + + +register_coersion = register_coercion # back-compat for a spelling mistake + + +class Filler(DocumentRouter): + """Pass documents through, loading any externally-referenced data. + + It is recommended to use the Filler as a context manager. Because the + Filler manages caches of potentially expensive resources (e.g. large data + in memory) managing its lifecycle is important. If used as a context + manager, it will drop references to its caches upon exit from the + context. Unless the user holds additional references to those caches, they + will be garbage collected. + + But for some applications, such as taking multiple passes over the same + data, it may be useful to keep a longer-lived Filler instance and then + manually delete it when finished. + + See Examples below. + + Parameters + ---------- + handler_registry : dict + Maps each 'spec' (a string identifying a given type or external + resource) to a handler class. + + A 'handler class' may be any callable with the signature:: + + handler_class(full_path, **resource_kwargs) + + It is expected to return an object, a 'handler instance', which is also + callable and has the following signature:: + + handler_instance(**datum_kwargs) + + As the names 'handler class' and 'handler instance' suggest, this is + typically implemented using a class that implements ``__init__`` and + ``__call__``, with the respective signatures. But in general it may be + any callable-that-returns-a-callable. + include : Iterable + The set of fields to fill. By default all unfilled fields are filled. + This parameter is mutually incompatible with the ``exclude`` parameter. 
+ exclude : Iterable + The set of fields to skip filling. By default all unfilled fields are + filled. This parameter is mutually incompatible with the ``include`` + parameter. + root_map: dict + str -> str mapping to account for temporarily moved/copied/remounted + files. Any resources which have a ``root`` in ``root_map`` will be + loaded using the mapped ``root``. + coerce : {'as_is', 'numpy'} + Default is 'as_is'. Other options (e.g. 'delayed') may be registered by + external packages at runtime. + handler_cache : dict, optional + A cache of handler instances. If None, a dict is used. + resource_cache : dict, optional + A cache of Resource documents. If None, a dict is used. + datum_cache : dict, optional + A cache of Datum documents. If None, a dict is used. + descriptor_cache : dict, optional + A cache of EventDescriptor documents. If None, a dict is used. + stream_resource_cache : dict, optional + A cache of StreamResource documents. If None, a dict is used. + stream_datum_cache : dict, optional + A cache of StreamDatum documents. If None, a dict is used. + retry_intervals : Iterable, optional + If data is not found on the first try, there may a race between the + I/O systems creating the external data and this stream of Documents + that reference it. If Filler encounters an ``IOError`` it will wait a + bit and retry. This list specifies how long to sleep (in seconds) + between subsequent attempts. Set to ``None`` to try only once before + raising ``DataNotAccessible``. A subclass may catch this exception and + implement a different retry mechanism --- for example using a different + implementation of sleep from an async framework. But by default, a + sequence of several retries with increasing sleep intervals is used. + The default sequence should not be considered stable; it may change at + any time as the authors tune it. + + Raises + ------ + DataNotAccessible + If an IOError is raised when loading the data after the configured + number of attempts. See the ``retry_intervals`` parameter for details. + + Examples + -------- + A Filler may be used as a context manager. + + >>> with Filler(handler_registry) as filler: + ... for name, doc in stream: + ... filler(name, doc) # mutates doc in place + ... # Do some analysis or export with name and doc. + + Or as a long-lived object. + + >>> f = Filler(handler_registry) + >>> for name, doc in stream: + ... filler(name, doc) # mutates doc in place + ... # Do some analysis or export with name and doc. + ... + >>> del filler # Free up memory from potentially large caches. + """ + + def __init__( + self, + handler_registry: dict, + *, + include: Optional[Iterable] = None, + exclude: Optional[Iterable] = None, + root_map: Optional[dict] = None, + coerce: str = "as_is", + handler_cache: Optional[dict] = None, + resource_cache: Optional[dict] = None, + datum_cache: Optional[dict] = None, + descriptor_cache: Optional[dict] = None, + stream_resource_cache: Optional[dict] = None, + stream_datum_cache: Optional[dict] = None, + inplace: Optional[bool] = None, + retry_intervals: List = [ + 0.001, + 0.002, + 0.004, + 0.008, + 0.016, + 0.032, + 0.064, + 0.128, + 0.256, + 0.512, + 1.024, + ], + ) -> None: + if inplace is None: + self._inplace = True + warnings.warn( + "'inplace' argument not specified. It is recommended to " + "specify True or False. In future releases, 'inplace' " + "will default to False." 
+            )
+        else:
+            self._inplace = inplace
+
+        if include is not None and exclude is not None:
+            raise EventModelValueError(
+                "The parameters `include` and `exclude` are mutually "
+                "incompatible. At least one must be left as the default, "
+                "None."
+            )
+        try:
+            self._coercion_func = _coercion_registry[coerce]
+        except KeyError:
+            raise EventModelKeyError(
+                f"The option coerce={coerce!r} was given to event_model.Filler. "
+                f"The valid options are {set(_coercion_registry)}."
+            )
+        self._coerce = coerce
+
+        # See comments on coercion functions above for the use of
+        # _current_state, which is passed to coercion functions' `filler_state`
+        # parameter.
+        self._current_state = threading.local()
+        self._unpatched_handler_registry: dict = {}
+        self._handler_registry: dict = {}
+        for spec, handler_class in handler_registry.items():
+            self.register_handler(spec, handler_class)
+        self.handler_registry = HandlerRegistryView(self._handler_registry)
+        if include is not None:
+            warnings.warn(
+                "In a future release of event-model, the argument `include` "
+                "will be removed from Filler.",
+                DeprecationWarning,
+            )
+        self.include = include
+        if exclude is not None:
+            warnings.warn(
+                "In a future release of event-model, the argument `exclude` "
+                "will be removed from Filler.",
+                DeprecationWarning,
+            )
+        self.exclude = exclude
+        self.root_map = root_map or {}
+        if handler_cache is None:
+            handler_cache = self.get_default_handler_cache()
+        if resource_cache is None:
+            resource_cache = self.get_default_resource_cache()
+        if datum_cache is None:
+            datum_cache = self.get_default_datum_cache()
+        if descriptor_cache is None:
+            descriptor_cache = self.get_default_descriptor_cache()
+        if stream_resource_cache is None:
+            stream_resource_cache = self.get_default_stream_resource_cache()
+        if stream_datum_cache is None:
+            stream_datum_cache = self.get_default_stream_datum_cache()
+        self._handler_cache = handler_cache
+        self._resource_cache = resource_cache
+        self._datum_cache = datum_cache
+        self._descriptor_cache = descriptor_cache
+        self._stream_resource_cache = stream_resource_cache
+        self._stream_datum_cache = stream_datum_cache
+        if retry_intervals is None:
+            retry_intervals = []
+        self.retry_intervals = retry_intervals
+        self._closed = False
+
+    def __eq__(self, other: Any) -> bool:
+        return (
+            type(self) is type(other)
+            and self.inplace == other.inplace
+            and self._coerce == other._coerce
+            and self.include == other.include
+            and self.exclude == other.exclude
+            and self.root_map == other.root_map
+            and type(self._handler_cache) is type(other._handler_cache)
+            and type(self._resource_cache) is type(other._resource_cache)
+            and type(self._datum_cache) is type(other._datum_cache)
+            and type(self._descriptor_cache) is type(other._descriptor_cache)
+            and type(self._stream_resource_cache) is type(other._stream_resource_cache)
+            and type(self._stream_datum_cache) is type(other._stream_datum_cache)
+            and self.retry_intervals == other.retry_intervals
+        )
+
+    def __getstate__(self) -> dict:
+        return dict(
+            inplace=self._inplace,
+            coercion_func=self._coerce,
+            handler_registry=self._unpatched_handler_registry,
+            include=self.include,
+            exclude=self.exclude,
+            root_map=self.root_map,
+            handler_cache=self._handler_cache,
+            resource_cache=self._resource_cache,
+            datum_cache=self._datum_cache,
+            descriptor_cache=self._descriptor_cache,
+            stream_resource_cache=self._stream_resource_cache,
+            stream_datum_cache=self._stream_datum_cache,
+            retry_intervals=self.retry_intervals,
+        )
+
+    def __setstate__(self, d: dict) -> None:
+        self._inplace = d["inplace"]
+        self._coerce = d["coercion_func"]
+
+        # See comments on coercion functions above for the use of
+        # _current_state, which is passed to coercion functions' `filler_state`
+        # parameter.
+        self._current_state = threading.local()
+        self._unpatched_handler_registry = {}
+        self._handler_registry = {}
+        for spec, handler_class in d["handler_registry"].items():
+            self.register_handler(spec, handler_class)
+        self.handler_registry = HandlerRegistryView(self._handler_registry)
+        self.include = d["include"]
+        self.exclude = d["exclude"]
+        self.root_map = d["root_map"]
+        self._handler_cache = d["handler_cache"]
+        self._resource_cache = d["resource_cache"]
+        self._datum_cache = d["datum_cache"]
+        self._descriptor_cache = d["descriptor_cache"]
+        self._stream_resource_cache = d["stream_resource_cache"]
+        self._stream_datum_cache = d["stream_datum_cache"]
+        retry_intervals = d["retry_intervals"]
+        if retry_intervals is None:
+            retry_intervals = []
+        self._retry_intervals = retry_intervals
+        self._closed = False
+
+    @property
+    def retry_intervals(self) -> List:
+        return self._retry_intervals
+
+    @retry_intervals.setter
+    def retry_intervals(self, value: Any) -> None:
+        self._retry_intervals = list(value)
+
+    def __repr__(self) -> str:
+        return "<Filler>" if not self._closed else "<Closed Filler>"
+
+    @staticmethod
+    def get_default_resource_cache() -> dict:
+        return {}
+
+    @staticmethod
+    def get_default_descriptor_cache() -> dict:
+        return {}
+
+    @staticmethod
+    def get_default_datum_cache() -> dict:
+        return {}
+
+    @staticmethod
+    def get_default_handler_cache() -> dict:
+        return {}
+
+    @staticmethod
+    def get_default_stream_datum_cache() -> dict:
+        return {}
+
+    @staticmethod
+    def get_default_stream_resource_cache() -> dict:
+        return {}
+
+    @property
+    def inplace(self) -> bool:
+        return self._inplace
+
+    def clone(
+        self,
+        handler_registry: Optional[dict] = None,
+        *,
+        root_map: Optional[dict] = None,
+        coerce: Optional[str] = None,
+        handler_cache: Optional[dict] = None,
+        resource_cache: Optional[dict] = None,
+        datum_cache: Optional[dict] = None,
+        descriptor_cache: Optional[dict] = None,
+        stream_resource_cache: Optional[dict] = None,
+        stream_datum_cache: Optional[dict] = None,
+        inplace: Optional[bool] = None,
+        retry_intervals: Optional[List] = None,
+    ) -> "Filler":
+        """
+        Create a new Filler instance from this one.
+
+        By default it will be created with the same settings that this Filler
+        has. Individual settings may be overridden here.
+
+        The clone does *not* share any caches or internal state with the
+        original.
+        """
+        if handler_registry is None:
+            handler_registry = self._unpatched_handler_registry
+        if root_map is None:
+            root_map = self.root_map
+        if coerce is None:
+            coerce = self._coerce
+        if inplace is None:
+            inplace = self.inplace
+        if retry_intervals is None:
+            retry_intervals = self.retry_intervals
+        return Filler(
+            handler_registry,
+            root_map=root_map,
+            coerce=coerce,
+            handler_cache=handler_cache,
+            resource_cache=resource_cache,
+            datum_cache=datum_cache,
+            descriptor_cache=descriptor_cache,
+            stream_resource_cache=stream_resource_cache,
+            stream_datum_cache=stream_datum_cache,
+            inplace=inplace,
+            retry_intervals=retry_intervals,
+        )
+
+    def register_handler(
+        self, spec: str, handler: Any, overwrite: bool = False
+    ) -> None:
+        """
+        Register a handler.
+
+        Parameters
+        ----------
+        spec: str
+        handler: Handler
+        overwrite: boolean, optional
+            False by default
+
+        Raises
+        ------
+        DuplicateHandler
+            If a handler is already registered for spec and overwrite is False
+
+        See https://blueskyproject.io/event-model/external.html
+        """
+        if (not overwrite) and (spec in self._handler_registry):
+            original = self._unpatched_handler_registry[spec]
+            if original is handler:
+                return
+            raise DuplicateHandler(
+                f"There is already a handler registered for the spec {spec!r}. "
+                f"Use overwrite=True to deregister the original.\n"
+                f"Original: {original}\n"
+                f"New: {handler}"
+            )
+
+        self.deregister_handler(spec)
+        # Keep a raw copy, used above for identifying redundant registration.
+        self._unpatched_handler_registry[spec] = handler
+        # Let the 'coerce' argument to Filler.__init__ modify the handler if it
+        # wants to.
+        self._handler_registry[spec] = self._coercion_func(handler, self._current_state)
+
+    def deregister_handler(self, spec: str) -> Any:
+        """
+        Deregister a handler.
+
+        If no handler is registered for this spec, this is a no-op and returns
+        None.
+
+        Parameters
+        ----------
+        spec: str
+
+        Returns
+        -------
+        handler: Handler or None
+
+        See https://blueskyproject.io/event-model/external.html
+        """
+        handler = self._handler_registry.pop(spec, None)
+        if handler is not None:
+            self._unpatched_handler_registry.pop(spec)
+            for key in list(self._handler_cache):
+                resource_uid, spec_ = key
+                if spec == spec_:
+                    del self._handler_cache[key]
+        return handler
+
+    def resource(self, doc: Resource) -> Resource:
+        # Defer creating the handler instance until we actually need it, when
+        # we fill the first Event field that requires this Resource.
+        self._resource_cache[doc["uid"]] = doc
+        return doc
+
+    # Handlers operate document-wise, so we'll explode pages into individual
+    # documents.
+
+    def datum_page(self, doc: DatumPage) -> DatumPage:
+        datum = self.datum  # Avoid attribute lookup in hot loop.
+        for datum_doc in unpack_datum_page(doc):
+            datum(datum_doc)
+        return doc
+
+    def datum(self, doc: Datum) -> Datum:
+        self._datum_cache[doc["datum_id"]] = doc
+        return doc
+
+    def stream_resource(self, doc: StreamResource) -> StreamResource:
+        self._stream_resource_cache[doc["uid"]] = doc
+        return doc
+
+    def stream_datum(self, doc: StreamDatum) -> StreamDatum:
+        self._stream_datum_cache[doc["uid"]] = doc
+        return doc
+
+    def event_page(self, doc: EventPage) -> EventPage:
+        # TODO We may be able to fill a page in place, and that may be more
+        # efficient than unpacking the page into Events, filling them, and then
+        # re-packing a new page. But that seems tricky in general since the
+        # page may be implemented as a DataFrame or dict, etc.
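+        # For now the approach below unpacks the page into Events, fills each
+        # Event in place, and packs the filled Events into a new page.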
+ filled_doc = self.fill_event_page( + doc, include=self.include, exclude=self.exclude + ) + return filled_doc + + def event(self, doc: Event) -> Event: + filled_doc = self.fill_event(doc, include=self.include, exclude=self.exclude) + return filled_doc + + def fill_event_page( + self, + doc: EventPage, + include: Optional[Iterable] = None, + exclude: Optional[Iterable] = None, + inplace: Optional[bool] = None, + ) -> EventPage: + filled_events = [] + for event_doc in unpack_event_page(doc): + filled_events.append( + self.fill_event( + event_doc, include=include, exclude=exclude, inplace=True + ) + ) + filled_doc = pack_event_page(*filled_events) + if inplace is None: + inplace = self._inplace + if inplace: + doc["data"] = filled_doc["data"] + doc["filled"] = filled_doc["filled"] + return doc + else: + return filled_doc + + def get_handler(self, resource: Resource) -> Any: + """ + Return a new Handler instance for this Resource. + + Parameters + ---------- + resource: Resource + + Returns + ------- + handler: Handler + """ + if self._closed: + raise EventModelRuntimeError( + "This Filler has been closed and is no longer usable." + ) + try: + handler_class = self.handler_registry[resource["spec"]] + except KeyError as err: + raise UndefinedAssetSpecification( + f"Resource document with uid {resource['uid']} " + f"refers to spec {resource['spec']!r} which is " + f"not defined in the Filler's " + f"handler registry." + ) from err + # Apply root_map. + resource_path = resource["resource_path"] + original_root = resource.get("root", "") + root = self.root_map.get(original_root, original_root) + if root: + resource_path = os.path.join(root, resource_path) + msg = ( + f"Error instantiating handler " + f"class {handler_class} " + f"with Resource document {resource}. " + ) + if root != original_root: + msg += ( + f"Its 'root' field was " + f"mapped from {original_root} to {root} by root_map." + ) + else: + msg += ( + f"Its 'root' field {original_root} was " f"*not* modified by root_map." + ) + error_to_raise = EventModelError(msg) + handler = _attempt_with_retries( + func=handler_class, + args=(resource_path,), + kwargs=resource["resource_kwargs"], + intervals=[0] + self.retry_intervals, + error_to_catch=IOError, + error_to_raise=error_to_raise, + ) + return handler + + def _get_handler_maybe_cached(self, resource: Resource) -> Any: + "Get a cached handler for this resource or make one and cache it." + key = (resource["uid"], resource["spec"]) + try: + handler = self._handler_cache[key] + except KeyError: + handler = self.get_handler(resource) + self._handler_cache[key] = handler + return handler + + def fill_event( + self, + doc, + include: Optional[Iterable] = None, + exclude: Optional[Iterable] = None, + inplace: Optional[bool] = None, + ) -> Any: + if inplace is None: + inplace = self._inplace + if inplace: + filled_doc = doc + else: + filled_doc = copy.deepcopy(doc) + descriptor = self._descriptor_cache[doc["descriptor"]] + from_datakeys = False + self._current_state.descriptor = descriptor + try: + needs_filling = {key for key, val in doc["filled"].items() if val is False} + except KeyError: + # This document is not telling us which, if any, keys are filled. + # Infer that none of the external data is filled. 
+            needs_filling = {
+                key for key, val in descriptor["data_keys"].items() if "external" in val
+            }
+            from_datakeys = True
+        for key in needs_filling:
+            self._current_state.key = key
+            if exclude is not None and key in exclude:
+                continue
+            if include is not None and key not in include:
+                continue
+            try:
+                datum_id = doc["data"][key]
+            except KeyError as err:
+                if from_datakeys:
+                    raise MismatchedDataKeys(
+                        "The documents are not valid. Either because they "
+                        "were recorded incorrectly in the first place, "
+                        "corrupted since, or exercising a yet-undiscovered "
+                        "bug in a reader. event['data'].keys() "
+                        "must equal descriptor['data_keys'].keys(). "
+                        f"event['data'].keys(): {doc['data'].keys()}, "
+                        "descriptor['data_keys'].keys(): "
+                        f"{descriptor['data_keys'].keys()}"
+                    ) from err
+                else:
+                    raise MismatchedDataKeys(
+                        "The documents are not valid. Either because they "
+                        "were recorded incorrectly in the first place, "
+                        "corrupted since, or exercising a yet-undiscovered "
+                        "bug in a reader. event['filled'].keys() "
+                        "must be a subset of event['data'].keys(). "
+                        f"event['data'].keys(): {doc['data'].keys()}, "
+                        "event['filled'].keys(): "
+                        f"{doc['filled'].keys()}"
+                    ) from err
+            # Look up the cached Datum doc.
+            try:
+                datum_doc = self._datum_cache[datum_id]
+            except KeyError as err:
+                raise UnresolvableForeignKeyError(
+                    datum_id,
+                    f"Event with uid {doc['uid']} refers to unknown Datum "
+                    f"datum_id {datum_id}",
+                ) from err
+            resource_uid = datum_doc["resource"]
+            # Look up the cached Resource.
+            try:
+                resource = self._resource_cache[resource_uid]
+            except KeyError as err:
+                raise UnresolvableForeignKeyError(
+                    resource_uid,
+                    f"Datum with id {datum_id} refers to unknown Resource "
+                    f"uid {resource_uid}",
+                ) from err
+            self._current_state.resource = resource
+            self._current_state.datum = datum_doc
+            handler = self._get_handler_maybe_cached(resource)
+            error_to_raise = DataNotAccessible(
+                f"Filler was unable to load the data referenced by "
+                f"the Datum document {datum_doc} and the Resource "
+                f"document {resource}."
+            )
+            payload = _attempt_with_retries(
+                func=handler,
+                args=(),
+                kwargs=datum_doc["datum_kwargs"],
+                intervals=[0] + self.retry_intervals,
+                error_to_catch=IOError,
+                error_to_raise=error_to_raise,
+            )
+            # Here we are intentionally modifying filled_doc in place; when
+            # inplace=True, filled_doc is doc itself.
+            filled_doc["data"][key] = payload
+            filled_doc["filled"][key] = datum_id
+        self._current_state.key = None
+        self._current_state.descriptor = None
+        self._current_state.resource = None
+        self._current_state.datum = None
+        return filled_doc
+
+    def descriptor(self, doc: EventDescriptor) -> EventDescriptor:
+        self._descriptor_cache[doc["uid"]] = doc
+        return doc
+
+    def __enter__(self):
+        return self
+
+    @no_type_check
+    def close(self) -> None:
+        """
+        Drop cached documents and handlers.
+
+        They are *not* explicitly cleared, so if there are other references to
+        these caches they will remain.
+        """
+        # Drop references to the caches. If the user holds another reference to
+        # them it's the user's problem to manage their lifecycle. If the user
+        # does not (e.g. they are the default caches) the gc will look after
+        # them.
+        self._closed = True
+        self._handler_cache = None
+        self._resource_cache = None
+        self._datum_cache = None
+        self._descriptor_cache = None
+        self._stream_resource_cache = None
+        self._stream_datum_cache = None
+
+    @property
+    def closed(self) -> bool:
+        return self._closed
+
+    def clear_handler_cache(self) -> None:
+        """
+        Clear any cached handler instances.
+
+        This operation may free significant memory, depending on the
+        implementation of the handlers.
+        """
+        self._handler_cache.clear()
+
+    def clear_document_caches(self) -> None:
+        """
+        Clear any cached documents.
+        """
+        self._resource_cache.clear()
+        self._descriptor_cache.clear()
+        self._datum_cache.clear()
+
+    def __exit__(self, *exc_details) -> None:
+        self.close()
+
+    def __call__(
+        self, name: str, doc: dict, validate: bool = False
+    ) -> Tuple[str, dict]:
+        if self._closed:
+            raise EventModelRuntimeError(
+                "This Filler has been closed and is no longer usable."
+            )
+        return super().__call__(name, doc, validate)
+
+
+class EventModelError(Exception): ...
+
+
+def _attempt_with_retries(
+    func,
+    args,
+    kwargs,
+    intervals: Iterable,
+    error_to_catch: Type[OSError],
+    error_to_raise: EventModelError,
+) -> Any:
+    """
+    Return func(*args, **kwargs), using a retry loop.
+
+    func, args, kwargs: self-explanatory
+    intervals: list
+        How long to wait (seconds) between each attempt, including the first.
+    error_to_catch: Exception class
+        If this is raised, retry.
+    error_to_raise: Exception instance or class
+        If we run out of retries, raise this from the proximate error.
+    """
+    error = None
+    for interval in intervals:
+        ttime.sleep(interval)
+        try:
+            return func(*args, **kwargs)
+        except error_to_catch as error_:
+            # The file may not be visible on the filesystem yet.
+            # Wait and try again. Stash the error in a variable
+            # that we can access later if we run out of attempts.
+            error = error_
+        else:
+            break
+    else:
+        # We have used up all our attempts. There seems to be an
+        # actual problem. Raise specified error from the error stashed above.
+        raise error_to_raise from error
+
+
+class NoFiller(Filler):
+    """
+    This does not fill the documents; it merely validates them.
+
+    It checks that all the references between the documents are resolvable and
+    *could* be filled. This is useful when the filling will be done later, as
+    a delayed computation, but we want to make sure in advance that we have all
+    the information that we will need when that computation occurs.
+    """
+
+    def __init__(self, *args, **kwargs) -> None:
+        # Do not make Filler make copies because we are not going to alter the
+        # documents anyway.
+        kwargs.setdefault("inplace", True)
+        super().__init__(*args, **kwargs)
+
+    def fill_event_page(
+        self,
+        doc: EventPage,
+        include: Optional[Iterable] = None,
+        exclude: Optional[Iterable] = None,
+        **kwargs,
+    ) -> EventPage:
+        filled_events = []
+        for event_doc in unpack_event_page(doc):
+            filled_events.append(
+                self.fill_event(
+                    event_doc, include=include, exclude=exclude, inplace=True
+                )
+            )
+        filled_doc = pack_event_page(*filled_events)
+        return filled_doc
+
+    def fill_event(
+        self,
+        doc: Event,
+        include: Optional[Iterable] = None,
+        exclude: Optional[Iterable] = None,
+        inplace: Optional[bool] = None,
+    ) -> Event:
+        descriptor = self._descriptor_cache[doc["descriptor"]]
+        from_datakeys = False
+        try:
+            needs_filling = {key for key, val in doc["filled"].items() if val is False}
+        except KeyError:
+            # This document is not telling us which, if any, keys are filled.
+            # Infer that none of the external data is filled.
+            needs_filling = {
+                key for key, val in descriptor["data_keys"].items() if "external" in val
+            }
+            from_datakeys = True
+        for key in needs_filling:
+            if exclude is not None and key in exclude:
+                continue
+            if include is not None and key not in include:
+                continue
+            try:
+                datum_id = doc["data"][key]
+            except KeyError as err:
+                if from_datakeys:
+                    raise MismatchedDataKeys(
+                        "The documents are not valid. Either because they "
+                        "were recorded incorrectly in the first place, "
+                        "corrupted since, or exercising a yet-undiscovered "
+                        "bug in a reader. event['data'].keys() "
+                        "must equal descriptor['data_keys'].keys(). "
+                        f"event['data'].keys(): {doc['data'].keys()}, "
+                        "descriptor['data_keys'].keys(): "
+                        f"{descriptor['data_keys'].keys()}"
+                    ) from err
+                else:
+                    raise MismatchedDataKeys(
+                        "The documents are not valid. Either because they "
+                        "were recorded incorrectly in the first place, "
+                        "corrupted since, or exercising a yet-undiscovered "
+                        "bug in a reader. event['filled'].keys() "
+                        "must be a subset of event['data'].keys(). "
+                        f"event['data'].keys(): {doc['data'].keys()}, "
+                        "event['filled'].keys(): "
+                        f"{doc['filled'].keys()}"
+                    ) from err
+            # Look up the cached Datum doc.
+            try:
+                datum_doc = self._datum_cache[datum_id]
+            except KeyError as err:
+                err_with_key = UnresolvableForeignKeyError(
+                    datum_id,
+                    f"Event with uid {doc['uid']} refers to unknown Datum "
+                    f"datum_id {datum_id}",
+                )
+                err_with_key.key = datum_id
+                raise err_with_key from err
+            resource_uid = datum_doc["resource"]
+            # Look up the cached Resource.
+            try:
+                self._resource_cache[resource_uid]
+            except KeyError as err:
+                raise UnresolvableForeignKeyError(
+                    resource_uid,
+                    f"Datum with id {datum_id} refers to unknown Resource "
+                    f"uid {resource_uid}",
+                ) from err
+        return doc
+
+
+DOCS_PASSED_IN_1_14_0_WARNING = (
+    "The callback {callback!r} raised {err!r} when "
+    "RunRouter passed it a {name!r} document. This is "
+    "probably because in earlier releases the RunRouter "
+    "expected its factory functions to forward the 'start' "
+    "document, but starting in event-model 1.14.0 the "
+    "RunRouter passes in the document, causing the "
+    "callback to receive it twice and potentially raise "
+    "an error. Update the factory function. In a future "
+    "release this warning will become an error."
+)
+
+
+class RunRouter(DocumentRouter):
+    """
+    Routes documents, by run, to callbacks it creates from factory functions.
+
+    A RunRouter is callable, and it has the signature ``router(name, doc)``,
+    suitable for subscribing to the RunEngine.
+
+    It is configured with a list of factory functions that produce callbacks in
+    a two-layered scheme, described below.
+
+    .. warning::
+
+       This is experimental. In a future release, it may be changed in a
+       backward-incompatible way or fully removed.
+
+    Parameters
+    ----------
+    factories : list
+        A list of callables with the signature::
+
+            factory('start', start_doc) -> List[Callbacks], List[SubFactories]
+
+        which should return two lists, which may be empty. All items in the
+        first list should be callbacks --- callables with the signature::
+
+            callback(name, doc)
+
+        that will receive that RunStart document and all subsequent documents
+        from the run including the RunStop document. All items in the second
+        list should be "subfactories" with the signature::
+
+            subfactory('descriptor', descriptor_doc) -> List[Callbacks]
+
+        These will receive each of the EventDescriptor documents for the run,
+        as they arrive. They must return one list, which may be empty,
+        containing callbacks that will receive the RunStart document, that
+        EventDescriptor, all Events that reference that EventDescriptor and
+        finally the RunStop document for the run.
+    handler_registry : dict, optional
+        This is passed to the Filler or whatever class is given in the
+        filler_class parameter below.
+
+        Maps each 'spec' (a string identifying a given type or external
+        resource) to a handler class.
+
+        A 'handler class' may be any callable with the signature::
+
+            handler_class(full_path, **resource_kwargs)
+
+        It is expected to return an object, a 'handler instance', which is also
+        callable and has the following signature::
+
+            handler_instance(**datum_kwargs)
+
+        As the names 'handler class' and 'handler instance' suggest, this is
+        typically implemented using a class that implements ``__init__`` and
+        ``__call__``, with the respective signatures. But in general it may be
+        any callable-that-returns-a-callable.
+    root_map: dict, optional
+        This is passed to Filler or whatever class is given in the filler_class
+        parameter below.
+
+        str -> str mapping to account for temporarily moved/copied/remounted
+        files. Any resources which have a ``root`` in ``root_map`` will be
+        loaded using the mapped ``root``.
+    filler_class: type
+        This is Filler by default. It can be a Filler subclass,
+        ``functools.partial(Filler, ...)``, or any class that provides the same
+        methods as ``DocumentRouter``.
+    fill_or_fail: boolean, optional
+        By default (False), documents with a spec not in ``handler_registry``
+        are let through unfilled. If set to True, fill everything and raise
+        ``UndefinedAssetSpecification`` if some unknown spec is encountered.
+    """
+
+    def __init__(
+        self,
+        factories,
+        handler_registry: Optional[dict] = None,
+        *,
+        root_map: Optional[dict] = None,
+        filler_class: Type[Filler] = Filler,
+        fill_or_fail: bool = False,
+    ) -> None:
+        self.factories = factories
+        self.handler_registry = handler_registry or {}
+        self.filler_class = filler_class
+        self.fill_or_fail = fill_or_fail
+        self.root_map = root_map
+
+        # Map RunStart UID to "subfactory" functions that want all
+        # EventDescriptors from that run.
+        self._subfactories: defaultdict = defaultdict(list)
+
+        # Callbacks that want all the documents from a given run, keyed on
+        # RunStart UID.
+        self._factory_cbs_by_start: defaultdict = defaultdict(list)
+
+        # Callbacks that want all the documents from a given run, keyed on
+        # each EventDescriptor UID in the run.
+        self._factory_cbs_by_descriptor: defaultdict = defaultdict(list)
+
+        # Callbacks that want documents related to a given EventDescriptor,
+        # keyed on EventDescriptor UID.
+        self._subfactory_cbs_by_descriptor: defaultdict = defaultdict(list)
+
+        # Callbacks that want documents related to a given EventDescriptor,
+        # keyed on the RunStart UID referenced by that EventDescriptor.
+        self._subfactory_cbs_by_start: defaultdict = defaultdict(list)
+
+        # Map RunStart UID to RunStart document. This is used to send
+        # RunStart documents to subfactory callbacks.
+        self._start_to_start_doc: dict = dict()
+
+        # Map RunStart UID to the list of EventDescriptor UIDs for that run.
+        # This is used to facilitate efficient cleanup of the caches above.
+        self._start_to_descriptors: defaultdict = defaultdict(list)
+
+        # Map EventDescriptor UID to RunStart UID. This is used for looking up
+        # Fillers.
+        self._descriptor_to_start: dict = {}
+
+        # Map Resource UID to RunStart UID.
+        self._resources: dict = {}
+        self._stream_resources: dict = {}
+
+        # Old-style Resources that do not have a RunStart UID
+        self._unlabeled_resources: deque = deque(maxlen=10000)
+
+        # Map RunStart UID to instances of self.filler_class.
+        self._fillers: dict = {}
+
+    def __repr__(self):
+        return (
+            "RunRouter([\n"
+            + "\n".join(f"    {factory}" for factory in self.factories)
+            + "])"
+        )
+
+    def start(self, start_doc: RunStart) -> None:
+        uid = start_doc["uid"]
+        # If we get the same uid twice, weird things will happen, so check for
+        # that and give a nice error message.
+        if uid in self._start_to_start_doc:
+            if self._start_to_start_doc[uid] == start_doc:
+                raise ValueError(
+                    "RunRouter received the same 'start' document twice:\n"
+                    f"{start_doc!r}"
+                )
+            else:
+                raise ValueError(
+                    "RunRouter received two 'start' documents with different "
+                    "contents but the same uid:\n"
+                    f"First: {self._start_to_start_doc[uid]!r}\n"
+                    f"Second: {start_doc!r}"
+                )
+        self._start_to_start_doc[uid] = start_doc
+        filler = self.filler_class(
+            self.handler_registry, root_map=self.root_map, inplace=False
+        )
+        self._fillers[uid] = filler
+        # No need to pass the document to filler
+        # because Fillers do nothing with 'start'.
+        for factory in self.factories:
+            callbacks, subfactories = factory("start", start_doc)
+            for callback in callbacks:
+                try:
+                    callback("start", start_doc)
+                except Exception as err:
+                    warnings.warn(
+                        DOCS_PASSED_IN_1_14_0_WARNING.format(
+                            callback=callback, name="start", err=err
+                        )
+                    )
+                    raise err
+            self._factory_cbs_by_start[uid].extend(callbacks)
+            self._subfactories[uid].extend(subfactories)
+
+    def descriptor(self, descriptor_doc: EventDescriptor) -> None:
+        descriptor_uid = descriptor_doc["uid"]
+        start_uid = descriptor_doc["run_start"]
+
+        # Keep track of the RunStart UID -> [EventDescriptor UIDs] mapping for
+        # purposes of cleanup in stop().
+        self._start_to_descriptors[start_uid].append(descriptor_uid)
+        # Keep track of the EventDescriptor UID -> RunStart UID for filling
+        # purposes.
+        self._descriptor_to_start[descriptor_uid] = start_uid
+
+        self._fillers[start_uid].descriptor(descriptor_doc)
+        # Apply all factory cbs for this run to this descriptor, and run them.
+        factory_cbs = self._factory_cbs_by_start[start_uid]
+        self._factory_cbs_by_descriptor[descriptor_uid].extend(factory_cbs)
+        for callback in factory_cbs:
+            callback("descriptor", descriptor_doc)
+        # Let all the subfactories add any relevant callbacks.
+ for subfactory in self._subfactories[start_uid]: + callbacks = subfactory("descriptor", descriptor_doc) + self._subfactory_cbs_by_start[start_uid].extend(callbacks) + self._subfactory_cbs_by_descriptor[descriptor_uid].extend(callbacks) + for callback in callbacks: + try: + start_doc = self._start_to_start_doc[start_uid] + callback("start", start_doc) + except Exception as err: + warnings.warn( + DOCS_PASSED_IN_1_14_0_WARNING.format( + callback=callback, name="start", err=err + ) + ) + raise err + try: + callback("descriptor", descriptor_doc) + except Exception as err: + warnings.warn( + DOCS_PASSED_IN_1_14_0_WARNING.format( + callback=callback, name="descriptor", err=err + ) + ) + raise err + + def event_page(self, doc: EventPage): + descriptor_uid = doc["descriptor"] + start_uid = self._descriptor_to_start[descriptor_uid] + try: + doc = self._fillers[start_uid].event_page(doc) + except UndefinedAssetSpecification: + if self.fill_or_fail: + raise + for callback in self._factory_cbs_by_descriptor[descriptor_uid]: + callback("event_page", doc) + for callback in self._subfactory_cbs_by_descriptor[descriptor_uid]: + callback("event_page", doc) + + def datum_page(self, doc: DatumPage) -> None: + resource_uid = doc["resource"] + try: + start_uid = self._resources[resource_uid] + except KeyError: + if resource_uid not in self._unlabeled_resources: + raise UnresolvableForeignKeyError( + resource_uid, + f"DatumPage refers to unknown Resource uid {resource_uid}", + ) + # Old Resources do not have a reference to a RunStart document, + # so in turn we cannot immediately tell which run these datum + # documents belong to. + # Fan them out to every run currently flowing through RunRouter. If + # they are not applicable they will do no harm, and this is + # expected to be an increasingly rare case. + for callbacks in self._factory_cbs_by_start.values(): + for callback in callbacks: + callback("datum_page", doc) + for callbacks in self._subfactory_cbs_by_start.values(): + for callback in callbacks: + callback("datum_page", doc) + for filler in self._fillers.values(): + filler.datum_page(doc) + else: + self._fillers[start_uid].datum_page(doc) + for callback in self._factory_cbs_by_start[start_uid]: + callback("datum_page", doc) + for callback in self._subfactory_cbs_by_start[start_uid]: + callback("datum_page", doc) + + def stream_datum(self, doc: StreamDatum) -> None: + resource_uid = doc["stream_resource"] + start_uid = self._stream_resources[resource_uid] + self._fillers[start_uid].stream_datum(doc) + for callback in self._factory_cbs_by_start[start_uid]: + callback("stream_datum", doc) + for callback in self._subfactory_cbs_by_start[start_uid]: + callback("stream_datum", doc) + + def resource(self, doc: Resource) -> None: + try: + start_uid = doc["run_start"] + except KeyError: + # Old Resources do not have a reference to a RunStart document. + # Fan them out to every run currently flowing through RunRouter. If + # they are not applicable they will do no harm, and this is + # expected to be an increasingly rare case. 
+            self._unlabeled_resources.append(doc["uid"])
+            for callbacks in self._factory_cbs_by_start.values():
+                for callback in callbacks:
+                    callback("resource", doc)
+            for callbacks in self._subfactory_cbs_by_start.values():
+                for callback in callbacks:
+                    callback("resource", doc)
+            for filler in self._fillers.values():
+                filler.resource(doc)
+        else:
+            self._fillers[start_uid].resource(doc)
+            self._resources[doc["uid"]] = doc["run_start"]
+            for callback in self._factory_cbs_by_start[start_uid]:
+                callback("resource", doc)
+            for callback in self._subfactory_cbs_by_start[start_uid]:
+                callback("resource", doc)
+
+    def stream_resource(self, doc: StreamResource) -> None:
+        start_uid = doc["run_start"]  # No try/except: always present here.
+        self._fillers[start_uid].stream_resource(doc)
+        self._stream_resources[doc["uid"]] = doc["run_start"]
+        for callback in self._factory_cbs_by_start[start_uid]:
+            callback("stream_resource", doc)
+        for callback in self._subfactory_cbs_by_start[start_uid]:
+            callback("stream_resource", doc)
+
+    def stop(self, doc: RunStop) -> None:
+        start_uid = doc["run_start"]
+        for callback in self._factory_cbs_by_start[start_uid]:
+            callback("stop", doc)
+        for callback in self._subfactory_cbs_by_start[start_uid]:
+            callback("stop", doc)
+        # Clean up references.
+        self._fillers.pop(start_uid, None)
+        self._subfactories.pop(start_uid, None)
+        self._factory_cbs_by_start.pop(start_uid, None)
+        self._subfactory_cbs_by_start.pop(start_uid, None)
+        for descriptor_uid in self._start_to_descriptors.pop(start_uid, ()):
+            self._descriptor_to_start.pop(descriptor_uid, None)
+            self._factory_cbs_by_descriptor.pop(descriptor_uid, None)
+            self._subfactory_cbs_by_descriptor.pop(descriptor_uid, None)
+        # _resources maps Resource UID -> RunStart UID, so remove the entries
+        # whose *value* is this run's start_uid.
+        for resource_uid in [k for k, v in self._resources.items() if v == start_uid]:
+            self._resources.pop(resource_uid, None)
+        self._start_to_start_doc.pop(start_uid, None)
+
+
+# Here we define subclasses of all of the built-in Python exception types (as
+# needed, not a comprehensive list) so that all errors raised *directly* by
+# event_model also inherit from EventModelError as well as the appropriate
+# built-in type. This means, for example, that `EventModelValueError` can be
+# caught by `except ValueError:` or by `except EventModelError:`. This can be
+# useful for higher-level libraries and for debugging.
+
+
+class EventModelKeyError(EventModelError, KeyError): ...
+
+
+class EventModelValueError(EventModelError, ValueError): ...
+
+
+class EventModelRuntimeError(EventModelError, RuntimeError): ...
+
+
+class EventModelTypeError(EventModelError, TypeError): ...
+
+
+class EventModelValidationError(EventModelError): ...
+
+
+class UnfilledData(EventModelError):
+    """raised when unfilled data is found"""
+
+    ...
+
+
+class UndefinedAssetSpecification(EventModelKeyError):
+    """raised when a resource spec is missing from the handler registry"""
+
+    ...
+
+
+class DataNotAccessible(EventModelError, IOError):
+    """raised when attempts to load data referenced by Datum document fail"""
+
+    ...
+
+
+class UnresolvableForeignKeyError(EventModelValueError):
+    """when we see a foreign key before we see the thing to which it refers"""
+
+    def __init__(self, key: Any, message: str) -> None:
+        self.key = key
+        self.message = message
+
+
+class DuplicateHandler(EventModelRuntimeError):
+    """raised when a handler is already registered for a given spec"""
+
+    ...
+
+
+class InvalidData(EventModelError):
+    """raised when the data is invalid"""
+
+    ...
+
+
+class MismatchedDataKeys(InvalidData):
+    """
+    Raised when any data key structures are out of sync. This includes
+    event['data'].keys(), descriptor['data_keys'].keys(),
+    event['timestamps'].keys(), and event['filled'].keys().
+    """
+
+    ...
+
+
+SCHEMA_PATH = "schemas"
+SCHEMA_NAMES = {
+    DocumentNames.start: "schemas/run_start.json",
+    DocumentNames.stop: "schemas/run_stop.json",
+    DocumentNames.event: "schemas/event.json",
+    DocumentNames.event_page: "schemas/event_page.json",
+    DocumentNames.descriptor: "schemas/event_descriptor.json",
+    DocumentNames.datum: "schemas/datum.json",
+    DocumentNames.datum_page: "schemas/datum_page.json",
+    DocumentNames.resource: "schemas/resource.json",
+    DocumentNames.stream_datum: "schemas/stream_datum.json",
+    DocumentNames.stream_resource: "schemas/stream_resource.json",
+    # DEPRECATED:
+    DocumentNames.bulk_events: "schemas/bulk_events.json",
+    DocumentNames.bulk_datum: "schemas/bulk_datum.json",
+}
+schemas = {}
+for name, filename in SCHEMA_NAMES.items():
+    ref = importlib_resources.files("event_model") / filename
+    with ref.open() as f:
+        schemas[name] = json.load(f)
+
+
+def _is_array(checker, instance):
+    return (
+        jsonschema.validators.Draft202012Validator.TYPE_CHECKER.is_type(
+            instance, "array"
+        )
+        or isinstance(instance, tuple)
+        or hasattr(instance, "__array__")
+    )
+
+
+_array_type_checker = jsonschema.validators.Draft202012Validator.TYPE_CHECKER.redefine(
+    "array", _is_array
+)
+
+_Validator = jsonschema.validators.extend(
+    jsonschema.validators.Draft202012Validator, type_checker=_array_type_checker
+)
+
+schema_validators = {
+    name: _Validator(schema=schema) for name, schema in schemas.items()
+}
+
+
+@dataclass
+class ComposeDatum:
+    resource: Resource
+    counter: Iterator
+
+    def __call__(self, datum_kwargs: Dict[str, Any], validate: bool = True) -> Datum:
+        resource_uid = self.resource["uid"]
+        doc = Datum(
+            resource=resource_uid,
+            datum_kwargs=datum_kwargs,
+            datum_id="{}/{}".format(resource_uid, next(self.counter)),
+        )
+        if validate:
+            schema_validators[DocumentNames.datum].validate(doc)
+        return doc
+
+
+def compose_datum(
+    *,
+    resource: Resource,
+    counter: Iterator,
+    datum_kwargs: Dict[str, Any],
+    validate: bool = True,
+) -> Datum:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
+    """
+    return ComposeDatum(resource, counter)(datum_kwargs, validate=validate)
+
+
+@dataclass
+class ComposeDatumPage:
+    resource: Resource
+    counter: Iterator
+
+    def __call__(self, datum_kwargs: dict, validate: bool = True) -> DatumPage:
+        resource_uid = self.resource["uid"]
+        any_column, *_ = datum_kwargs.values()
+        N = len(any_column)
+        doc = DatumPage(
+            resource=resource_uid,
+            datum_kwargs=datum_kwargs,
+            datum_id=[
+                "{}/{}".format(resource_uid, next(self.counter)) for _ in range(N)
+            ],
+        )
+        if validate:
+            schema_validators[DocumentNames.datum_page].validate(doc)
+        return doc
+
+
+def compose_datum_page(
+    *,
+    resource: Resource,
+    counter: Iterator,
+    datum_kwargs: Dict[str, List[Any]],
+    validate: bool = True,
+) -> DatumPage:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
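+
+    A minimal sketch of the preferred class-based route (the spec, paths,
+    and kwargs here are illustrative)::
+
+        bundle = compose_resource(
+            spec="AD_HDF5",
+            root="/data",
+            resource_path="2024/sample.h5",
+            resource_kwargs={"frame_per_point": 1},
+        )
+        datum_page = bundle.compose_datum_page({"point_number": [0, 1, 2]})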
+    """
+    return ComposeDatumPage(resource, counter)(datum_kwargs, validate=validate)
+
+
+@dataclass
+class ComposeResourceBundle:
+    resource_doc: Resource
+    compose_datum: ComposeDatum
+    compose_datum_page: ComposeDatumPage
+
+    # iter for backwards compatibility
+    def __iter__(self) -> Iterator:
+        return iter(
+            (
+                self.resource_doc,
+                self.compose_datum,
+                self.compose_datum_page,
+            )
+        )
+
+
+PATH_SEMANTICS: Dict[str, Literal["posix", "windows"]] = {
+    "posix": "posix",
+    "nt": "windows",
+}
+default_path_semantics: Literal["posix", "windows"] = PATH_SEMANTICS[os.name]
+
+
+@dataclass
+class ComposeResource:
+    start: Optional[RunStart]
+
+    def __call__(
+        self,
+        spec: str,
+        root: str,
+        resource_path: str,
+        resource_kwargs: Dict[str, Any],
+        path_semantics: Literal["posix", "windows"] = default_path_semantics,
+        uid: Optional[str] = None,
+        validate: bool = True,
+    ) -> ComposeResourceBundle:
+        if uid is None:
+            uid = str(uuid.uuid4())
+
+        doc = Resource(
+            path_semantics=path_semantics,
+            uid=uid,
+            spec=spec,
+            root=root,
+            resource_kwargs=resource_kwargs,
+            resource_path=resource_path,
+        )
+
+        if self.start:
+            doc["run_start"] = self.start["uid"]
+
+        if validate:
+            schema_validators[DocumentNames.resource].validate(doc)
+
+        counter = itertools.count()
+        return ComposeResourceBundle(
+            doc,
+            ComposeDatum(resource=doc, counter=counter),
+            ComposeDatumPage(resource=doc, counter=counter),
+        )
+
+
+def compose_resource(
+    *,
+    spec: str,
+    root: str,
+    resource_path: str,
+    resource_kwargs: Dict[str, Any],
+    path_semantics: Literal["posix", "windows"] = default_path_semantics,
+    start: Optional[RunStart] = None,
+    uid: Optional[str] = None,
+    validate: bool = True,
+) -> ComposeResourceBundle:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
+    """
+    return ComposeResource(start)(
+        spec,
+        root,
+        resource_path,
+        resource_kwargs,
+        path_semantics=path_semantics,
+        uid=uid,
+        validate=validate,
+    )
+
+
+@dataclass
+class ComposeStreamDatum:
+    stream_resource: StreamResource
+    counter: Iterator
+
+    def __call__(
+        self,
+        indices: StreamRange,
+        seq_nums: Optional[StreamRange] = None,
+        descriptor: Optional[EventDescriptor] = None,
+        validate: bool = True,
+    ) -> StreamDatum:
+        resource_uid = self.stream_resource["uid"]
+
+        # If the seq_nums aren't passed in then the bluesky
+        # bundler will keep track of them
+        if not seq_nums:
+            seq_nums = StreamRange(start=0, stop=0)
+
+        doc = StreamDatum(
+            stream_resource=resource_uid,
+            uid=f"{resource_uid}/{next(self.counter)}",
+            seq_nums=seq_nums,
+            indices=indices,
+            descriptor=descriptor["uid"] if descriptor else "",
+        )
+
+        if validate:
+            schema_validators[DocumentNames.stream_datum].validate(doc)
+
+        return doc
+
+
+def compose_stream_datum(
+    *,
+    stream_resource: StreamResource,
+    counter: Iterator,
+    seq_nums: StreamRange,
+    indices: StreamRange,
+    validate: bool = True,
+) -> StreamDatum:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
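+
+    A minimal sketch of the preferred class-based route (the mimetype, uri,
+    and parameters here are illustrative)::
+
+        bundle = compose_stream_resource(
+            mimetype="application/x-hdf5",
+            uri="file://localhost/data/sample.h5",
+            data_key="det",
+            parameters={"dataset": "/entry/data"},
+        )
+        stream_datum = bundle.compose_stream_datum(
+            indices=StreamRange(start=0, stop=10)
+        )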
+    """
+    warnings.warn(
+        "compose_stream_datum() will be removed in a future minor release.",
+        DeprecationWarning,
+    )
+    return ComposeStreamDatum(stream_resource, counter)(
+        indices,
+        seq_nums=seq_nums,
+        validate=validate,
+    )
+
+
+@dataclass
+class ComposeStreamResourceBundle:
+    stream_resource_doc: StreamResource
+    compose_stream_datum: ComposeStreamDatum
+
+    # iter for backwards compatibility
+    def __iter__(self) -> Iterator:
+        return iter(
+            (
+                self.stream_resource_doc,
+                self.compose_stream_datum,
+            )
+        )
+
+
+@dataclass
+class ComposeStreamResource:
+    start: Optional[RunStart] = None
+
+    def __call__(
+        self,
+        mimetype: str,
+        uri: str,
+        data_key: str,
+        parameters: Dict[str, Any],
+        uid: Optional[str] = None,
+        validate: bool = True,
+    ) -> ComposeStreamResourceBundle:
+        if uid is None:
+            uid = str(uuid.uuid4())
+
+        doc = StreamResource(
+            uid=uid,
+            data_key=data_key,
+            mimetype=mimetype,
+            uri=uri,
+            parameters=parameters,
+        )
+
+        if self.start:
+            doc["run_start"] = self.start["uid"]
+
+        if validate:
+            schema_validators[DocumentNames.stream_resource].validate(doc)
+
+        return ComposeStreamResourceBundle(
+            doc,
+            ComposeStreamDatum(
+                stream_resource=doc,
+                counter=itertools.count(),
+            ),
+        )
+
+
+def compose_stream_resource(
+    *,
+    mimetype: str,
+    uri: str,
+    data_key: str,
+    parameters: Dict[str, Any],
+    start: Optional[RunStart] = None,
+    uid: Optional[str] = None,
+    validate: bool = True,
+) -> ComposeStreamResourceBundle:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
+    """
+    return ComposeStreamResource(start=start)(
+        mimetype,
+        uri,
+        data_key,
+        parameters,
+        uid=uid,
+        validate=validate,
+    )
+
+
+@dataclass
+class ComposeStop:
+    start: RunStart
+    event_counters: Dict[str, int]
+    poison_pill: List
+
+    def __call__(
+        self,
+        exit_status: Literal["success", "abort", "fail"] = "success",
+        reason: str = "",
+        uid: Optional[str] = None,
+        time: Optional[float] = None,
+        validate: bool = True,
+    ) -> RunStop:
+        if self.poison_pill:
+            raise EventModelError(
+                "Already composed a RunStop document for run "
+                "{!r}.".format(self.start["uid"])
+            )
+        self.poison_pill.append(object())
+        if uid is None:
+            uid = str(uuid.uuid4())
+        if time is None:
+            time = ttime.time()
+        doc = RunStop(
+            uid=uid,
+            time=time,
+            run_start=self.start["uid"],
+            exit_status=exit_status,
+            reason=reason,
+            num_events={k: v - 1 for k, v in self.event_counters.items()},
+        )
+        if validate:
+            schema_validators[DocumentNames.stop].validate(doc)
+        return doc
+
+
+def compose_stop(
+    *,
+    start: RunStart,
+    event_counters: Dict[str, int],
+    poison_pill: List,
+    exit_status: Literal["success", "abort", "fail"] = "success",
+    reason: str = "",
+    uid: Optional[str] = None,
+    time: Optional[float] = None,
+    validate: bool = True,
+) -> RunStop:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
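+
+    A minimal sketch of the preferred class-based route (assumes the bundle
+    came from :func:`compose_run`)::
+
+        bundle = compose_run()
+        stop_doc = bundle.compose_stop()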
+    """
+    return ComposeStop(
+        start=start,
+        event_counters=event_counters,
+        poison_pill=poison_pill,
+    )(exit_status=exit_status, reason=reason, uid=uid, time=time, validate=validate)
+
+
+def length_of_value(dictionary: Dict[str, List], error_msg: str) -> Optional[int]:
+    length = None
+    for k, v in dictionary.items():
+        v_len = len(v)
+        if length is not None:
+            if v_len != length:
+                raise EventModelError(error_msg)
+        length = v_len
+    return length
+
+
+@dataclass
+class ComposeEventPage:
+    descriptor: EventDescriptor
+    event_counters: Dict[str, int]
+
+    def __call__(
+        self,
+        data: Dict[str, List],
+        timestamps: Dict[str, Any],
+        seq_num: Optional[List[int]] = None,
+        filled: Optional[Dict[str, List[Union[bool, str]]]] = None,
+        uid: Optional[List] = None,
+        time: Optional[List] = None,
+        validate: bool = True,
+    ) -> EventPage:
+        timestamps_length = length_of_value(
+            timestamps,
+            "Cannot compose event_page: event_page contains `timestamps` "
+            "list values of different lengths",
+        )
+        data_length = length_of_value(
+            data,
+            "Cannot compose event_page: event_page contains `data` "
+            "lists of different lengths",
+        )
+        assert timestamps_length == data_length, (
+            "Cannot compose event_page: the lists in `timestamps` are of a different "
+            "length to those in `data`"
+        )
+
+        if seq_num is None:
+            last_seq_num = self.event_counters[self.descriptor["name"]]
+            seq_num = list(
+                range(last_seq_num, len(next(iter(data.values()))) + last_seq_num)
+            )
+        N = len(seq_num)
+        if uid is None:
+            uid = [str(uuid.uuid4()) for _ in range(N)]
+        if time is None:
+            time = [ttime.time()] * N
+        if filled is None:
+            filled = {}
+        doc = EventPage(
+            uid=uid,
+            time=time,
+            data=data,
+            timestamps=timestamps,
+            seq_num=seq_num,
+            filled=filled,
+            descriptor=self.descriptor["uid"],
+        )
+        if validate:
+            schema_validators[DocumentNames.event_page].validate(doc)
+
+        if not (
+            set(
+                keys_without_stream_keys(
+                    self.descriptor["data_keys"], self.descriptor["data_keys"]
+                )
+            )
+            == set(keys_without_stream_keys(data, self.descriptor["data_keys"]))
+            == set(
+                keys_without_stream_keys(timestamps, self.descriptor["data_keys"])
+            )
+        ):
+            raise EventModelValidationError(
+                'These sets of keys must match (other than "STREAM:" keys):\n'
+                "event['data'].keys(): {}\n"
+                "event['timestamps'].keys(): {}\n"
+                "descriptor['data_keys'].keys(): {}\n".format(
+                    data.keys(),
+                    timestamps.keys(),
+                    self.descriptor["data_keys"].keys(),
+                )
+            )
+        if set(filled) - set(data):
+            raise EventModelValidationError(
+                "Keys in event['filled'] {} must be a subset of those in "
+                "event['data'] {}".format(filled.keys(), data.keys())
+            )
+        self.event_counters[self.descriptor["name"]] += len(seq_num)
+        return doc
+
+
+def compose_event_page(
+    *,
+    descriptor: EventDescriptor,
+    event_counters: Dict[str, int],
+    data: Dict[str, List],
+    timestamps: Dict[str, Any],
+    seq_num: List[int],
+    filled: Optional[Dict[str, List[Union[bool, str]]]] = None,
+    uid: Optional[List] = None,
+    time: Optional[List] = None,
+    validate: bool = True,
+) -> EventPage:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
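+
+    A minimal sketch of the preferred class-based route (stream name and
+    data keys are illustrative)::
+
+        bundle = compose_run()
+        desc_bundle = bundle.compose_descriptor(
+            name="primary",
+            data_keys={"x": {"source": "sim", "dtype": "number", "shape": []}},
+        )
+        event_page = desc_bundle.compose_event_page(
+            data={"x": [1.0, 2.0]},
+            timestamps={"x": [100.0, 101.0]},
+        )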
+    """
+    return ComposeEventPage(descriptor, event_counters)(
+        data,
+        timestamps,
+        seq_num=seq_num,
+        filled=filled,
+        uid=uid,
+        time=time,
+        validate=validate,
+    )
+
+
+def keys_without_stream_keys(dictionary, descriptor_data_keys):
+    return [
+        key
+        for key in dictionary.keys()
+        if (
+            "external" not in descriptor_data_keys[key]
+            or descriptor_data_keys[key]["external"] != "STREAM:"
+        )
+    ]
+
+
+@dataclass
+class ComposeEvent:
+    descriptor: EventDescriptor
+    event_counters: Dict[str, int]
+
+    def __call__(
+        self,
+        data: dict,
+        timestamps: dict,
+        seq_num: Optional[int] = None,
+        filled: Optional[Dict[str, Union[bool, str]]] = None,
+        uid: Optional[str] = None,
+        time: Optional[float] = None,
+        validate: bool = True,
+    ) -> Event:
+        if seq_num is None:
+            seq_num = self.event_counters[self.descriptor["name"]]
+        if uid is None:
+            uid = str(uuid.uuid4())
+        if time is None:
+            time = ttime.time()
+        if filled is None:
+            filled = {}
+        doc = Event(
+            uid=uid,
+            time=time,
+            data=data,
+            timestamps=timestamps,
+            seq_num=seq_num,
+            filled=filled,
+            descriptor=self.descriptor["uid"],
+        )
+        if validate:
+            schema_validators[DocumentNames.event].validate(doc)
+
+        if not (
+            set(
+                keys_without_stream_keys(
+                    self.descriptor["data_keys"], self.descriptor["data_keys"]
+                )
+            )
+            == set(keys_without_stream_keys(data, self.descriptor["data_keys"]))
+            == set(
+                keys_without_stream_keys(timestamps, self.descriptor["data_keys"])
+            )
+        ):
+            raise EventModelValidationError(
+                'These sets of keys must match (other than "STREAM:" keys):\n'
+                "event['data'].keys(): {}\n"
+                "event['timestamps'].keys(): {}\n"
+                "descriptor['data_keys'].keys(): {}\n".format(
+                    data.keys(),
+                    timestamps.keys(),
+                    self.descriptor["data_keys"].keys(),
+                )
+            )
+        if set(filled) - set(data):
+            raise EventModelValidationError(
+                "Keys in event['filled'] {} must be a subset of those in "
+                "event['data'] {}".format(filled.keys(), data.keys())
+            )
+        self.event_counters[self.descriptor["name"]] = seq_num + 1
+        return doc
+
+
+def compose_event(
+    *,
+    descriptor: EventDescriptor,
+    event_counters: Dict[str, int],
+    data: Dict[str, Any],
+    timestamps: Dict[str, Any],
+    seq_num: int,
+    filled: Optional[Dict[str, Union[bool, str]]] = None,
+    uid: Optional[str] = None,
+    time: Optional[float] = None,
+    validate: bool = True,
+) -> Event:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
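+
+    A minimal sketch of the preferred class-based route (stream name and
+    data keys are illustrative)::
+
+        bundle = compose_run()
+        desc_bundle = bundle.compose_descriptor(
+            name="primary",
+            data_keys={"x": {"source": "sim", "dtype": "number", "shape": []}},
+        )
+        event_doc = desc_bundle.compose_event(
+            data={"x": 1.0},
+            timestamps={"x": 100.0},
+        )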
+    """
+    return ComposeEvent(descriptor, event_counters)(
+        data,
+        timestamps,
+        seq_num=seq_num,
+        filled=filled,
+        uid=uid,
+        time=time,
+        validate=validate,
+    )
+
+
+@dataclass
+class ComposeDescriptorBundle:
+    descriptor_doc: EventDescriptor
+    compose_event: ComposeEvent
+    compose_event_page: ComposeEventPage
+
+    def __iter__(self) -> Iterator:
+        return iter(
+            (
+                self.descriptor_doc,
+                self.compose_event,
+                self.compose_event_page,
+            )
+        )
+
+
+@dataclass
+class ComposeDescriptor:
+    start: RunStart
+    streams: dict
+    event_counters: Dict[str, int]
+
+    def __call__(
+        self,
+        name,
+        data_keys,
+        hints=None,
+        configuration=None,
+        object_keys=None,
+        time=None,
+        uid=None,
+        validate=True,
+    ) -> ComposeDescriptorBundle:
+        if time is None:
+            time = ttime.time()
+        if uid is None:
+            uid = str(uuid.uuid4())
+        if hints is None:
+            hints = {}
+        if configuration is None:
+            configuration = {}
+        if object_keys is None:
+            object_keys = {}
+
+        doc = EventDescriptor(
+            configuration=configuration,
+            data_keys=data_keys,
+            name=name,
+            object_keys=object_keys,
+            run_start=self.start["uid"],
+            time=time,
+            uid=uid,
+            hints=hints,
+        )
+        if validate:
+            if name in self.streams and self.streams[name] != set(data_keys):
+                raise EventModelValidationError(
+                    "A descriptor with the name {} has already been composed with "
+                    "data_keys {}. The requested data_keys were {}. All "
+                    "descriptors in a given stream must have the same "
+                    "data_keys.".format(name, self.streams[name], set(data_keys))
+                )
+            schema_validators[DocumentNames.descriptor].validate(doc)
+
+        if name not in self.streams:
+            self.streams[name] = set(data_keys)
+            self.event_counters[name] = 1
+
+        return ComposeDescriptorBundle(
+            descriptor_doc=doc,
+            compose_event=ComposeEvent(
+                descriptor=doc, event_counters=self.event_counters
+            ),
+            compose_event_page=ComposeEventPage(
+                descriptor=doc, event_counters=self.event_counters
+            ),
+        )
+
+
+def compose_descriptor(
+    *,
+    start: RunStart,
+    streams: Dict[str, Iterable],
+    event_counters: Dict[str, int],
+    name: str,
+    data_keys: Dict[str, DataKey],
+    uid: Optional[str] = None,
+    time: Optional[float] = None,
+    object_keys: Optional[Dict[str, Any]] = None,
+    configuration: Optional[Dict[str, Configuration]] = None,
+    hints: Optional[PerObjectHint] = None,
+    validate: bool = True,
+) -> ComposeDescriptorBundle:
+    """
+    Here for backwards compatibility; the Compose class is preferred.
+    """
+    return ComposeDescriptor(start, streams, event_counters)(
+        name,
+        data_keys,
+        hints=hints,
+        configuration=configuration,
+        object_keys=object_keys,
+        time=time,
+        uid=uid,
+        validate=validate,
+    )
+
+
+@dataclass
+class ComposeRunBundle:
+    """Extensible compose run bundle. This maintains backward compatibility
+    by unpacking into a basic run bundle
+    (start, compose_descriptor, compose_resource, stop).
+    Further extensions are optional and require keyword referencing
+    (e.g. compose_stream_resource).
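+
+    A minimal end-to-end sketch (stream name and data keys are
+    illustrative)::
+
+        bundle = compose_run()
+        start_doc = bundle.start_doc
+        desc_bundle = bundle.compose_descriptor(
+            name="primary",
+            data_keys={"x": {"source": "sim", "dtype": "number", "shape": []}},
+        )
+        event_doc = desc_bundle.compose_event(
+            data={"x": 1.0}, timestamps={"x": 100.0}
+        )
+        stop_doc = bundle.compose_stop()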
+    """
+
+    start_doc: RunStart
+    compose_descriptor: ComposeDescriptor
+    compose_resource: ComposeResource
+    compose_stop: ComposeStop
+    compose_stream_resource: Optional[ComposeStreamResource] = None
+
+    # iter for backwards compatibility
+    def __iter__(self) -> Iterator:
+        return iter(
+            (
+                self.start_doc,
+                self.compose_descriptor,
+                self.compose_resource,
+                self.compose_stop,
+            )
+        )
+
+
+def compose_run(
+    *,
+    uid: Optional[str] = None,
+    time: Optional[float] = None,
+    metadata: Optional[Dict] = None,
+    validate: bool = True,
+    event_counters: Optional[Dict[str, int]] = None,
+) -> ComposeRunBundle:
+    """
+    Compose a RunStart document and factory functions for related documents.
+
+    Parameters
+    ----------
+    uid : string, optional
+        Unique identifier for this run, conventionally a UUID4. If None is
+        given, a UUID4 will be generated.
+    time : float, optional
+        UNIX epoch time of start of this run. If None is given, the current
+        time will be used.
+    metadata : dict, optional
+        Additional metadata to include in the document.
+    validate : boolean, optional
+        Validate this document conforms to the schema.
+    event_counters : dict, optional
+        A dict for counting events. When an event is composed by any of the
+        descriptors composed by this run, the element of this dict keyed on
+        the descriptor name is increased by 1.
+
+    Returns
+    -------
+    ComposeRunBundle
+    """
+    if uid is None:
+        uid = str(uuid.uuid4())
+    if time is None:
+        time = ttime.time()
+    if metadata is None:
+        metadata = {}
+
+    # Define some mutable state to be shared internally by the closures composed
+    # below.
+    streams: Dict[str, Iterable] = {}
+    if event_counters is None:
+        event_counters = {}
+    poison_pill: list = []
+
+    doc = dict(uid=uid, time=time, **metadata)
+
+    if validate:
+        schema_validators[DocumentNames.start].validate(doc)
+
+    return ComposeRunBundle(
+        cast(RunStart, doc),
+        ComposeDescriptor(
+            start=cast(RunStart, doc), streams=streams, event_counters=event_counters
+        ),
+        ComposeResource(start=cast(RunStart, doc)),
+        ComposeStop(
+            start=cast(RunStart, doc),
+            event_counters=event_counters,
+            poison_pill=poison_pill,
+        ),
+        compose_stream_resource=ComposeStreamResource(start=cast(RunStart, doc)),
+    )
+
+
+def pack_event_page(*events: Event) -> EventPage:
+    """
+    Transform one or more Event documents into an EventPage document.
+
+    Parameters
+    ----------
+    *events : dicts
+        any number of Event documents
+
+    Returns
+    -------
+    event_page : dict
+    """
+    if not events:
+        raise ValueError(
+            "The pack_event_page() function was called with empty *args. "
+            "Cannot create an EventPage from an empty collection of Events "
+            "because the 'descriptor' field in an EventPage cannot be NULL."
+        )
+    time_list = []
+    uid_list = []
+    seq_num_list = []
+    data_list = []
+    filled_list = []
+    timestamps_list = []
+    for event in events:
+        time_list.append(event["time"])
+        uid_list.append(event["uid"])
+        seq_num_list.append(event["seq_num"])
+        filled_list.append(event.get("filled", {}))
+        data_list.append(event["data"])
+        timestamps_list.append(event["timestamps"])
+    event_page = EventPage(
+        time=time_list,
+        uid=uid_list,
+        seq_num=seq_num_list,
+        descriptor=event["descriptor"],
+        filled=_transpose_list_of_dicts(filled_list),
+        data=_transpose_list_of_dicts(data_list),
+        timestamps=_transpose_list_of_dicts(timestamps_list),
+    )
+    return event_page
+
+
+def unpack_event_page(event_page: EventPage) -> Generator:
+    """
+    Transform an EventPage document into individual Event documents.
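+
+    This is the inverse of :func:`pack_event_page`.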
+
+    Parameters
+    ----------
+    event_page : EventPage
+
+    Yields
+    ------
+    event : Event
+    """
+    descriptor = event_page["descriptor"]
+    data_list = _transpose_dict_of_lists(event_page["data"])
+    timestamps_list = _transpose_dict_of_lists(event_page["timestamps"])
+    filled_list = _transpose_dict_of_lists(event_page.get("filled", {}))
+    for uid, time, seq_num, data, timestamps, filled in itertools.zip_longest(
+        event_page["uid"],
+        event_page["time"],
+        event_page["seq_num"],
+        data_list,
+        timestamps_list,
+        filled_list,
+        fillvalue={},
+    ):
+        yield Event(
+            descriptor=descriptor,
+            uid=uid,
+            time=time,
+            seq_num=seq_num,
+            data=data,
+            timestamps=timestamps,
+            filled=filled,
+        )
+
+
+def pack_datum_page(*datum: Datum) -> DatumPage:
+    """
+    Transform one or more Datum documents into a DatumPage document.
+
+    Parameters
+    ----------
+    *datum : dicts
+        any number of Datum documents
+
+    Returns
+    -------
+    datum_page : dict
+    """
+    if not datum:
+        raise ValueError(
+            "The pack_datum_page() function was called with empty *args. "
+            "Cannot create a DatumPage from an empty collection of Datum "
+            "because the 'resource' field in a DatumPage cannot be NULL."
+        )
+    datum_id_list = []
+    datum_kwarg_list = []
+    for datum_ in datum:
+        datum_id_list.append(datum_["datum_id"])
+        datum_kwarg_list.append(datum_["datum_kwargs"])
+    datum_page = DatumPage(
+        resource=datum_["resource"],
+        datum_id=datum_id_list,
+        datum_kwargs=_transpose_list_of_dicts(datum_kwarg_list),
+    )
+    return datum_page
+
+
+def unpack_datum_page(datum_page: DatumPage) -> Generator:
+    """
+    Transform a DatumPage document into individual Datum documents.
+
+    Parameters
+    ----------
+    datum_page : DatumPage
+
+    Yields
+    ------
+    datum : Datum
+    """
+    resource = datum_page["resource"]
+    datum_kwarg_list = _transpose_dict_of_lists(datum_page["datum_kwargs"])
+    datum_id: Any
+    datum_kwargs: Any
+    for datum_id, datum_kwargs in itertools.zip_longest(
+        datum_page["datum_id"], datum_kwarg_list, fillvalue={}
+    ):
+        yield Datum(datum_id=datum_id, datum_kwargs=datum_kwargs, resource=resource)
+
+
+def rechunk_event_pages(event_pages: Iterable, chunk_size: int) -> Generator:
+    """
+    Resizes the event_pages in an iterable of event_pages.
+
+    Parameters
+    ----------
+    event_pages: Iterable
+        An iterable of event_pages
+    chunk_size: integer
+        Size of pages to yield
+
+    Yields
+    ------
+    event_page : dict
+    """
+    remainder = chunk_size
+    chunk_list = []
+
+    def page_chunks(page: dict, chunk_size: int, remainder: int) -> Generator:
+        """
+        Yields chunks of an event_page.
+        The first chunk will be of size remainder, the following chunks will be
+        of size chunk_size. The last chunk will be whatever is left over.
+        """
+        array_keys = ["seq_num", "time", "uid"]
+        page_size = len(page["uid"])  # Number of events in the page.
+
+        # Make a list of the chunk indexes.
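+        # For example, with remainder=2, chunk_size=3, and page_size=7 this
+        # produces [(0, 2), (2, 5), (5, 8)]; slicing clamps the final stop.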
+        chunks = [(0, remainder)]
+        chunks.extend(
+            [(i, i + chunk_size) for i in range(remainder, page_size, chunk_size)]
+        )
+
+        for start, stop in chunks:
+            yield {
+                "descriptor": page["descriptor"],
+                **{key: page[key][start:stop] for key in array_keys},
+                "data": {
+                    key: page["data"][key][start:stop] for key in page["data"].keys()
+                },
+                "timestamps": {
+                    key: page["timestamps"][key][start:stop]
+                    for key in page["timestamps"].keys()
+                },
+                "filled": {
+                    key: page["filled"][key][start:stop]
+                    for key in page["filled"].keys()
+                },
+            }
+
+    for page in event_pages:
+        new_chunks = page_chunks(page, chunk_size, remainder)
+        for chunk in new_chunks:
+            remainder -= len(chunk["uid"])  # Subtract the size of the chunk.
+            chunk_list.append(chunk)
+            if remainder == 0:
+                yield merge_event_pages(chunk_list)
+                remainder = chunk_size
+                chunk_list = []
+    if chunk_list:
+        yield merge_event_pages(chunk_list)
+
+
+def merge_event_pages(event_pages: Iterable[EventPage]) -> EventPage:
+    """
+    Combines an iterable of event_pages into a single event_page.
+
+    Parameters
+    ----------
+    event_pages: Iterable
+        An iterable of event_pages
+
+    Returns
+    -------
+    event_page : dict
+    """
+    pages = list(event_pages)
+    if len(pages) == 1:
+        return pages[0]
+
+    doc = dict(
+        descriptor=pages[0]["descriptor"],
+        seq_num=list(
+            itertools.chain.from_iterable([page["seq_num"] for page in pages])
+        ),
+        time=list(itertools.chain.from_iterable([page["time"] for page in pages])),
+        uid=list(itertools.chain.from_iterable([page["uid"] for page in pages])),
+        data={
+            key: list(
+                itertools.chain.from_iterable([page["data"][key] for page in pages])
+            )
+            for key in pages[0]["data"].keys()
+        },
+        timestamps={
+            key: list(
+                itertools.chain.from_iterable(
+                    [page["timestamps"][key] for page in pages]
+                )
+            )
+            for key in pages[0]["timestamps"].keys()
+        },
+        filled={
+            key: list(
+                itertools.chain.from_iterable([page["filled"][key] for page in pages])
+            )
+            for key in pages[0]["filled"].keys()
+        },
+    )
+    return cast(EventPage, doc)
+
+
+def rechunk_datum_pages(datum_pages: Iterable, chunk_size: int) -> Generator:
+    """
+    Resizes the datum_pages in an iterable of datum_pages.
+
+    Parameters
+    ----------
+    datum_pages: Iterable
+        An iterable of datum_pages
+    chunk_size: integer
+        Size of pages to yield
+
+    Yields
+    ------
+    datum_page : dict
+    """
+    remainder = chunk_size
+    chunk_list = []
+
+    def page_chunks(page: dict, chunk_size: int, remainder: int) -> Generator:
+        """
+        Yields chunks of a datum_page.
+        The first chunk will be of size remainder, the following chunks will be
+        of size chunk_size. The last chunk will be whatever is left over.
+        """
+
+        array_keys = ["datum_id"]
+        page_size = len(page["datum_id"])  # Number of datum documents in the page.
+
+        # Make a list of the chunk indexes.
+        chunks = [(0, remainder)]
+        chunks.extend(
+            [(i, i + chunk_size) for i in range(remainder, page_size, chunk_size)]
+        )
+
+        for start, stop in chunks:
+            yield {
+                "resource": page["resource"],
+                **{key: page[key][start:stop] for key in array_keys},
+                "datum_kwargs": {
+                    key: page["datum_kwargs"][key][start:stop]
+                    for key in page["datum_kwargs"].keys()
+                },
+            }
+
+    for page in datum_pages:
+        new_chunks = page_chunks(page, chunk_size, remainder)
+        for chunk in new_chunks:
+            remainder -= len(chunk["datum_id"])  # Subtract the size of the chunk.
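+            # Sketch of the flow: once remainder reaches 0 below, exactly
+            # chunk_size datums have accumulated since the last yield, so
+            # they are merged into one DatumPage and the counter resets.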
+            chunk_list.append(chunk)
+            if remainder == 0:
+                yield merge_datum_pages(chunk_list)
+                remainder = chunk_size
+                chunk_list = []
+    if chunk_list:
+        yield merge_datum_pages(chunk_list)
+
+
+def merge_datum_pages(datum_pages: Iterable) -> DatumPage:
+    """
+    Combines an iterable of datum_pages into a single datum_page.
+
+    Parameters
+    ----------
+    datum_pages: Iterable
+        An iterable of datum_pages
+
+    Returns
+    -------
+    datum_page : dict
+    """
+    pages = list(datum_pages)
+    if len(pages) == 1:
+        return pages[0]
+
+    array_keys = ["datum_id"]
+
+    doc = dict(
+        resource=pages[0]["resource"],
+        **{
+            key: list(itertools.chain.from_iterable([page[key] for page in pages]))
+            for key in array_keys
+        },
+        datum_kwargs={
+            key: list(
+                itertools.chain.from_iterable(
+                    [page["datum_kwargs"][key] for page in pages]
+                )
+            )
+            for key in pages[0]["datum_kwargs"].keys()
+        },
+    )
+    return cast(DatumPage, doc)
+
+
+def bulk_events_to_event_pages(bulk_events: dict) -> list:
+    """
+    Transform a BulkEvents document into a list of EventPage documents.
+
+    Note: The BulkEvents layout has been deprecated in favor of EventPage.
+
+    Parameters
+    ----------
+    bulk_events : dict
+
+    Returns
+    -------
+    event_pages : list
+    """
+    # This is for a deprecated document type, so we are not being fussy
+    # about efficiency/laziness here.
+    event_pages: dict = {}  # descriptor uid mapped to page
+    for events in bulk_events.values():
+        for event in events:
+            descriptor = event["descriptor"]
+            try:
+                page = event_pages[descriptor]
+            except KeyError:
+                page = {"time": [], "uid": [], "seq_num": [], "descriptor": descriptor}
+                page["data"] = {k: [] for k in event["data"]}
+                page["timestamps"] = {k: [] for k in event["timestamps"]}
+                page["filled"] = {k: [] for k in event.get("filled", {})}
+                event_pages[descriptor] = page
+            page["uid"].append(event["uid"])
+            page["time"].append(event["time"])
+            page["seq_num"].append(event["seq_num"])
+            page_data = page["data"]
+            for k, v in event["data"].items():
+                page_data[k].append(v)
+            page_timestamps = page["timestamps"]
+            for k, v in event["timestamps"].items():
+                page_timestamps[k].append(v)
+            page_filled = page["filled"]
+            for k, v in event.get("filled", {}).items():
+                page_filled[k].append(v)
+    return list(event_pages.values())
+
+
+def bulk_datum_to_datum_page(bulk_datum: dict) -> DatumPage:
+    """
+    Transform one BulkDatum into one DatumPage.
+
+    Note: There is only one known usage of BulkDatum "in the wild", and the
+    BulkDatum layout has been deprecated in favor of DatumPage.
+    """
+    datum_page = DatumPage(
+        datum_id=bulk_datum["datum_ids"],
+        resource=bulk_datum["resource"],
+        datum_kwargs=_transpose_list_of_dicts(bulk_datum["datum_kwarg_list"]),
+    )
+    return datum_page
+
+
+def _transpose_list_of_dicts(list_of_dicts: list) -> dict:
+    "Transform list-of-dicts into dict-of-lists (i.e. DataFrame-like)."
+    dict_of_lists = defaultdict(list)
+    for row in list_of_dicts:
+        for k, v in row.items():
+            dict_of_lists[k].append(v)
+    return dict(dict_of_lists)
+
+
+def _transpose_dict_of_lists(dict_of_lists: dict) -> list:
+    "Transform dict-of-lists (i.e. DataFrame-like) into list-of-dicts."
+    list_of_dicts = []
+    keys = list(dict_of_lists)
+    for row in zip(*(dict_of_lists[k] for k in keys)):
+        list_of_dicts.append(dict(zip(keys, row)))
+    return list_of_dicts
+
+
+def verify_filled(event_page: dict) -> None:
+    """Take an event_page document and verify that it is completely filled.
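+
+    As a hypothetical illustration, an event_page whose ``filled`` mapping
+    is ``{"image": [False, True]}`` is incomplete, so this function raises
+    ``UnfilledData`` naming the ``"image"`` field.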
+
+    Parameters
+    ----------
+    event_page : event_page document
+        The event page document to check
+
+    Raises
+    ------
+    UnfilledData
+        Raised if any of the data in the event_page is unfilled; when raised, it
+        includes a list of the unfilled data fields in the exception message.
+    """
+    if not all(map(all, event_page["filled"].values())):
+        # check that all event_page data is filled.
+        unfilled_data = []
+        for field, filled in event_page["filled"].items():
+            if not all(filled):
+                unfilled_data.append(field)
+        raise UnfilledData(
+            f"Unfilled data found in fields "
+            f"{unfilled_data!r}. Use "
+            f"`event_model.Filler`."
+        )
+
+
+def sanitize_doc(doc: dict) -> dict:
+    """Return a copy with any numpy objects converted to built-in Python types.
+
+    This function takes in an event-model document and returns a copy with any
+    numpy objects converted to built-in Python types. It is useful for
+    sanitizing documents prior to sending to any consumer that does not
+    recognize numpy types, such as a MongoDB database or a JSON encoder.
+
+    Parameters
+    ----------
+    doc : dict
+        The event-model document to be sanitized
+
+    Returns
+    -------
+    sanitized_doc : event-model document
+        The event-model document with numpy objects converted to built-in
+        Python types.
+    """
+    return json.loads(json.dumps(doc, cls=NumpyEncoder))
+
+
+class NumpyEncoder(json.JSONEncoder):
+    """
+    A json.JSONEncoder for encoding numpy objects using built-in Python types.
+
+    Examples
+    --------
+
+    Encode a Python object that includes an arbitrarily-nested numpy object.
+
+    >>> json.dumps({'a': {'b': numpy.array([1, 2, 3])}}, cls=NumpyEncoder)
+    """
+
+    # Credit: https://stackoverflow.com/a/47626762/1221924
+    @no_type_check
+    def default(self, obj: object) -> Any:
+        try:
+            import dask.array
+
+            if isinstance(obj, dask.array.Array):
+                obj = numpy.asarray(obj)
+        except ImportError:
+            pass
+        if isinstance(obj, (numpy.generic, numpy.ndarray)):
+            if numpy.isscalar(obj):
+                return obj.item()
+            return obj.tolist()
+        return json.JSONEncoder.default(self, obj)
diff --git a/src/event_model/__main__.py b/src/event_model/__main__.py
index e3e537c9..5f25bcc4 100644
--- a/src/event_model/__main__.py
+++ b/src/event_model/__main__.py
@@ -1,23 +1,14 @@
-"""Interface for ``python -m event_model``."""
-
 from argparse import ArgumentParser
-from collections.abc import Sequence
 
 from .
import __version__ __all__ = ["main"] -def main(args: Sequence[str] | None = None) -> None: - """Argument parser for the CLI.""" +def main(args=None): parser = ArgumentParser() - parser.add_argument( - "-v", - "--version", - action="version", - version=__version__, - ) - parser.parse_args(args) + parser.add_argument("-v", "--version", action="version", version=__version__) + args = parser.parse_args(args) if __name__ == "__main__": diff --git a/event_model/documents/__init__.py b/src/event_model/documents/__init__.py similarity index 100% rename from event_model/documents/__init__.py rename to src/event_model/documents/__init__.py diff --git a/event_model/documents/datum.py b/src/event_model/documents/datum.py similarity index 100% rename from event_model/documents/datum.py rename to src/event_model/documents/datum.py diff --git a/event_model/documents/datum_page.py b/src/event_model/documents/datum_page.py similarity index 100% rename from event_model/documents/datum_page.py rename to src/event_model/documents/datum_page.py diff --git a/event_model/documents/event.py b/src/event_model/documents/event.py similarity index 100% rename from event_model/documents/event.py rename to src/event_model/documents/event.py diff --git a/event_model/documents/event_descriptor.py b/src/event_model/documents/event_descriptor.py similarity index 100% rename from event_model/documents/event_descriptor.py rename to src/event_model/documents/event_descriptor.py diff --git a/event_model/documents/event_page.py b/src/event_model/documents/event_page.py similarity index 100% rename from event_model/documents/event_page.py rename to src/event_model/documents/event_page.py diff --git a/event_model/documents/generate/__main__.py b/src/event_model/documents/generate/__main__.py similarity index 93% rename from event_model/documents/generate/__main__.py rename to src/event_model/documents/generate/__main__.py index f111e24f..b3915d58 100644 --- a/event_model/documents/generate/__main__.py +++ b/src/event_model/documents/generate/__main__.py @@ -3,7 +3,11 @@ from event_model.documents import ALL_DOCUMENTS from event_model.documents.generate.typeddict_to_schema import typeddict_to_schema -if __name__ == "__main__": +def main(): schema_dir = Path(__file__).parent.parent.parent / "schemas" for document in ALL_DOCUMENTS: typeddict_to_schema(document, schema_dir) + + +if __name__ == "__main__": + main() diff --git a/event_model/documents/generate/type_wrapper.py b/src/event_model/documents/generate/type_wrapper.py similarity index 100% rename from event_model/documents/generate/type_wrapper.py rename to src/event_model/documents/generate/type_wrapper.py diff --git a/event_model/documents/generate/typeddict_to_schema.py b/src/event_model/documents/generate/typeddict_to_schema.py similarity index 100% rename from event_model/documents/generate/typeddict_to_schema.py rename to src/event_model/documents/generate/typeddict_to_schema.py diff --git a/event_model/documents/resource.py b/src/event_model/documents/resource.py similarity index 100% rename from event_model/documents/resource.py rename to src/event_model/documents/resource.py diff --git a/event_model/documents/run_start.py b/src/event_model/documents/run_start.py similarity index 100% rename from event_model/documents/run_start.py rename to src/event_model/documents/run_start.py diff --git a/event_model/documents/run_stop.py b/src/event_model/documents/run_stop.py similarity index 100% rename from event_model/documents/run_stop.py rename to 
src/event_model/documents/run_stop.py diff --git a/event_model/documents/stream_datum.py b/src/event_model/documents/stream_datum.py similarity index 100% rename from event_model/documents/stream_datum.py rename to src/event_model/documents/stream_datum.py diff --git a/event_model/documents/stream_resource.py b/src/event_model/documents/stream_resource.py similarity index 100% rename from event_model/documents/stream_resource.py rename to src/event_model/documents/stream_resource.py diff --git a/event_model/schemas/bulk_datum.json b/src/event_model/schemas/bulk_datum.json similarity index 100% rename from event_model/schemas/bulk_datum.json rename to src/event_model/schemas/bulk_datum.json diff --git a/event_model/schemas/bulk_events.json b/src/event_model/schemas/bulk_events.json similarity index 100% rename from event_model/schemas/bulk_events.json rename to src/event_model/schemas/bulk_events.json diff --git a/event_model/schemas/datum.json b/src/event_model/schemas/datum.json similarity index 100% rename from event_model/schemas/datum.json rename to src/event_model/schemas/datum.json diff --git a/event_model/schemas/datum_page.json b/src/event_model/schemas/datum_page.json similarity index 100% rename from event_model/schemas/datum_page.json rename to src/event_model/schemas/datum_page.json diff --git a/event_model/schemas/event.json b/src/event_model/schemas/event.json similarity index 100% rename from event_model/schemas/event.json rename to src/event_model/schemas/event.json diff --git a/event_model/schemas/event_descriptor.json b/src/event_model/schemas/event_descriptor.json similarity index 100% rename from event_model/schemas/event_descriptor.json rename to src/event_model/schemas/event_descriptor.json diff --git a/event_model/schemas/event_page.json b/src/event_model/schemas/event_page.json similarity index 100% rename from event_model/schemas/event_page.json rename to src/event_model/schemas/event_page.json diff --git a/event_model/schemas/resource.json b/src/event_model/schemas/resource.json similarity index 100% rename from event_model/schemas/resource.json rename to src/event_model/schemas/resource.json diff --git a/event_model/schemas/run_start.json b/src/event_model/schemas/run_start.json similarity index 100% rename from event_model/schemas/run_start.json rename to src/event_model/schemas/run_start.json diff --git a/event_model/schemas/run_stop.json b/src/event_model/schemas/run_stop.json similarity index 100% rename from event_model/schemas/run_stop.json rename to src/event_model/schemas/run_stop.json diff --git a/event_model/schemas/stream_datum.json b/src/event_model/schemas/stream_datum.json similarity index 100% rename from event_model/schemas/stream_datum.json rename to src/event_model/schemas/stream_datum.json diff --git a/event_model/schemas/stream_resource.json b/src/event_model/schemas/stream_resource.json similarity index 100% rename from event_model/schemas/stream_resource.json rename to src/event_model/schemas/stream_resource.json diff --git a/event_model/tests/__init__.py b/src/event_model/tests/__init__.py similarity index 100% rename from event_model/tests/__init__.py rename to src/event_model/tests/__init__.py diff --git a/event_model/tests/test_auth.py b/src/event_model/tests/test_auth.py similarity index 100% rename from event_model/tests/test_auth.py rename to src/event_model/tests/test_auth.py diff --git a/event_model/tests/test_em.py b/src/event_model/tests/test_em.py similarity index 100% rename from event_model/tests/test_em.py rename to 
src/event_model/tests/test_em.py diff --git a/event_model/tests/test_emit.py b/src/event_model/tests/test_emit.py similarity index 100% rename from event_model/tests/test_emit.py rename to src/event_model/tests/test_emit.py diff --git a/event_model/tests/test_filler.py b/src/event_model/tests/test_filler.py similarity index 100% rename from event_model/tests/test_filler.py rename to src/event_model/tests/test_filler.py diff --git a/event_model/tests/test_projections.py b/src/event_model/tests/test_projections.py similarity index 100% rename from event_model/tests/test_projections.py rename to src/event_model/tests/test_projections.py diff --git a/event_model/tests/test_run_router.py b/src/event_model/tests/test_run_router.py similarity index 100% rename from event_model/tests/test_run_router.py rename to src/event_model/tests/test_run_router.py diff --git a/event_model/tests/test_schema_generation.py b/src/event_model/tests/test_schema_generation.py similarity index 100% rename from event_model/tests/test_schema_generation.py rename to src/event_model/tests/test_schema_generation.py diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index ebe9c10f..00000000 --- a/tests/conftest.py +++ /dev/null @@ -1,21 +0,0 @@ -import os -from typing import Any - -import pytest - -# Prevent pytest from catching exceptions when debugging in vscode so that break on -# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) -if os.getenv("PYTEST_RAISE", "0") == "1": - - @pytest.hookimpl(tryfirst=True) - def pytest_exception_interact(call: pytest.CallInfo[Any]): - if call.excinfo is not None: - raise call.excinfo.value - else: - raise RuntimeError( - f"{call} has no exception data, an unknown error has occurred" - ) - - @pytest.hookimpl(tryfirst=True) - def pytest_internalerror(excinfo: pytest.ExceptionInfo[Any]): - raise excinfo.value diff --git a/tests/test_cli.py b/tests/test_cli.py deleted file mode 100644 index ab60b9bb..00000000 --- a/tests/test_cli.py +++ /dev/null @@ -1,9 +0,0 @@ -import subprocess -import sys - -from event_model import __version__ - - -def test_cli_version(): - cmd = [sys.executable, "-m", "event_model", "--version"] - assert subprocess.check_output(cmd).decode().strip() == __version__ From 35ab716cb0c452c75945afae35133f8b9eae8046 Mon Sep 17 00:00:00 2001 From: Eva Date: Fri, 18 Oct 2024 10:06:15 +0100 Subject: [PATCH 4/7] general fix --- .github/CONTRIBUTING.md | 30 ------------------------------ .github/workflows/ci.yml | 2 +- 2 files changed, 1 insertion(+), 31 deletions(-) diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index a4432289..17bc44cf 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -1,4 +1,3 @@ -<<<<<<< before updating # Contributing ## Getting Started @@ -73,32 +72,3 @@ Hints to make the integration of your changes easy (and happen faster): - Don't forget your unit tests - All algorithms need documentation, don't forget the .rst file - Don't take changes requests to change your code personally -======= -# Contribute to the project - -Contributions and issues are most welcome! All issues and pull requests are -handled through [GitHub](https://github.com/bluesky/event-model/issues). Also, please check for any existing issues before -filing a new one. If you have a great idea but it involves big changes, please -file a ticket before making a pull request! We want to make sure you don't spend -your time coding something that might not fit the scope of the project. 
- -## Issue or Discussion? - -Github also offers [discussions](https://github.com/bluesky/event-model/discussions) as a place to ask questions and share ideas. If -your issue is open ended and it is not obvious when it can be "closed", please -raise it as a discussion instead. - -## Code Coverage - -While 100% code coverage does not make a library bug-free, it significantly -reduces the number of easily caught bugs! Please make sure coverage remains the -same or is improved by a pull request! - -## Developer Information - -It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits its own needs. - -This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects. - -For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/2.3.0/how-to.html). ->>>>>>> after updating diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1df64d84..3a391129 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: strategy: matrix: runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest - python-version: ["3.10", "3.11", "3.12"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] include: # Include one that runs in the dev environment - runs-on: "ubuntu-latest" From 89fd82dc5b9da2589967788a3d9314b93b953ca9 Mon Sep 17 00:00:00 2001 From: Eva Date: Fri, 18 Oct 2024 11:14:22 +0100 Subject: [PATCH 5/7] fixed docs Also added an explanation on schema generation. 
--- LICENSE | 229 ++-------------- README.md | 46 ++-- docs/conf.py | 81 +++++- docs/{user => }/explanations/data-model.rst | 26 +- ...02-switched-to-using-a-python-skeleton.rst | 0 .../0003-exposed-documents-on-event_model.rst | 2 +- ...004-switched-to-python-copier-template.md} | 2 +- .../explanations/event-descriptors.rst | 0 docs/{user => }/explanations/external.rst | 0 docs/explanations/schema-generation.rst | 22 ++ docs/{user => }/how-to/use-cases.rst | 0 docs/images/bluesky-logo-dark.svg | 1 - docs/images/dls-logo.svg | 11 - docs/images/event-model-logo.svg | 258 ++++++++++++++++++ docs/index.md | 3 +- docs/reference.md | 2 +- docs/{user => }/reference/release-history.rst | 0 docs/tutorials/installation.md | 2 +- pyproject.toml | 2 +- 19 files changed, 428 insertions(+), 259 deletions(-) rename docs/{user => }/explanations/data-model.rst (95%) rename docs/{developer => }/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst (100%) rename docs/{developer => }/explanations/decisions/0003-exposed-documents-on-event_model.rst (88%) rename docs/explanations/decisions/{0002-switched-to-python-copier-template.md => 0004-switched-to-python-copier-template.md} (91%) rename docs/{user => }/explanations/event-descriptors.rst (100%) rename docs/{user => }/explanations/external.rst (100%) create mode 100644 docs/explanations/schema-generation.rst rename docs/{user => }/how-to/use-cases.rst (100%) delete mode 100644 docs/images/bluesky-logo-dark.svg delete mode 100644 docs/images/dls-logo.svg create mode 100644 docs/images/event-model-logo.svg rename docs/{user => }/reference/release-history.rst (100%) diff --git a/LICENSE b/LICENSE index 8dada3ed..eda57e7d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,28 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +BSD 3-Clause License + +Copyright (c) 2015, Brookhaven National Laboratory + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md index 11e9d693..bc464410 100644 --- a/README.md +++ b/README.md @@ -1,36 +1,38 @@ + + +Data model used by the bluesky ecosystem. + [![CI](https://github.com/bluesky/event-model/actions/workflows/ci.yml/badge.svg)](https://github.com/bluesky/event-model/actions/workflows/ci.yml) [![Coverage](https://codecov.io/gh/bluesky/event-model/branch/main/graph/badge.svg)](https://codecov.io/gh/bluesky/event-model) [![PyPI](https://img.shields.io/pypi/v/event-model.svg)](https://pypi.org/project/event-model) [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) -# event_model - -Data model used by the bluesky ecosystem. - -This is where you should write a short paragraph that describes what your module does, -how it does it, and why people should use it. +# Event Model -Source | -:---: | :---: -PyPI | `pip install event-model` -Documentation | -Releases | +A primary design goal of bluesky is to enable better research by recording +rich metadata alongside measured data for use in later analysis. Documents are +how we do this. -This is where you should put some images or code snippets that illustrate -some relevant examples. If it is a library then you might put some -introductory code here: +This repository contains the formal schemas for bluesky's streaming data model +and some Python tooling for composing, validating, and transforming documents +in the model. -```python -from event_model import __version__ +## Where is my data? -print(f"Hello event_model {__version__}") -``` +For the full details and schema please see the `data_model` section. This is a very quick guide to where +you should look for / put different kinds of information -Or if it is a commandline tool then you might put some example commands here: +* Information about your sample that you know before the measurement → *Start* Document +* What experiment you intended to do → *Start* Document +* Who you are / where you are → *Start* Document +* References to external databases → *Start* Document +* The Data™ → *Event* Document +* Detector calibrations, dark frames, flat fields , or masks → *Event* Document (probably in its own stream) +* The shape / data type / units of The Data™ → *Event Descriptor* Document in the *data_keys* entry +* Anything you read from the controls system that is not device configuration → *Event* Document +* Device configuration data → *Event Descriptor* Document in the *configuration* entry -``` -python -m event_model --version -``` diff --git a/docs/conf.py b/docs/conf.py index bf839aba..0da4ca5b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,6 +17,10 @@ # General information about the project. project = "event-model" +copyright = "2019, Brookhaven National Lab" +author = "Brookhaven National Lab" + +language = "en" # The full version, including alpha/beta/rc tags. 
release = event_model.__version__ @@ -47,16 +51,27 @@ "sphinx_copybutton", # For the card element "sphinx_design", + "sphinx_design", + "sphinx.ext.autosummary", + "sphinx.ext.mathjax", + "sphinx.ext.githubpages", + "matplotlib.sphinxext.plot_directive", + "sphinx_copybutton", + "IPython.sphinxext.ipython_directive", + "IPython.sphinxext.ipython_console_highlighting", + # So we can write markdown files "myst_parser", ] +napoleon_google_docstring = False +napoleon_numpy_docstring = True # So we can use the ::: syntax myst_enable_extensions = ["colon_fence"] # If true, Sphinx will warn about all references where the target cannot # be found. -nitpicky = True +nitpicky = False # A list of (type, target) tuples (by default empty) that should be ignored when # generating warnings in "nitpicky mode". Note that type should include the @@ -104,16 +119,37 @@ # These patterns also affect html_static_path and html_extra_path exclude_patterns = ["_build"] +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # This means you can link things like `str` and `asyncio` to the relevant # docs in the python documentation. -intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)} +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "cachetools": ("https://cachetools.readthedocs.io/en/stable/", None), + "numpy": ("https://docs.scipy.org/doc/numpy/", None), + "scipy": ("https://docs.scipy.org/doc/scipy/reference/", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None), + "matplotlib": ("https://matplotlib.org", None), + "jsonschema": ("https://python-jsonschema.readthedocs.io/en/stable/", None), +} # A dictionary of graphviz graph attributes for inheritance diagrams. inheritance_graph_attrs = {"rankdir": "TB"} +# Common links that should be available on every page +rst_epilog = """ +.. _NSLS: https://www.bnl.gov/nsls2 +.. _black: https://github.com/psf/black +.. _flake8: https://flake8.pycqa.org/en/latest/ +.. _isort: https://github.com/PyCQA/isort +.. _mypy: http://mypy-lang.org/ +.. _pre-commit: https://pre-commit.com/ +""" + # Ignore localhost links for periodic check that links in docs are valid linkcheck_ignore = [r"http://localhost:\d+/"] @@ -122,6 +158,43 @@ copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " copybutton_prompt_is_regexp = True +# -- Options for manual page output --------------------------------------- + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "event-model.tex", + "Bluesky Event Model Documentation", + "Contributors", + "manual", + ), +] + + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, "event-model", "Bluesky Event Model Documentation", [author], 1) +] + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "event-model", + "Bluesky Event Model Documentation", + author, + "event-model", + "Data model used by the bluesky ecosystem", + "Miscellaneous", + ), +] + # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -130,7 +203,7 @@ html_theme = "pydata_sphinx_theme" github_repo = "event-model" github_user = "bluesky" -switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json" +switcher_json = f"https://blueskyproject.io/{github_repo}/switcher.json" switcher_exists = requests.get(switcher_json).ok if not switcher_exists: print( @@ -187,5 +260,5 @@ html_show_copyright = False # Logo -html_logo = "images/dls-logo.svg" +html_logo = "images/event-model-logo.svg" html_favicon = html_logo diff --git a/docs/user/explanations/data-model.rst b/docs/explanations/data-model.rst similarity index 95% rename from docs/user/explanations/data-model.rst rename to docs/explanations/data-model.rst index d5efeb7c..162eeaa9 100644 --- a/docs/user/explanations/data-model.rst +++ b/docs/explanations/data-model.rst @@ -59,7 +59,7 @@ Resource and Datum document types manage references to externally-stored data. Example Runs ============ -.. image:: ../../images/document-generation-timeline.svg +.. image:: ../images/document-generation-timeline.svg :width: 100% :align: center @@ -162,7 +162,7 @@ experimetal and subject to backward-incompatible changes in future releases. The run start document formal schema: -.. literalinclude:: ../../../event_model/schemas/run_start.json +.. literalinclude:: ../../src/event_model/schemas/run_start.json .. _descriptor: @@ -241,7 +241,7 @@ Typical example: Formal schema: -.. literalinclude:: ../../../event_model/schemas/event_descriptor.json +.. literalinclude:: ../../src/event_model/schemas/event_descriptor.json .. _event: @@ -298,7 +298,7 @@ overall event 'time' is often more useful. Formal schema: -.. literalinclude:: ../../../event_model/schemas/event.json +.. literalinclude:: ../../src/event_model/schemas/event.json .. _event_page: @@ -326,7 +326,7 @@ the example Event above structured as an Event Page with a single row: Formal Event Page schema: -.. literalinclude:: ../../../event_model/schemas/event_page.json +.. literalinclude:: ../../src/event_model/schemas/event_page.json It is intentional that the values in the "data" and "timestamps" dictionaries do not have structure. The values may be numeric, bool, null (``None``), or a @@ -375,7 +375,7 @@ Typical example: Formal schema: -.. literalinclude:: ../../../event_model/schemas/run_stop.json +.. literalinclude:: ../../src/event_model/schemas/run_stop.json .. _resource: @@ -413,7 +413,7 @@ Typical example: Formal schema: -.. literalinclude:: ../../../event_model/schemas/resource.json +.. literalinclude:: ../../src/event_model/schemas/resource.json .. _datum: @@ -446,7 +446,7 @@ It is an implementation detail that ``datum_id`` is often formatted as Formal schema: -.. literalinclude:: ../../../event_model/schemas/datum.json +.. literalinclude:: ../../src/event_model/schemas/datum.json .. _datum_page: @@ -466,7 +466,7 @@ strucuted as a Datum Page with one row: Formal Datum Page schema: -.. literalinclude:: ../../../event_model/schemas/datum_page.json +.. literalinclude:: ../../src/event_model/schemas/datum_page.json .. 
_stream_resource: @@ -491,7 +491,7 @@ Typical example: Formal schema: -.. literalinclude:: ../../../event_model/schemas/stream_resource.json +.. literalinclude:: ../../src/event_model/schemas/stream_resource.json .. _stream_datum: @@ -516,7 +516,7 @@ Typical example: Formal schema: -.. literalinclude:: ../../../event_model/schemas/stream_datum.json +.. literalinclude:: ../../src/event_model/schemas/stream_datum.json .. _bulk_events: @@ -526,7 +526,7 @@ Formal schema: This is another representation of Events. This representation is deprecated. Use EventPage instead. -.. literalinclude:: ../../../event_model/schemas/bulk_events.json +.. literalinclude:: ../../src/event_model/schemas/bulk_events.json .. _bulk_datum: @@ -536,4 +536,4 @@ Use EventPage instead. This is another representation of Datum. This representation is deprecated. Use DatumPage instead. -.. literalinclude:: ../../../event_model/schemas/bulk_datum.json +.. literalinclude:: ../../src/event_model/schemas/bulk_datum.json diff --git a/docs/developer/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst b/docs/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst similarity index 100% rename from docs/developer/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst rename to docs/explanations/decisions/0002-switched-to-using-a-python-skeleton.rst diff --git a/docs/developer/explanations/decisions/0003-exposed-documents-on-event_model.rst b/docs/explanations/decisions/0003-exposed-documents-on-event_model.rst similarity index 88% rename from docs/developer/explanations/decisions/0003-exposed-documents-on-event_model.rst rename to docs/explanations/decisions/0003-exposed-documents-on-event_model.rst index 73b564e2..d3dfdb4e 100644 --- a/docs/developer/explanations/decisions/0003-exposed-documents-on-event_model.rst +++ b/docs/explanations/decisions/0003-exposed-documents-on-event_model.rst @@ -21,4 +21,4 @@ Accepted Consequences ------------ -Repositories downstream will be able to simplify their imports. \ No newline at end of file +Repositories downstream will be able to simplify their imports. diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0004-switched-to-python-copier-template.md similarity index 91% rename from docs/explanations/decisions/0002-switched-to-python-copier-template.md rename to docs/explanations/decisions/0004-switched-to-python-copier-template.md index 66fe5d8b..e2c1d3d8 100644 --- a/docs/explanations/decisions/0002-switched-to-python-copier-template.md +++ b/docs/explanations/decisions/0004-switched-to-python-copier-template.md @@ -1,4 +1,4 @@ -# 2. Adopt python-copier-template for project structure +# 4. Adopt python-copier-template for project structure ## Status diff --git a/docs/user/explanations/event-descriptors.rst b/docs/explanations/event-descriptors.rst similarity index 100% rename from docs/user/explanations/event-descriptors.rst rename to docs/explanations/event-descriptors.rst diff --git a/docs/user/explanations/external.rst b/docs/explanations/external.rst similarity index 100% rename from docs/user/explanations/external.rst rename to docs/explanations/external.rst diff --git a/docs/explanations/schema-generation.rst b/docs/explanations/schema-generation.rst new file mode 100644 index 00000000..8c948503 --- /dev/null +++ b/docs/explanations/schema-generation.rst @@ -0,0 +1,22 @@ +.. 
_schema_generation: + +***************** +Schema Generation +***************** + +To allow for python typing of documents, we define them as `TypedDict` in `event_model.documents`. + +.. literalinclude:: ../../src/event_model/documents/datum.py + :language: python + +We then use pydantic to convert these python types into the jsonschema in `event_model.schemas`. + +After changing any of the documents it's necessary to regenerate the schemas. This can be done by running: + +.. code-block:: bash + + regenerate-schema + +which is a python environment script in a dev install of event-model. + +This ensures we can have accurate typing across the bluesky codebase, but also doesn't limit us to python for validating documents. diff --git a/docs/user/how-to/use-cases.rst b/docs/how-to/use-cases.rst similarity index 100% rename from docs/user/how-to/use-cases.rst rename to docs/how-to/use-cases.rst diff --git a/docs/images/bluesky-logo-dark.svg b/docs/images/bluesky-logo-dark.svg deleted file mode 100644 index 0ec00500..00000000 --- a/docs/images/bluesky-logo-dark.svg +++ /dev/null @@ -1 +0,0 @@ -Bluesky_Logo_Final \ No newline at end of file diff --git a/docs/images/dls-logo.svg b/docs/images/dls-logo.svg deleted file mode 100644 index 4fcaa861..00000000 --- a/docs/images/dls-logo.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/docs/images/event-model-logo.svg b/docs/images/event-model-logo.svg new file mode 100644 index 00000000..b7190ae3 --- /dev/null +++ b/docs/images/event-model-logo.svg @@ -0,0 +1,258 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/index.md b/docs/index.md index 730b3fdc..0ecb86f4 100644 --- a/docs/index.md +++ b/docs/index.md @@ -3,10 +3,9 @@ html_theme.sidebar_secondary.remove: true --- ```{include} ../README.md -:end-before: