diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..44de8d36a --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,54 @@ +// For format details, see https://containers.dev/implementors/json_reference/ +{ + "name": "Python 3 Developer Container", + "build": { + "dockerfile": "../Dockerfile", + "target": "build", + // Only upgrade pip, we will install the project below + "args": { + "PIP_OPTIONS": "--upgrade pip" + } + }, + "remoteEnv": { + "DISPLAY": "${localEnv:DISPLAY}" + }, + // Add the URLs of features you want added when the container is built. + "features": { + "ghcr.io/devcontainers/features/common-utils:1": { + "username": "none", + "upgradePackages": false + } + }, + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python" + }, + "customizations": { + "vscode": { + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "ms-python.python", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters" + ] + } + }, + // Make sure the files we are mapping into the container exist on the host + "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", + "runArgs": [ + "--net=host", + "--security-opt=label=type:container_runtime_t" + ], + "mounts": [ + "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", + "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", + // map in home directory - not strictly necessary but useful + "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" + ], + // make the workspace folder the same inside and outside of the container + "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", + "workspaceFolder": "${localWorkspaceFolder}", + // After the container is created, install the python project in editable form + "postCreateCommand": "pip install -e '.[dev]'" +} diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 41a139337..000000000 --- a/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -*/_version_git.py export-subst diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml new file mode 100644 index 000000000..20d7a3adf --- /dev/null +++ b/.github/actions/install_requirements/action.yml @@ -0,0 +1,57 @@ +name: Install requirements +description: Run pip install with requirements and upload resulting requirements +inputs: + requirements_file: + description: Name of requirements file to use and upload + required: true + install_options: + description: Parameters to pass to pip install + required: true + python_version: + description: Python version to install + default: "3.x" + +runs: + using: composite + + steps: + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python_version }} + + - name: Pip install + run: | + touch ${{ inputs.requirements_file }} + # -c uses requirements.txt as constraints, see 'Validate requirements file' + pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} + shell: bash + + - name: Create lockfile + run: | + mkdir -p lockfiles + pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} + # delete the self referencing line and make sure it isn't blank + sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} + shell: bash + + - 
name: Upload lockfiles + uses: actions/upload-artifact@v3 + with: + name: lockfiles + path: lockfiles + + # This eliminates the class of problems where the requirements being given no + # longer match what the packages themselves dictate. E.g. in the rare instance + # where I install some-package which used to depend on vulnerable-dependency + # but now uses good-dependency (despite being nominally the same version) + # pip will install both if given a requirements file with -r + - name: If requirements file exists, check it matches pip installed packages + run: | + if [ -s ${{ inputs.requirements_file }} ]; then + if ! diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then + echo "Error: ${{ inputs.requirements_file }} needs the above changes to be exhaustive" + exit 1 + fi + fi + shell: bash diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..fb7c6ee67 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,16 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/pages/index.html b/.github/pages/index.html index d0f588233..cc33127d4 100644 --- a/.github/pages/index.html +++ b/.github/pages/index.html @@ -6,4 +6,4 @@ - \ No newline at end of file + diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py new file mode 100755 index 000000000..ae227ab7f --- /dev/null +++ b/.github/pages/make_switcher.py @@ -0,0 +1,99 @@ +import json +import logging +from argparse import ArgumentParser +from pathlib import Path +from subprocess import CalledProcessError, check_output +from typing import List, Optional + + +def report_output(stdout: bytes, label: str) -> List[str]: + ret = stdout.decode().strip().split("\n") + print(f"{label}: {ret}") + return ret + + +def get_branch_contents(ref: str) -> List[str]: + """Get the list of directories in a branch.""" + stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref]) + return report_output(stdout, "Branch contents") + + +def get_sorted_tags_list() -> List[str]: + """Get a list of sorted tags in descending order from the repository.""" + stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"]) + return report_output(stdout, "Tags list") + + +def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]: + """Generate the file containing the list of all GitHub Pages builds.""" + # Get the directories (i.e.
builds) from the GitHub Pages branch + try: + builds = set(get_branch_contents(ref)) + except CalledProcessError: + builds = set() + logging.warning(f"Cannot get {ref} contents") + + # Add and remove from the list of builds + if add: + builds.add(add) + if remove: + assert remove in builds, f"Build '{remove}' not in {sorted(builds)}" + builds.remove(remove) + + # Get a sorted list of tags + tags = get_sorted_tags_list() + + # Make the sorted versions list from main branches and tags + versions: List[str] = [] + for version in ["master", "main"] + tags: + if version in builds: + versions.append(version) + builds.remove(version) + + # Add in anything that is left to the bottom + versions += sorted(builds) + print(f"Sorted versions: {versions}") + return versions + + +def write_json(path: Path, repository: str, versions: List[str]): + org, repo_name = repository.split("/") + struct = [ + {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} + for version in versions + ] + text = json.dumps(struct, indent=2) + print(f"JSON switcher:\n{text}") + path.write_text(text, encoding="utf-8") + + +def main(args=None): + parser = ArgumentParser( + description="Make a switcher.json file from gh-pages directories" + ) + parser.add_argument( + "--add", + help="Add this directory to the list of existing directories", + ) + parser.add_argument( + "--remove", + help="Remove this directory from the list of existing directories", + ) + parser.add_argument( + "repository", + help="The GitHub org and repository name: ORG/REPO", + ) + parser.add_argument( + "output", + type=Path, + help="Path to write switcher.json to", + ) + args = parser.parse_args(args) + + # Write the versions file + versions = get_versions("origin/gh-pages", args.add, args.remove) + write_json(args.output, args.repository, versions) + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml index d54b36dfa..0725d39cc 100644 --- a/.github/workflows/code.yml +++ b/.github/workflows/code.yml @@ -3,68 +3,228 @@ name: Code CI on: push: pull_request: +env: + # The target python version, which must match the Dockerfile version + CONTAINER_PYTHON: "3.11" jobs: - build: + lint: + # pull requests are a duplicate of a branch push if within the same repo.
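+ # A push to a branch in this repo already runs the workflow, so the duplicate
+ # pull_request run is skipped; PRs from forks, whose head repo differs from
+ # this repository, still run under the pull_request event.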
+ if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + requirements_file: requirements-dev-3.x.txt + install_options: -e .[dev] + + - name: Lint + run: tox -e pre-commit,mypy + + test: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository strategy: fail-fast: false matrix: - os: ["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.7", "3.8", "3.9"] - + os: ["ubuntu-latest"] # can add windows-latest, macos-latest + python: ["3.7", "3.8", "3.9", "3.10", "3.11"] + install: ["-e .[dev]"] + # Make one version be non-editable to test both paths of version code include: - # Tag the Python3.7 runner as the one to publish wheels - os: "ubuntu-latest" - python: "3.7" - publish: true + python: "3.10" + install: ".[dev]" - name: ${{ matrix.os }}/${{ matrix.python }} runs-on: ${{ matrix.os }} + env: + # https://github.com/pytest-dev/pytest/issues/2042 + PY_IGNORE_IMPORTMISMATCH: "1" steps: - - name: Checkout Source - uses: actions/checkout@v2 + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 - - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v2 + - name: Install python packages + uses: ./.github/actions/install_requirements with: - python-version: ${{ matrix.python }} + python_version: ${{ matrix.python }} + requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt + install_options: ${{ matrix.install }} - - name: Install Python Dependencies - run: | - pip install pipenv twine build - pipenv install --dev --deploy --python $(python -c 'import sys; print(sys.executable)') && pipenv graph - - - name: Create Sdist and Wheel - if: matrix.publish - # Set SOURCE_DATE_EPOCH from git commit for reproducible build - # https://reproducible-builds.org/ - # Set group writable and umask to do the same to match inside DLS - run: | - chmod -R g+w . 
- umask 0002 - SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) python -m build --sdist --wheel + - name: List dependency tree + run: pipdeptree - - name: Run Tests - run: pipenv run tests + - name: Run tests + run: tox -e pytest - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v3 with: - name: ${{ matrix.os }}/${{ matrix.python }} + name: ${{ matrix.python }}/${{ matrix.os }} files: cov.xml - - name: Upload Wheel and Sdist as artifacts - if: matrix.publish - uses: actions/upload-artifact@v2 + dist: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Build sdist and wheel + run: | + export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ + pipx run build + + - name: Upload sdist and wheel as artifacts + uses: actions/upload-artifact@v3 with: name: dist - path: dist/* + path: dist + + - name: Check for packaging errors + run: pipx run twine check --strict dist/* + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + python_version: ${{env.CONTAINER_PYTHON}} + requirements_file: requirements.txt + install_options: dist/*.whl + + - name: Test module --version works using the installed wheel + # If more than one module in src/ replace with module name to test + run: python -m $(ls src | head -1) --version + + container: + needs: [lint, dist, test] + runs-on: ubuntu-latest + + permissions: + contents: read + packages: write + + env: + TEST_TAG: "testing" - - name: Publish Sdist and Wheel to PyPI - # Only once when on a tag - if: matrix.publish && startsWith(github.ref, 'refs/tags') + steps: + - name: Checkout + uses: actions/checkout@v4 + + # image names must be all lower case + - name: Generate image repo name + run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV + + - name: Download wheel and lockfiles + uses: actions/download-artifact@v3 + with: + path: artifacts/ + + - name: Log in to GitHub Docker Registry + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + + - name: Build and export to Docker local cache + uses: docker/build-push-action@v5 + with: + # Note build-args, context, file, and target must all match between this + # step and the later build-push-action, otherwise the second build-push-action + # will attempt to build the image again + build-args: | + PIP_OPTIONS=-r lockfiles/requirements.txt dist/*.whl + context: artifacts/ + file: ./Dockerfile + target: runtime + load: true + tags: ${{ env.TEST_TAG }} + # If you have a long docker build (2+ minutes), uncomment the + # following to turn on caching. 
For short build times this + # makes it a little slower + #cache-from: type=gha + #cache-to: type=gha,mode=max + + - name: Test cli works in cached runtime image + run: docker run docker.io/library/${{ env.TEST_TAG }} --version + + - name: Create tags for publishing image + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.IMAGE_REPOSITORY }} + tags: | + type=ref,event=tag + type=raw,value=latest, enable=${{ github.ref_type == 'tag' }} + # type=edge,branch=main + # Add line above to generate image for every commit to given branch, + # and uncomment the end of if clause in next step + + - name: Push cached image to container registry + if: github.ref_type == 'tag' # || github.ref_name == 'main' + uses: docker/build-push-action@v5 + # This does not build the image again, it will find the image in the + # Docker cache and publish it + with: + # Note build-args, context, file, and target must all match between this + # step and the previous build-push-action, otherwise this step will + # attempt to build the image again + build-args: | + PIP_OPTIONS=-r lockfiles/requirements.txt dist/*.whl + context: artifacts/ + file: ./Dockerfile + target: runtime + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + release: + # upload to PyPI and make a release on every tag + needs: [lint, dist, test] + if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }} + runs-on: ubuntu-latest + env: + HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} + + steps: + - uses: actions/download-artifact@v3 + + - name: Fixup blank lockfiles + # Github release artifacts can't be blank + run: for f in lockfiles/*; do [ -s $f ] || echo '# No requirements' >> $f; done + + - name: Github Release + # We pin to the SHA, not the tag, for security reasons. 
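+ # A git tag such as v0.1.15 can later be moved to point at different code, but a
+ # full commit SHA cannot; the trailing "# v0.1.15" on the uses: line below is only
+ # a human-readable hint of which release that SHA corresponds to.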
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 + with: + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} + files: | + dist/* + lockfiles/* + generate_release_notes: true env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.pypi_token }} - run: twine upload dist/* \ No newline at end of file + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Publish to PyPI + if: ${{ env.HAS_PYPI_TOKEN }} + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index e9e4ce3cd..d89a08624 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -5,45 +5,48 @@ on: pull_request: jobs: - build: + docs: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository runs-on: ubuntu-latest steps: - - name: Checkout Source - uses: actions/checkout@v2 + - name: Avoid git conflicts when tag and branch pushed at same time + if: startsWith(github.ref, 'refs/tags') + run: sleep 60 + + - name: Checkout + uses: actions/checkout@v4 with: - # require history to get back to last tag for version number of branches + # Need this to get version number from last tag fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v2 + - name: Install system packages + # Can delete this if you don't use graphviz in your docs + run: sudo apt-get install graphviz + + - name: Install python packages + uses: ./.github/actions/install_requirements with: - python-version: "3.7" + requirements_file: requirements-dev-3.x.txt + install_options: -e .[dev] - - name: Install Python Dependencies - run: | - pip install pipenv - pipenv install --dev --deploy --python $(which python) && pipenv graph + - name: Build docs + run: tox -e docs - - name: Build Docs - run: pipenv run docs - - - uses: rishabhgupta/split-by@v1 - id: split - with: - string: ${{ github.ref }} - split-by: / + - name: Sanitize ref name for docs version + run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV - name: Move to versioned directory - # e.g. master or 0.1.2 - run: mv build/html ".github/pages/${{ steps.split.outputs._2 }}" + run: mv build/html .github/pages/$DOCS_VERSION + + - name: Write switcher.json + run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json - name: Publish Docs to gh-pages - # Only master and tags are published - if: "${{ github.repository_owner == 'dls-controls' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags')) }}" + if: github.event_name == 'push' && github.actor != 'dependabot[bot]' # We pin to the SHA, not the tag, for security reasons. 
- # https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: peaceiris/actions-gh-pages@bbdfb200618d235585ad98e965f4aafc39b4c501 # v3.7.3 + # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml new file mode 100644 index 000000000..e324640e7 --- /dev/null +++ b/.github/workflows/docs_clean.yml @@ -0,0 +1,43 @@ +name: Docs Cleanup CI + +# delete branch documentation when a branch is deleted +# also allow manually deleting a documentation version +on: + delete: + workflow_dispatch: + inputs: + version: + description: "documentation version to DELETE" + required: true + type: string + +jobs: + remove: + if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: gh-pages + + - name: removing documentation for branch ${{ github.event.ref }} + if: ${{ github.event_name != 'workflow_dispatch' }} + run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV + + - name: manually removing documentation version ${{ github.event.inputs.version }} + if: ${{ github.event_name == 'workflow_dispatch' }} + run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV + + - name: Sanitize ref name for docs version + run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV + + - name: update index and push changes + run: | + rm -r $DOCS_VERSION + python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json + git config --global user.name 'GitHub Actions Docs Cleanup CI' + git config --global user.email 'GithubActionsCleanup@noreply.github.com' + git commit -am "Removing redundant docs version $DOCS_VERSION" + git push diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml new file mode 100644 index 000000000..d2a80410e --- /dev/null +++ b/.github/workflows/linkcheck.yml @@ -0,0 +1,27 @@ +name: Link Check + +on: + workflow_dispatch: + schedule: + # Run weekly to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + docs: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + requirements_file: requirements-dev-3.x.txt + install_options: -e .[dev] + + - name: Check links + run: tox -e docs build -- -b linkcheck + + - name: Keepalive Workflow + uses: gautamkrishnar/keepalive-workflow@v1 diff --git a/.gitignore b/.gitignore index 0ce69d995..2593ec752 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,7 @@ var/ *.egg-info/ .installed.cfg *.egg +**/_version.py # PyInstaller # Usually these files are written by a python script from a template @@ -58,8 +59,12 @@ docs/_build/ # PyBuilder target/ -# DLS build dir and virtual environment -/prefix/ -/venv/ -/lightweight-venv/ -/installed.files +# likely venv names +.venv* +venv* + +# further build artifacts +lockfiles/ + +# ruff cache +.ruff_cache/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index 1efd50249..000000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,4 +0,0 @@ -include: - - project: 'controls/reports/ci_templates' - ref: 
master - file: 'python3/dls_py3_template.yml' diff --git a/.gitremotes b/.gitremotes deleted file mode 100644 index 1871dce5a..000000000 --- a/.gitremotes +++ /dev/null @@ -1 +0,0 @@ -github git@github.com:PandABlocks/PandABlocks-client.git diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..5bc9f001c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,23 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-added-large-files + - id: check-yaml + - id: check-merge-conflict + + - repo: local + hooks: + - id: black + name: Run black + stages: [commit] + language: system + entry: black --check --diff + types: [python] + + - id: ruff + name: Run ruff + stages: [commit] + language: system + entry: ruff + types: [python] diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 734f215e6..e3b582fde 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,7 +1,10 @@ { "recommendations": [ - "ms-python.vscode-pylance", + "ms-vscode-remote.remote-containers", "ms-python.python", - "ryanluker.vscode-coverage-gutters" + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters", + "charliermarsh.Ruff" ] } \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 47f516405..49b3789f5 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,12 +1,14 @@ { - "editor.defaultFormatter": "ms-python.python", "python.linting.pylintEnabled": false, - "python.linting.flake8Enabled": true, + "python.linting.flake8Enabled": false, "python.linting.mypyEnabled": true, "python.linting.enabled": true, - "python.testing.pytestArgs": [], + "python.testing.pytestArgs": [ + "--cov=python3_pip_skeleton", + "--cov-report", + "xml:cov.xml" + ], "python.testing.unittestEnabled": false, - "python.testing.nosetestsEnabled": false, "python.testing.pytestEnabled": true, "python.formatting.provider": "black", "python.languageServer": "Pylance", @@ -18,4 +20,10 @@ "editor.defaultFormatter": "vscode.json-language-features" }, "python.analysis.typeCheckingMode": "off", + "[python]": { + "editor.codeActionsOnSave": { + "source.fixAll.ruff": false, + "source.organizeImports.ruff": true + } + } } \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json index ff78a11b7..946e69d4b 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -5,8 +5,8 @@ "tasks": [ { "type": "shell", - "label": "Tests with coverage", - "command": "pipenv run tests", + "label": "Tests, lint and docs", + "command": "tox -p", "options": { "cwd": "${workspaceRoot}" }, diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..d712d3b5e --- /dev/null +++ b/Dockerfile @@ -0,0 +1,37 @@ +# This file is for use as a devcontainer and a runtime container +# +# The devcontainer should use the build target and run as root with podman +# or docker with user namespaces. +# +FROM python:3.11 as build + +ARG PIP_OPTIONS=. + +# Add any system dependencies for the developer/build environment here e.g. +# RUN apt-get update && apt-get upgrade -y && \ +# apt-get install -y --no-install-recommends \ +# desired-packages \ +# && rm -rf /var/lib/apt/lists/* + +# set up a virtual environment and put it in PATH +RUN python -m venv /venv +ENV PATH=/venv/bin:$PATH + +# Copy any required context for the pip install over +COPY . 
/context +WORKDIR /context + +# install python package into /venv +RUN pip install ${PIP_OPTIONS} + +FROM python:3.11-slim as runtime + +# Add apt-get system dependecies for runtime here if needed + +# copy the virtual environment from the build stage and put it in PATH +COPY --from=build /venv/ /venv/ +ENV PATH=/venv/bin:$PATH + +# change this entrypoint if it is not the same as the repo +ENTRYPOINT ["pandablocks"] +CMD ["--version"] diff --git a/Pipfile b/Pipfile deleted file mode 100644 index c77deb445..000000000 --- a/Pipfile +++ /dev/null @@ -1,38 +0,0 @@ -[[source]] -name = "pypi" -url = "https://pypi.org/simple" -verify_ssl = true - -[dev-packages] -# Pinning black stops us having to allow pre-releases globally -black = "*" -# Pins to make lockfile usable on multiple Python versions and platforms -mypy = "*" -atomicwrites = "*" -typed-ast = "*" -# Test and docs dependencies -pytest-cov = "*" -pytest-mypy = "*" -flake8 = "==4.0.1" # https://github.com/tholo/pytest-flake8/issues/87 - will stop being an issue when we move to a new skeleton -pytest-flake8 = "*" -pytest-black = "*" -mock = "*" -types-mock = "*" -pytest-asyncio = "*" -flake8-isort = "*" -isort = ">5.0" -sphinx = "==4.3.2" -sphinx-rtd-theme = "*" - -[packages] -# All other package requirements from setup.cfg -pandablocks = {editable = true, extras = ["hdf5"], path = "."} -# Pins to make lockfile usable on multiple Python versions and platforms -numpy = "*" -typing-extensions = "*" - -[scripts] -tests = "python -m pytest" -docs = "sphinx-build -EWT --keep-going docs build/html" -# Delete any files that git ignore hides from us -gitclean = "git clean -fdX" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index 1931d587f..000000000 --- a/Pipfile.lock +++ /dev/null @@ -1,866 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "54f1fb92311a238c4813c2f415a8b69a3a4c5e070442535412f5e8d8a168f217" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "version": "==8.1.3" - }, - "cycler": { - "hashes": [ - "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", - "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8" - ], - "version": "==0.10.0" - }, - "fonttools": { - "hashes": [ - "sha256:1933415e0fbdf068815cb1baaa1f159e17830215f7e8624e5731122761627557", - "sha256:2b18a172120e32128a80efee04cff487d5d140fe7d817deb648b2eee023a40e4" - ], - "version": "==4.29.1" - }, - "h5py": { - "hashes": [ - "sha256:03d64fb86bb86b978928bad923b64419a23e836499ec6363e305ad28afd9d287", - "sha256:04e2e1e2fc51b8873e972a08d2f89625ef999b1f2d276199011af57bb9fc7851", - "sha256:0798a9c0ff45f17d0192e4d7114d734cac9f8b2b2c76dd1d923c4d0923f27bb6", - "sha256:0a047fddbe6951bce40e9cde63373c838a978c5e05a011a682db9ba6334b8e85", - "sha256:0d8de8cb619fc597da7cf8cdcbf3b7ff8c5f6db836568afc7dc16d21f59b2b49", - "sha256:1fcb11a2dc8eb7ddcae08afd8fae02ba10467753a857fa07a404d700a93f3d53", - "sha256:3fcf37884383c5da64846ab510190720027dca0768def34dd8dcb659dbe5cbf3", - "sha256:43fed4d13743cf02798a9a03a360a88e589d81285e72b83f47d37bb64ed44881", - "sha256:63beb8b7b47d0896c50de6efb9a1eaa81dbe211f3767e7dd7db159cea51ba37a", - "sha256:6776d896fb90c5938de8acb925e057e2f9f28755f67ec3edcbc8344832616c38", - 
"sha256:9e2ad2aa000f5b1e73b5dfe22f358ca46bf1a2b6ca394d9659874d7fc251731a", - "sha256:9e7535df5ee3dc3e5d1f408fdfc0b33b46bc9b34db82743c82cd674d8239b9ad", - "sha256:a9351d729ea754db36d175098361b920573fdad334125f86ac1dd3a083355e20", - "sha256:c038399ce09a58ff8d89ec3e62f00aa7cb82d14f34e24735b920e2a811a3a426", - "sha256:d77af42cb751ad6cc44f11bae73075a07429a5cf2094dfde2b1e716e059b3911", - "sha256:e5b7820b75f9519499d76cc708e27242ccfdd9dfb511d6deb98701961d0445aa", - "sha256:ed43e2cc4f511756fd664fb45d6b66c3cbed4e3bd0f70e29c37809b2ae013c44", - "sha256:f084bbe816907dfe59006756f8f2d16d352faff2d107f4ffeb1d8de126fc5dc7", - "sha256:f514b24cacdd983e61f8d371edac8c1b780c279d0acb8485639e97339c866073", - "sha256:f73307c876af49aa869ec5df1818e9bb0bdcfcf8a5ba773cc45a4fba5a286a5c" - ], - "version": "==3.7.0" - }, - "importlib-metadata": { - "hashes": [ - "sha256:175f4ee440a0317f6e8d81b7f8d4869f93316170a65ad2b007d2929186c8052c", - "sha256:e0bc84ff355328a4adfc5240c4f211e0ab386f80aa640d1b11f0618a1d282094" - ], - "markers": "python_version < '3.8'", - "version": "==4.11.1" - }, - "kiwisolver": { - "hashes": [ - "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d", - "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31", - "sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9", - "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0", - "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72", - "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3", - "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6", - "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e", - "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000", - "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3", - "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18", - "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21", - "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621", - "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b", - "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc", - "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131", - "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882", - "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454", - "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248", - "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de", - "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598", - "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54", - "sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278", - "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6", - "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81", - "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030", - "sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8", - "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689", - "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4", - "sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0", - "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05", - 
"sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9" - ], - "version": "==1.3.1" - }, - "matplotlib": { - "hashes": [ - "sha256:0abf8b51cc6d3ba34d1b15b26e329f23879848a0cf1216954c1f432ffc7e1af7", - "sha256:0e020a42f3338823a393dd2f80e39a2c07b9f941dfe2c778eb104eeb33d60bb5", - "sha256:13930a0c9bec0fd25f43c448b047a21af1353328b946f044a8fc3be077c6b1a8", - "sha256:153a0cf6a6ff4f406a0600d2034710c49988bacc6313d193b32716f98a697580", - "sha256:18f6e52386300db5cc4d1e9019ad9da2e80658bab018834d963ebb0aa5355095", - "sha256:2089b9014792dcc87bb1d620cde847913338abf7d957ef05587382b0cb76d44e", - "sha256:2eea16883aa7724c95eea0eb473ab585c6cf66f0e28f7f13e63deb38f4fd6d0f", - "sha256:38892a254420d95594285077276162a5e9e9c30b6da08bdc2a4d53331ad9a6fa", - "sha256:4b018ea6f26424a0852eb60eb406420d9f0d34f65736ea7bbfbb104946a66d86", - "sha256:65f877882b7ddede7090c7d87be27a0f4720fe7fc6fddd4409c06e1aa0f1ae8d", - "sha256:666d717a4798eb9c5d3ae83fe80c7bc6ed696b93e879cb01cb24a74155c73612", - "sha256:66b172610db0ececebebb09d146f54205f87c7b841454e408fba854764f91bdd", - "sha256:6db02c5605f063b67780f4d5753476b6a4944343284aa4e93c5e8ff6e9ec7f76", - "sha256:6e0e6b2111165522ad336705499b1f968c34a9e84d05d498ee5af0b5697d1efe", - "sha256:71a1851111f23f82fc43d2b6b2bfdd3f760579a664ebc939576fe21cc6133d01", - "sha256:7a7cb59ebd63a8ac4542ec1c61dd08724f82ec3aa7bb6b4b9e212d43c611ce3d", - "sha256:7baf23adb698d8c6ca7339c9dde00931bc47b2dd82fa912827fef9f93db77f5e", - "sha256:970aa97297537540369d05fe0fd1bb952593f9ab696c9b427c06990a83e2418b", - "sha256:9bac8eb1eccef540d7f4e844b6313d9f7722efd48c07e1b4bfec1056132127fd", - "sha256:a07ff2565da72a7b384a9e000b15b6b8270d81370af8a3531a16f6fbcee023cc", - "sha256:a0dcaf5648cecddc328e81a0421821a1f65a1d517b20746c94a1f0f5c36fb51a", - "sha256:a0ea10faa3bab0714d3a19c7e0921279a68d57552414d6eceaea99f97d7735db", - "sha256:a5b62d1805cc83d755972033c05cea78a1e177a159fc84da5c9c4ab6303ccbd9", - "sha256:a6cef5b31e27c31253c0f852b629a38d550ae66ec6850129c49d872f9ee428cb", - "sha256:a7bf8b05c214d32fb7ca7c001fde70b9b426378e897b0adbf77b85ea3569d56a", - "sha256:ac17a7e7b06ee426a4989f0b7f24ab1a592e39cdf56353a90f4e998bc0bf44d6", - "sha256:b3b687e905da32e5f2e5f16efa713f5d1fcd9fb8b8c697895de35c91fedeb086", - "sha256:b5e439d9e55d645f2a4dca63e2f66d68fe974c405053b132d61c7e98c25dfeb2", - "sha256:ba107add08e12600b072cf3c47aaa1ab85dd4d3c48107a5d3377d1bf80f8b235", - "sha256:d092b7ba63182d2dd427904e3eb58dd5c46ec67c5968de14a4b5007010a3a4cc", - "sha256:dc8c5c23e7056e126275dbf29efba817b3d94196690930d0968873ac3a94ab82", - "sha256:df0042cab69f4d246f4cb8fc297770ac4ae6ec2983f61836b04a117722037dcd", - "sha256:ee3d9ff16d749a9aa521bd7d86f0dbf256b2d2ac8ce31b19e4d2c86d2f2ff0b6", - "sha256:f23fbf70d2e80f4e03a83fc1206a8306d9bc50482fee4239f10676ce7e470c83", - "sha256:ff5d9fe518ad2de14ce82ab906b6ab5c2b0c7f4f984400ff8a7a905daa580a0a" - ], - "version": "==3.5.0" - }, - "numpy": { - "hashes": [ - "sha256:1dbe1c91269f880e364526649a52eff93ac30035507ae980d2fed33aaee633ac", - "sha256:357768c2e4451ac241465157a3e929b265dfac85d9214074985b1786244f2ef3", - "sha256:3820724272f9913b597ccd13a467cc492a0da6b05df26ea09e78b171a0bb9da6", - "sha256:4391bd07606be175aafd267ef9bea87cf1b8210c787666ce82073b05f202add1", - "sha256:4aa48afdce4660b0076a00d80afa54e8a97cd49f457d68a4342d188a09451c1a", - "sha256:58459d3bad03343ac4b1b42ed14d571b8743dc80ccbf27444f266729df1d6f5b", - "sha256:5c3c8def4230e1b959671eb959083661b4a0d2e9af93ee339c7dada6759a9470", - "sha256:5f30427731561ce75d7048ac254dbe47a2ba576229250fb60f0fb74db96501a1", - 
"sha256:643843bcc1c50526b3a71cd2ee561cf0d8773f062c8cbaf9ffac9fdf573f83ab", - "sha256:67c261d6c0a9981820c3a149d255a76918278a6b03b6a036800359aba1256d46", - "sha256:67f21981ba2f9d7ba9ade60c9e8cbaa8cf8e9ae51673934480e45cf55e953673", - "sha256:6aaf96c7f8cebc220cdfc03f1d5a31952f027dda050e5a703a0d1c396075e3e7", - "sha256:7c4068a8c44014b2d55f3c3f574c376b2494ca9cc73d2f1bd692382b6dffe3db", - "sha256:7c7e5fa88d9ff656e067876e4736379cc962d185d5cd808014a8a928d529ef4e", - "sha256:7f5ae4f304257569ef3b948810816bc87c9146e8c446053539947eedeaa32786", - "sha256:82691fda7c3f77c90e62da69ae60b5ac08e87e775b09813559f8901a88266552", - "sha256:8737609c3bbdd48e380d463134a35ffad3b22dc56295eff6f79fd85bd0eeeb25", - "sha256:9f411b2c3f3d76bba0865b35a425157c5dcf54937f82bbeb3d3c180789dd66a6", - "sha256:a6be4cb0ef3b8c9250c19cc122267263093eee7edd4e3fa75395dfda8c17a8e2", - "sha256:bcb238c9c96c00d3085b264e5c1a1207672577b93fa666c3b14a45240b14123a", - "sha256:bf2ec4b75d0e9356edea834d1de42b31fe11f726a81dfb2c2112bc1eaa508fcf", - "sha256:d136337ae3cc69aa5e447e78d8e1514be8c3ec9b54264e680cf0b4bd9011574f", - "sha256:d4bf4d43077db55589ffc9009c0ba0a94fa4908b9586d6ccce2e0b164c86303c", - "sha256:d6a96eef20f639e6a97d23e57dd0c1b1069a7b4fd7027482a4c5c451cd7732f4", - "sha256:d9caa9d5e682102453d96a0ee10c7241b72859b01a941a397fd965f23b3e016b", - "sha256:dd1c8f6bd65d07d3810b90d02eba7997e32abbdf1277a481d698969e921a3be0", - "sha256:e31f0bb5928b793169b87e3d1e070f2342b22d5245c755e2b81caa29756246c3", - "sha256:ecb55251139706669fdec2ff073c98ef8e9a84473e51e716211b41aa0f18e656", - "sha256:ee5ec40fdd06d62fe5d4084bef4fd50fd4bb6bfd2bf519365f569dc470163ab0", - "sha256:f17e562de9edf691a42ddb1eb4a5541c20dd3f9e65b09ded2beb0799c0cf29bb", - "sha256:fdffbfb6832cd0b300995a2b08b8f6fa9f6e856d562800fea9182316d99c4e8e" - ], - "index": "pypi", - "version": "==1.21.6" - }, - "packaging": { - "hashes": [ - "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", - "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" - ], - "version": "==21.3" - }, - "pandablocks": { - "editable": true, - "extras": [ - "hdf5" - ], - "path": "." 
- }, - "pillow": { - "hashes": [ - "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5", - "sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4", - "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9", - "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a", - "sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9", - "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727", - "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120", - "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c", - "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2", - "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797", - "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b", - "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f", - "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef", - "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232", - "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb", - "sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9", - "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812", - "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178", - "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b", - "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5", - "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b", - "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1", - "sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713", - "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4", - "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484", - "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c", - "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9", - "sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388", - "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d", - "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602", - "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9", - "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e", - "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2" - ], - "version": "==8.2.0" - }, - "pyparsing": { - "hashes": [ - "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", - "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" - ], - "version": "==3.0.9" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "version": "==2.8.2" - }, - "setuptools-scm": { - "hashes": [ - "sha256:031e13af771d6f892b941adb6ea04545bbf91ebc5ce68c78aaf3fff6e1fb4844", - "sha256:7930f720905e03ccd1e1d821db521bff7ec2ac9cf0ceb6552dd73d24a45d3b02" - ], - "version": "==7.0.5" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "version": "==1.16.0" - }, - "tomli": { - "hashes": [ - 
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "version": "==2.0.1" - }, - "typing-extensions": { - "hashes": [ - "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa", - "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e" - ], - "index": "pypi", - "version": "==4.4.0" - }, - "zipp": { - "hashes": [ - "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb", - "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980" - ], - "version": "==3.9.0" - } - }, - "develop": { - "alabaster": { - "hashes": [ - "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", - "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" - ], - "version": "==0.7.12" - }, - "atomicwrites": { - "hashes": [ - "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197", - "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a" - ], - "index": "pypi", - "version": "==1.4.0" - }, - "attrs": { - "hashes": [ - "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6", - "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c" - ], - "version": "==22.1.0" - }, - "babel": { - "hashes": [ - "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe", - "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6" - ], - "version": "==2.11.0" - }, - "black": { - "hashes": [ - "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411", - "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c", - "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497", - "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e", - "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342", - "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27", - "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41", - "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab", - "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5", - "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16", - "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e", - "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c", - "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe", - "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3", - "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec", - "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3", - "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd", - "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c", - "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4", - "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90", - "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869", - "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747", - "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875" - ], - "index": "pypi", - "version": "==22.8.0" - }, - "certifi": { - "hashes": [ - "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", - 
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" - ], - "version": "==2022.12.7" - }, - "charset-normalizer": { - "hashes": [ - "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", - "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" - ], - "version": "==2.1.1" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "version": "==8.1.3" - }, - "coverage": { - "extras": [ - "toml" - ], - "hashes": [ - "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79", - "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a", - "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f", - "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a", - "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa", - "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398", - "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba", - "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d", - "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf", - "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b", - "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518", - "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d", - "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795", - "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2", - "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e", - "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32", - "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745", - "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b", - "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e", - "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d", - "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f", - "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660", - "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62", - "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6", - "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04", - "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c", - "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5", - "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef", - "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc", - "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae", - "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578", - "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466", - "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4", - "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91", - "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0", - "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4", - "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b", - "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe", - 
"sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b", - "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75", - "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b", - "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c", - "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72", - "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b", - "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f", - "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e", - "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53", - "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3", - "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84", - "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987" - ], - "version": "==6.5.0" - }, - "docutils": { - "hashes": [ - "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125", - "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61" - ], - "version": "==0.17.1" - }, - "filelock": { - "hashes": [ - "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2", - "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c" - ], - "version": "==3.8.2" - }, - "flake8": { - "hashes": [ - "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d", - "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d" - ], - "index": "pypi", - "version": "==4.0.1" - }, - "flake8-isort": { - "hashes": [ - "sha256:0951398c343c67f4933407adbbfb495d4df7c038650c5d05753a006efcfeb390", - "sha256:8c4ab431d87780d0c8336e9614e50ef11201bc848ef64ca017532dec39d4bf49" - ], - "index": "pypi", - "version": "==5.0.3" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "version": "==3.4" - }, - "imagesize": { - "hashes": [ - "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", - "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a" - ], - "version": "==1.4.1" - }, - "importlib-metadata": { - "hashes": [ - "sha256:175f4ee440a0317f6e8d81b7f8d4869f93316170a65ad2b007d2929186c8052c", - "sha256:e0bc84ff355328a4adfc5240c4f211e0ab386f80aa640d1b11f0618a1d282094" - ], - "markers": "python_version < '3.8'", - "version": "==4.11.1" - }, - "iniconfig": { - "hashes": [ - "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", - "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" - ], - "version": "==1.1.1" - }, - "isort": { - "hashes": [ - "sha256:dd8bbc5c0990f2a095d754e50360915f73b4c26fc82733eb5bfc6b48396af4d2", - "sha256:e486966fba83f25b8045f8dd7455b0a0d1e4de481e1d7ce4669902d9fb85e622" - ], - "index": "pypi", - "version": "==5.11.2" - }, - "jinja2": { - "hashes": [ - "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", - "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" - ], - "version": "==3.1.2" - }, - "markupsafe": { - "hashes": [ - "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003", - "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88", - "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", - 
"sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7", - "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a", - "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603", - "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1", - "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135", - "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247", - "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", - "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601", - "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77", - "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02", - "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", - "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63", - "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f", - "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980", - "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", - "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", - "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff", - "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96", - "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1", - "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925", - "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a", - "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6", - "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", - "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", - "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", - "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f", - "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3", - "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c", - "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a", - "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", - "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a", - "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a", - "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37", - "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452", - "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", - "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", - "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7" - ], - "version": "==2.1.1" - }, - "mccabe": { - "hashes": [ - "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", - "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" - ], - "version": "==0.6.1" - }, - "mock": { - "hashes": [ - "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62", - "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc" - ], - "index": "pypi", - "version": "==4.0.3" - }, - "mypy": { - "hashes": [ - "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d", - "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6", - 
"sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf", - "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f", - "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813", - "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33", - "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad", - "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05", - "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297", - "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06", - "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd", - "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243", - "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305", - "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476", - "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711", - "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70", - "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5", - "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461", - "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab", - "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c", - "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d", - "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135", - "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93", - "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648", - "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a", - "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb", - "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3", - "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372", - "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb", - "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef" - ], - "index": "pypi", - "version": "==0.991" - }, - "mypy-extensions": { - "hashes": [ - "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", - "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" - ], - "version": "==0.4.3" - }, - "packaging": { - "hashes": [ - "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", - "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" - ], - "version": "==21.3" - }, - "pathspec": { - "hashes": [ - "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93", - "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d" - ], - "version": "==0.10.1" - }, - "platformdirs": { - "hashes": [ - "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788", - "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19" - ], - "version": "==2.5.2" - }, - "pluggy": { - "hashes": [ - "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" - ], - "version": "==1.0.0" - }, - "py": { - "hashes": [ - "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", - "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" - ], - "version": "==1.11.0" - }, - "pycodestyle": { - "hashes": [ - 
"sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20", - "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f" - ], - "version": "==2.8.0" - }, - "pyflakes": { - "hashes": [ - "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c", - "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e" - ], - "version": "==2.4.0" - }, - "pygments": { - "hashes": [ - "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1", - "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42" - ], - "version": "==2.13.0" - }, - "pyparsing": { - "hashes": [ - "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", - "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" - ], - "version": "==3.0.9" - }, - "pytest": { - "hashes": [ - "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7", - "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39" - ], - "version": "==7.1.3" - }, - "pytest-asyncio": { - "hashes": [ - "sha256:6d895b02432c028e6957d25fc936494e78c6305736e785d9fee408b1efbc7ff4", - "sha256:e0fe5dbea40516b661ef1bcfe0bd9461c2847c4ef4bb40012324f2454fb7d56d" - ], - "index": "pypi", - "version": "==0.17.2" - }, - "pytest-black": { - "hashes": [ - "sha256:1d339b004f764d6cd0f06e690f6dd748df3d62e6fe1a692d6a5500ac2c5b75a5" - ], - "index": "pypi", - "version": "==0.3.12" - }, - "pytest-cov": { - "hashes": [ - "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b", - "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470" - ], - "index": "pypi", - "version": "==4.0.0" - }, - "pytest-flake8": { - "hashes": [ - "sha256:ba4f243de3cb4c2486ed9e70752c80dd4b636f7ccb27d4eba763c35ed0cd316e", - "sha256:e0661a786f8cbf976c185f706fdaf5d6df0b1667c3bcff8e823ba263618627e7" - ], - "index": "pypi", - "version": "==1.1.1" - }, - "pytest-mypy": { - "hashes": [ - "sha256:1fa55723a4bf1d054fcba1c3bd694215a2a65cc95ab10164f5808afd893f3b11", - "sha256:6e68e8eb7ceeb7d1c83a1590912f784879f037b51adfb9c17b95c6b2fc57466b" - ], - "index": "pypi", - "version": "==0.8.1" - }, - "pytz": { - "hashes": [ - "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91", - "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174" - ], - "version": "==2022.4" - }, - "requests": { - "hashes": [ - "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", - "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" - ], - "version": "==2.28.1" - }, - "snowballstemmer": { - "hashes": [ - "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", - "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a" - ], - "version": "==2.2.0" - }, - "sphinx": { - "hashes": [ - "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c", - "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851" - ], - "index": "pypi", - "version": "==4.3.2" - }, - "sphinx-rtd-theme": { - "hashes": [ - "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8", - "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c" - ], - "index": "pypi", - "version": "==1.0.0" - }, - "sphinxcontrib-applehelp": { - "hashes": [ - "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", - "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" - ], - "version": "==1.0.2" - }, - "sphinxcontrib-devhelp": 
{ - "hashes": [ - "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", - "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" - ], - "version": "==1.0.2" - }, - "sphinxcontrib-htmlhelp": { - "hashes": [ - "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07", - "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2" - ], - "version": "==2.0.0" - }, - "sphinxcontrib-jsmath": { - "hashes": [ - "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", - "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" - ], - "version": "==1.0.1" - }, - "sphinxcontrib-qthelp": { - "hashes": [ - "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", - "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" - ], - "version": "==1.0.3" - }, - "sphinxcontrib-serializinghtml": { - "hashes": [ - "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd", - "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952" - ], - "version": "==1.1.5" - }, - "toml": { - "hashes": [ - "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", - "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" - ], - "version": "==0.10.2" - }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "version": "==2.0.1" - }, - "typed-ast": { - "hashes": [ - "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2", - "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1", - "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6", - "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62", - "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac", - "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d", - "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc", - "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2", - "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97", - "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35", - "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6", - "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1", - "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4", - "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c", - "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e", - "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec", - "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f", - "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72", - "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47", - "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72", - "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe", - "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6", - "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3", - "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66" - ], - "index": "pypi", - "version": "==1.5.4" - }, - "types-mock": { - "hashes": [ - 
"sha256:4535fbb3912b88a247d43cdb41db0c8b2e187138986f6f01a989717e56105848", - "sha256:a849bc2d966063f4946013bf404822ee2b96f77a8dccda4174b70ab61c5293fe" - ], - "index": "pypi", - "version": "==4.0.15" - }, - "typing-extensions": { - "hashes": [ - "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa", - "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e" - ], - "index": "pypi", - "version": "==4.4.0" - }, - "urllib3": { - "hashes": [ - "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e", - "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997" - ], - "version": "==1.26.12" - }, - "zipp": { - "hashes": [ - "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb", - "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980" - ], - "version": "==3.9.0" - } - } -} diff --git a/CHANGELOG.rst b/docs/CHANGELOG.rst similarity index 100% rename from CHANGELOG.rst rename to docs/CHANGELOG.rst diff --git a/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst similarity index 91% rename from CONTRIBUTING.rst rename to docs/CONTRIBUTING.rst index 309706435..b426da9a3 100644 --- a/CONTRIBUTING.rst +++ b/docs/CONTRIBUTING.rst @@ -17,8 +17,9 @@ To get the source code and run the unit tests, run:: $ git clone git@github.com:PandABlocks/PandABlocks-client.git $ cd PandABlocks-client - $ pipenv install --dev - $ pipenv run tests + $ python3 -m venv /path/to/venv + $ source /path/to/venv/bin/activate + $ pytest While 100% code coverage does not make a library bug-free, it significantly reduces the number of easily caught bugs! Please make sure coverage remains the @@ -37,9 +38,8 @@ The code in this repository conforms to standards set by the following tools: .. _black: https://github.com/psf/black .. _flake8: http://flake8.pycqa.org/en/latest/ .. _isort: https://github.com/timothycrosley/isort -.. _mypy: https://github.com/python/mypy -These tests will be run on code when running ``pipenv run tests`` and also +These tests will be run on code when running ``pytest`` and also automatically at check in. Please read the tool documentation for details on how to fix the errors it reports. @@ -63,7 +63,7 @@ Docs follow the underlining convention:: You can build the docs from the project directory by running:: - $ pipenv run docs + $ tox -e docs $ firefox build/html/index.html Release Checklist diff --git a/docs/conf.py b/docs/conf.py index 657b55add..16afa5272 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -4,6 +4,12 @@ # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html +import sys +from pathlib import Path +from subprocess import check_output + +import requests + import pandablocks # -- General configuration ------------------------------------------------ @@ -18,8 +24,10 @@ # The short X.Y version. if "+" in release: - # Not on a tag - version = "master" + # Not on a tag, use branch name + root = Path(__file__).absolute().parent.parent + git_branch = check_output("git branch --show-current".split(), cwd=root) + version = git_branch.decode().strip() else: version = release @@ -36,6 +44,10 @@ "matplotlib.sphinxext.plot_directive", # Adds the inheritance-diagram generation directive "sphinx.ext.inheritance_diagram", + # Add a copy button to each code block + "sphinx_copybutton", + # For the card element + "sphinx_design", ] # If true, Sphinx will warn about all references where the target cannot @@ -46,7 +58,17 @@ # generating warnings in "nitpicky mode". 
Note that type should include the # domain name if present. Example entries would be ('py:func', 'int') or # ('envvar', 'LD_LIBRARY_PATH'). -nitpick_ignore = [("py:func", "int")] +nitpick_ignore = [ + ("py:class", "NoneType"), + ("py:class", "'str'"), + ("py:class", "'float'"), + ("py:class", "'int'"), + ("py:class", "'bool'"), + ("py:class", "'object'"), + ("py:class", "'id'"), + ("py:class", "typing_extensions.Literal"), + ("py:func", "int"), +] # Both the class’ and the __init__ method’s docstring are concatenated and # inserted into the main body of the autoclass directive @@ -65,9 +87,6 @@ # role, that is, for text marked up `like this` default_role = "any" -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - # The suffix of source filenames. source_suffix = ".rst" @@ -84,40 +103,96 @@ # This means you can link things like `str` and `asyncio` to the relevant # docs in the python documentation. -intersphinx_mapping = dict( - python=("https://docs.python.org/3/", None), - numpy=("https://numpy.org/doc/stable/", None), - h5py=("https://docs.h5py.org/en/stable/", None), -) +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "numpy": ("https://numpy.org/doc/stable/", None), + "h5py": ("https://docs.h5py.org/en/stable/", None), +} # A dictionary of graphviz graph attributes for inheritance diagrams. -inheritance_graph_attrs = dict(rankdir="TB") +inheritance_graph_attrs = {"rankdir": "TB"} # Common links that should be available on every page rst_epilog = """ -.. _Diamond Light Source: - http://www.diamond.ac.uk +.. _Diamond Light Source: http://www.diamond.ac.uk +.. _black: https://github.com/psf/black +.. _ruff: https://beta.ruff.rs/docs/ +.. _mypy: http://mypy-lang.org/ +.. _pre-commit: https://pre-commit.com/ +.. _numpy: https://numpy.org/ +.. _h5py: https://www.h5py.org/ +""" -.. _numpy: - https://numpy.org/ +# Ignore localhost links for periodic check that links in docs are valid +linkcheck_ignore = [r"http://localhost:\d+/"] -.. _h5py: - https://www.h5py.org/ -""" +# Set copy-button to ignore python and bash prompts +# https://sphinx-copybutton.readthedocs.io/en/latest/use.html#using-regexp-prompt-identifiers +copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " +copybutton_prompt_is_regexp = True # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "sphinx_rtd_theme" - -# Options for the sphinx rtd theme -html_theme_options = dict(style_nav_header_background="black") - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +# +html_theme = "pydata_sphinx_theme" +github_repo = project +github_user = "PandABlocks" +switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json" +switcher_exists = requests.get(switcher_json).ok +if not switcher_exists: + print( + "*** Can't read version switcher, is GitHub pages enabled? 
\n" + " Once Docs CI job has successfully run once, set the " + "Github pages source branch to be 'gh-pages' at:\n" + f" https://github.com/{github_user}/{github_repo}/settings/pages", + file=sys.stderr, + ) + +# Theme options for pydata_sphinx_theme +# We don't check switcher because there are 3 possible states for a repo: +# 1. New project, docs are not published so there is no switcher +# 2. Existing project with latest skeleton, switcher exists and works +# 3. Existing project with old skeleton that makes broken switcher, +# switcher exists but is broken +# Point 3 makes checking switcher difficult, because the updated skeleton +# will fix the switcher at the end of the docs workflow, but never gets a chance +# to complete as the docs build warns and fails. +html_theme_options = { + "logo": { + "text": project, + }, + "use_edit_page_button": True, + "github_url": f"https://github.com/{github_user}/{github_repo}", + "icon_links": [ + { + "name": "PyPI", + "url": f"https://pypi.org/project/{project}", + "icon": "fas fa-cube", + } + ], + "switcher": { + "json_url": switcher_json, + "version_match": version, + }, + "check_switcher": False, + "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], + "external_links": [ + { + "name": "Release Notes", + "url": f"https://github.com/{github_user}/{github_repo}/releases", + } + ], +} + +# A dictionary of values to pass into the template engine’s context for all pages +html_context = { + "github_user": github_user, + "github_repo": project, + "github_version": version, + "doc_path": "docs", +} # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. html_show_sphinx = False @@ -125,9 +200,6 @@ # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. html_show_copyright = True -# Add some CSS classes for columns and other tweaks in a custom css file -html_css_files = ["theme_overrides.css"] - # Logo html_logo = "images/PandA-logo-for-black-background.svg" html_favicon = "images/PandA-logo.ico" diff --git a/docs/developer/explanations/decisions.rst b/docs/developer/explanations/decisions.rst new file mode 100644 index 000000000..5841e6ea0 --- /dev/null +++ b/docs/developer/explanations/decisions.rst @@ -0,0 +1,17 @@ +.. This Source Code Form is subject to the terms of the Mozilla Public +.. License, v. 2.0. If a copy of the MPL was not distributed with this +.. file, You can obtain one at http://mozilla.org/MPL/2.0/. + +Architectural Decision Records +============================== + +We record major architectural decisions in Architecture Decision Records (ADRs), +as `described by Michael Nygard +`_. +Below is the list of our current ADRs. + +.. toctree:: + :maxdepth: 1 + :glob: + + decisions/* \ No newline at end of file diff --git a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst new file mode 100644 index 000000000..ef0a21e75 --- /dev/null +++ b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst @@ -0,0 +1,26 @@ +1. Record architecture decisions +================================ + +Date: 2023-10-18 + +Status +------ + +Accepted + +Context +------- + +We need to record the architectural decisions made on this project. + +Decision +-------- + +We will use Architecture Decision Records, as `described by Michael Nygard +`_. + +Consequences +------------ + +See Michael Nygard's article, linked above. To create new ADRs we will copy and +paste from existing ones. 
diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst new file mode 100644 index 000000000..b139da73f --- /dev/null +++ b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst @@ -0,0 +1,35 @@ +2. Adopt python3-pip-skeleton for project structure +===================================================== + +Date: 2023-10-18 + +Status +------ + +Accepted + +Context +------- + +We should use the following `pip-skeleton `_. +The skeleton will ensure consistency in developer +environments and package management. + +Decision +-------- + +We have switched to using the skeleton. + +Consequences +------------ + +This module will use a fixed set of tools as developed in python3-pip-skeleton +and can pull from this skeleton to update the packaging to the latest techniques. + +As such, the developer environment may have changed; the following could be +different: + +- linting +- formatting +- pip venv setup +- CI/CD diff --git a/docs/developer/explanations/decisions/0003-make-library-sans-io.rst b/docs/developer/explanations/decisions/0003-make-library-sans-io.rst new file mode 100644 index 000000000..a1a634ff3 --- /dev/null +++ b/docs/developer/explanations/decisions/0003-make-library-sans-io.rst @@ -0,0 +1,24 @@ +3. Sans-IO pandABlocks-client +============================= + +Date: 2021-08-02 (ADR created retroactively) + +Status +------ + +Accepted + +Context +------- + +Ensure PandABlocks-client works sans-io. + +Decision +-------- + +We will ensure pandablocks works `sans-io `. + +Consequences +------------ + +We have the option to use an asyncio client or a blocking client. \ No newline at end of file diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst new file mode 100644 index 000000000..11a5e6386 --- /dev/null +++ b/docs/developer/how-to/build-docs.rst @@ -0,0 +1,38 @@ +Build the docs using sphinx +=========================== + +You can build the `sphinx`_ based docs from the project directory by running:: + + $ tox -e docs + +This will build the static docs from the ``docs`` directory, which includes API +docs that pull in docstrings from the code. + +.. seealso:: + + `documentation_standards` + +The docs will be built into the ``build/html`` directory, and can be opened +locally with a web browser:: + + $ firefox build/html/index.html + +Autobuild +--------- + +You can also run an autobuild process, which will watch your ``docs`` +directory for changes and rebuild whenever it sees changes, reloading any +browsers watching the pages:: + + $ tox -e docs autobuild + +You can view the pages at localhost:: + + $ firefox http://localhost:8000 + +If you are also making changes to source code, you can tell it to watch +the ``src`` directory too:: + + $ tox -e docs autobuild -- --watch src + +.. _sphinx: https://www.sphinx-doc.org/ diff --git a/docs/reference/contributing.rst b/docs/developer/how-to/contribute.rst similarity index 100% rename from docs/reference/contributing.rst rename to docs/developer/how-to/contribute.rst diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst new file mode 100644 index 000000000..2df258d8f --- /dev/null +++ b/docs/developer/how-to/lint.rst @@ -0,0 +1,39 @@ +Run linting using pre-commit +============================ + +Code linting is handled by black_ and ruff_ run under pre-commit_.
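The project's actual ``.pre-commit-config.yaml`` is not shown in this diff, but a minimal sketch of how black and ruff are typically wired into pre-commit looks something like the following (the hook repository URLs and ``rev`` pins are illustrative assumptions, not values taken from this project)::

    # Illustrative sketch only; revisions are placeholders, not project pins
    repos:
      - repo: https://github.com/psf/black
        rev: 23.9.1
        hooks:
          - id: black
      - repo: https://github.com/astral-sh/ruff-pre-commit
        rev: v0.0.292
        hooks:
          - id: ruff
            args: [--fix]

Pinning each hook's ``rev`` is what keeps the checks reproducible between developers and CI.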
+ +Running pre-commit +------------------ + +You can run the above checks on all files with this command:: + + $ tox -e pre-commit + +Or you can install a pre-commit hook that will run each time you do a ``git +commit`` on just the files that have changed:: + + $ pre-commit install + +It is also possible to `automatically enable pre-commit on cloned repositories `_. +This will result in pre-commits being enabled on every repo your user clones from now on. + +Fixing issues +------------- + +If black reports an issue you can tell it to reformat all the files in the +repository:: + + $ black . + +Likewise with ruff:: + + $ ruff --fix . + +Ruff may not be able to automatically fix all issues; in this case, you will have to fix those manually. + +VSCode support +-------------- + +The ``.vscode/settings.json`` will run black formatting as well as +ruff checking on save. Issues will be highlighted in the editor window. diff --git a/docs/developer/how-to/make-release.rst b/docs/developer/how-to/make-release.rst new file mode 100644 index 000000000..df24c3407 --- /dev/null +++ b/docs/developer/how-to/make-release.rst @@ -0,0 +1,16 @@ +Make a release +============== + +To make a new release, please follow this checklist: + +- Choose a new PEP440 compliant release number (see https://peps.python.org/pep-0440/) +- Go to the GitHub release_ page +- Choose ``Draft New Release`` +- Click ``Choose Tag`` and supply the new tag you chose (click create new tag) +- Click ``Generate release notes``, review and edit these notes +- Choose a title and click ``Publish Release`` + +Note that tagging and pushing to the main branch has the same effect except that +you will not get the option to edit the release notes. + +.. _release: https://github.com/PandABlocks/PandABlocks-client/releases diff --git a/docs/developer/how-to/pin-requirements.rst b/docs/developer/how-to/pin-requirements.rst new file mode 100644 index 000000000..89639623a --- /dev/null +++ b/docs/developer/how-to/pin-requirements.rst @@ -0,0 +1,74 @@ +Pinning Requirements +==================== + +Introduction +------------ + +By design this project only defines dependencies in one place, i.e. in +the ``requires`` table in ``pyproject.toml``. + +In the ``requires`` table it is possible to pin versions of some dependencies +as needed. For library projects it is best to leave pinning to a minimum so +that your library can be used by the widest range of applications. + +When CI builds the project it will use the latest compatible set of +dependencies available (after applying your pins and any dependencies' pins). + +This approach means that there is a possibility that a future build may +break because an updated release of a dependency has made a breaking change. + +The correct way to fix such an issue is to work out the minimum pinning in +``requires`` that will resolve the problem. However this can be quite hard to +do and may be time consuming when simply trying to release a minor update. + +For this reason we provide a mechanism for locking all dependencies to +the same version as a previous successful release. This is a quick fix that +should guarantee a successful CI build. + +Finding the lock files +---------------------- + +Every release of the project will have a set of requirements files published +as release assets. 
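Before the concrete example that follows, it may help to see the shape of one of these assets: it is plain ``pip freeze`` output, one pinned requirement per line. The package names below are taken from this project's dependencies, but the versions are made up for illustration::

    click==8.1.3
    h5py==3.8.0
    matplotlib==3.7.1
    numpy==1.24.2
    typing-extensions==4.4.0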
+ +For example take a look at the release page for python3-pip-skeleton-cli here: +https://github.com/DiamondLightSource/python3-pip-skeleton-cli/releases/tag/3.3.0 + +There is a list of requirements*.txt files showing as assets on the release. + +There is one file for each time the CI installed the project into a virtual +environment. There are multiple of these as the CI creates a number of +different environments. + +The files are created using ``pip freeze`` and will contain a full list +of the dependencies and sub-dependencies with pinned versions. + +You can download any of these files by clicking on them. It is best to use +the one that ran with the lowest Python version as this is more likely to +be compatible with all the versions of Python in the test matrix. +i.e. ``requirements-test-ubuntu-latest-3.8.txt`` in this example. + +Applying the lock file +---------------------- + +To apply a lockfile: + +- copy the requirements file you have downloaded to the root of your + repository +- rename it to requirements.txt +- commit it into the repo +- push the changes + +The CI looks for a requirements.txt in the root and will pass it to pip +when installing each of the test environments. pip will then install exactly +the same set of packages as the previous release. + +Removing dependency locking from CI +----------------------------------- + +Once the reasons for locking the build have been resolved it is a good idea +to go back to an unlocked build. This is because you get an early indication +of any incoming problems. + +To restore unlocked builds in CI simply remove requirements.txt from the root +of the repo and push. diff --git a/docs/developer/how-to/run-tests.rst b/docs/developer/how-to/run-tests.rst new file mode 100644 index 000000000..d2e03644c --- /dev/null +++ b/docs/developer/how-to/run-tests.rst @@ -0,0 +1,12 @@ +Run the tests using pytest +========================== + +Testing is done with pytest_. It will find functions in the project that `look +like tests`_, and run them to check for errors. You can run it with:: + + $ tox -e pytest + +It will also report coverage to the commandline and to ``cov.xml``. + +.. _pytest: https://pytest.org/ +.. _look like tests: https://docs.pytest.org/explanation/goodpractices.html#test-discovery diff --git a/docs/developer/how-to/static-analysis.rst b/docs/developer/how-to/static-analysis.rst new file mode 100644 index 000000000..065920e1c --- /dev/null +++ b/docs/developer/how-to/static-analysis.rst @@ -0,0 +1,8 @@ +Run static analysis using mypy +============================== + +Static type analysis is done with mypy_. It checks type definitions in source +files without running them, and highlights potential issues where types do not +match. You can run it with:: + + $ tox -e mypy diff --git a/docs/developer/how-to/test-container.rst b/docs/developer/how-to/test-container.rst new file mode 100644 index 000000000..a4a43a6ff --- /dev/null +++ b/docs/developer/how-to/test-container.rst @@ -0,0 +1,25 @@ +Container Local Build and Test +============================== + +CI builds a runtime container for the project. The local test +checks available via ``tox -p`` do not verify this because not +all developers will have docker installed locally. + +If CI is failing to build the container, then it is best to fix and +test the problem locally. This would require that you have docker +or podman installed on your local workstation.
+ +In the following examples the command ``docker`` is interchangeable with +``podman`` depending on which container cli you have installed. + +To build the container and call it ``test``:: + + cd + docker build -t test . + +To verify that the container runs:: + + docker run -it test --help + +You can pass any other command line parameters to your application +instead of --help. diff --git a/docs/developer/how-to/update-tools.rst b/docs/developer/how-to/update-tools.rst new file mode 100644 index 000000000..c1075ee8c --- /dev/null +++ b/docs/developer/how-to/update-tools.rst @@ -0,0 +1,16 @@ +Update the tools +================ + +This module is merged with the python3-pip-skeleton_. This is a generic +Python project structure which provides a means to keep tools and +techniques in sync between multiple Python projects. To update to the +latest version of the skeleton, run:: + + $ git pull --rebase=false https://github.com/DiamondLightSource/python3-pip-skeleton + +Any merge conflicts will indicate an area where something has changed that +conflicts with the setup of the current module. Check the `closed pull requests +`_ +of the skeleton module for more details. + +.. _python3-pip-skeleton: https://DiamondLightSource.github.io/python3-pip-skeleton diff --git a/docs/developer/index.rst b/docs/developer/index.rst new file mode 100644 index 000000000..8a6369b9c --- /dev/null +++ b/docs/developer/index.rst @@ -0,0 +1,64 @@ +Developer Guide +=============== + +Documentation is split into four categories, also accessible from links in the +side-bar. + +.. grid:: 2 + :gutter: 4 + + .. grid-item-card:: :material-regular:`directions_run;3em` + + .. toctree:: + :caption: Tutorials + :maxdepth: 1 + + tutorials/dev-install + + +++ + + Tutorials for getting up and running as a developer. + + .. grid-item-card:: :material-regular:`task;3em` + + .. toctree:: + :caption: How-to Guides + :maxdepth: 1 + + how-to/contribute + how-to/build-docs + how-to/run-tests + how-to/static-analysis + how-to/lint + how-to/update-tools + how-to/make-release + how-to/pin-requirements + how-to/test-container + + +++ + + Practical step-by-step guides for day-to-day dev tasks. + + .. grid-item-card:: :material-regular:`apartment;3em` + + .. toctree:: + :caption: Explanations + :maxdepth: 1 + + explanations/decisions + + +++ + + Explanations of how and why the architecture is the way it is. + + .. grid-item-card:: :material-regular:`description;3em` + + .. toctree:: + :caption: Reference + :maxdepth: 1 + + reference/standards + + +++ + + Technical reference material on standards in use. diff --git a/docs/developer/reference/standards.rst b/docs/developer/reference/standards.rst new file mode 100644 index 000000000..5a1fd4782 --- /dev/null +++ b/docs/developer/reference/standards.rst @@ -0,0 +1,63 @@ +Standards +========= + +This document defines the code and documentation standards used in this +repository. + +Code Standards +-------------- + +The code in this repository conforms to standards set by the following tools: + +- black_ for code formatting +- ruff_ for style checks +- mypy_ for static type checking + +.. seealso:: + + How-to guides `../how-to/lint` and `../how-to/static-analysis` + +.. _documentation_standards: + +Documentation Standards +----------------------- + +Docstrings are pre-processed using the Sphinx Napoleon extension. As such, +google-style_ is considered the standard for this repository. Please use type +hints in the function signature for types. For example: + +.. 
code:: python + + def func(arg1: str, arg2: int) -> bool: + """Summary line. + + Extended description of function. + + Args: + arg1: Description of arg1 + arg2: Description of arg2 + + Returns: + Description of return value + """ + return True + +.. _google-style: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/index.html#google-vs-numpy + +Documentation is contained in the ``docs`` directory and extracted from +docstrings of the API. + +Docs follow the underlining convention:: + + Heading 1 (page title) + ======================= + + Heading 2 + --------- + + Heading 3 + ~~~~~~~~~ + +.. seealso:: + + How-to guide `../how-to/build-docs` diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst new file mode 100644 index 000000000..49ecac74c --- /dev/null +++ b/docs/developer/tutorials/dev-install.rst @@ -0,0 +1,68 @@ +Developer install +================= + +These instructions will take you through the minimal steps required to get a dev +environment set up, so you can run the tests locally. + +Clone the repository +-------------------- + +First clone the repository locally using `Git +`_:: + + $ git clone git://github.com/PandABlocks/PandABlocks-client.git + +Install dependencies +-------------------- + +You can choose to either develop on the host machine using a `venv` (which +requires python 3.8 or later) or to run in a container under `VSCode +`_ + +.. tab-set:: + + .. tab-item:: Local virtualenv + + .. code:: + + $ cd pandablocks + $ python3 -m venv venv + $ source venv/bin/activate + $ pip install -e '.[dev]' + + .. tab-item:: VSCode devcontainer + + .. code:: + + $ code pandablocks + # Click on 'Reopen in Container' when prompted + # Open a new terminal + + .. note:: + + See the epics-containers_ documentation for more complex + use cases, such as integration with podman. + +See what was installed +---------------------- + +To see a graph of the python package dependency tree type:: + + $ pipdeptree + +Build and test +-------------- + +Now you have a development environment you can run the tests in a terminal:: + + $ tox -p + +This will run in parallel the following checks: + +- `../how-to/build-docs` +- `../how-to/run-tests` +- `../how-to/static-analysis` +- `../how-to/lint` + + +.. _epics-containers: https://epics-containers.github.io/main/user/tutorials/devcontainer.html diff --git a/docs/genindex.rst b/docs/genindex.rst new file mode 100644 index 000000000..93eb8b294 --- /dev/null +++ b/docs/genindex.rst @@ -0,0 +1,5 @@ +API Index +========= + +.. + https://stackoverflow.com/a/42310803 diff --git a/docs/index.rst b/docs/index.rst index 2bd70d1cc..0023bc4d4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,74 +1,29 @@ +:html_theme.sidebar_secondary.remove: + .. include:: ../README.rst :end-before: when included in index.rst How the documentation is structured ----------------------------------- -Documentation is split into four categories, accessible from links in the side-bar. - -.. rst-class:: columns - -Tutorials -~~~~~~~~~ +The documentation is split into 2 sections: -Tutorials for installation, library and commandline usage. New users start here. - -.. toctree:: - :caption: Tutorials - :hidden: +.. grid:: 2 - tutorials/installation - tutorials/load-save - tutorials/control - tutorials/commandline-hdf + .. grid-item-card:: :material-regular:`person;4em` + :link: user/index + :link-type: doc -.. rst-class:: columns + The User Guide contains documentation on how to install and use pandablocks. -How-to Guides -~~~~~~~~~~~~~ + .. 
grid-item-card:: :material-regular:`code;4em` + :link: developer/index + :link-type: doc -Practical step-by-step guides for the more experienced user. + The Developer Guide contains documentation on how to develop and contribute changes back to pandablocks. .. toctree:: - :caption: How-to Guides :hidden: - how-to/library-hdf - how-to/poll-changes - how-to/introspect-panda - -.. rst-class:: columns - -Explanations -~~~~~~~~~~~~ - -Explanation of how the library works and why it works that way. - -.. toctree:: - :caption: Explanations - :hidden: - - explanations/sans-io - explanations/performance - -.. rst-class:: columns - -Reference -~~~~~~~~~ - -Technical reference material, for classes, methods, APIs, commands, and contributing to the project. - -.. toctree:: - :caption: Reference - :hidden: - - reference/api - reference/changelog - reference/contributing - -.. rst-class:: endcolumns - -About the documentation -~~~~~~~~~~~~~~~~~~~~~~~ - -`Why is the documentation structured this way? `_ + user/index + developer/index diff --git a/docs/user/explanations/docs-structure.rst b/docs/user/explanations/docs-structure.rst new file mode 100644 index 000000000..f25a09baa --- /dev/null +++ b/docs/user/explanations/docs-structure.rst @@ -0,0 +1,18 @@ +About the documentation +----------------------- + + :material-regular:`format_quote;2em` + + The Grand Unified Theory of Documentation + + -- David Laing + +There is a secret that needs to be understood in order to write good software +documentation: there isn't one thing called *documentation*, there are four. + +They are: *tutorials*, *how-to guides*, *technical reference* and *explanation*. +They represent four different purposes or functions, and require four different +approaches to their creation. Understanding the implications of this will help +improve most documentation - often immensely. + +`More information on this topic. `_ diff --git a/docs/explanations/performance.rst b/docs/user/explanations/performance.rst similarity index 100% rename from docs/explanations/performance.rst rename to docs/user/explanations/performance.rst diff --git a/docs/explanations/sans-io.rst b/docs/user/explanations/sans-io.rst similarity index 96% rename from docs/explanations/sans-io.rst rename to docs/user/explanations/sans-io.rst index 246c84449..e8f759686 100644 --- a/docs/explanations/sans-io.rst +++ b/docs/user/explanations/sans-io.rst @@ -48,7 +48,7 @@ Wrappers -------- Of course, these Connections are useless without connecting some I/O. To aid with -this, wrappers are included for use in `asyncio ` and blocking programs. They expose +this, wrappers are included for use in `asyncio ` and blocking programs. They expose slightly different APIs to make best use of the features of their respective concurrency frameworks. For example, to send multiple commands in fields with the `blocking` wrapper:: diff --git a/docs/how-to/introspect-panda.rst b/docs/user/how-to/introspect-panda.rst similarity index 88% rename from docs/how-to/introspect-panda.rst rename to docs/user/how-to/introspect-panda.rst index 44d65c19d..2bd129147 100644 --- a/docs/how-to/introspect-panda.rst +++ b/docs/user/how-to/introspect-panda.rst @@ -7,9 +7,9 @@ to list all blocks, and all fields inside each block, that exist. Call the following script, with the address of the PandA as the first and only command line argument: -.. literalinclude:: ../../examples/introspect_panda.py +.. 
literalinclude:: ../../../examples/introspect_panda.py -This script can be found in ``docs/examples/introspect_panda.py``. +This script can be found in ``examples/introspect_panda.py``. By examining the `BlockInfo` structure returned from `GetBlockInfo` for each Block the number and description may be acquired for every block. diff --git a/docs/how-to/library-hdf.rst b/docs/user/how-to/library-hdf.rst similarity index 94% rename from docs/how-to/library-hdf.rst rename to docs/user/how-to/library-hdf.rst index 6201e112f..54ebd97a0 100644 --- a/docs/how-to/library-hdf.rst +++ b/docs/user/how-to/library-hdf.rst @@ -13,7 +13,7 @@ Approach 1: Call the function directly If you need a one-shot configure and run application, you can use the function directly: -.. literalinclude:: ../../examples/arm_and_hdf.py +.. literalinclude:: ../../../examples/arm_and_hdf.py With the `AsyncioClient` as a `Context Manager `, this code sets up some fields of a PandA before taking a single acquisition. The code in @@ -36,7 +36,7 @@ means you can make decisions about when to start and stop acquisitions based on the `Data` objects that go past. For example, if we want to make a progress bar we could: -.. literalinclude:: ../../examples/hdf_queue_reporting.py +.. literalinclude:: ../../../examples/hdf_queue_reporting.py This time, after setting up the PandA, we create the `AsyncioClient.data` iterator ourselves. Each `Data` object we get is queued on the first `Pipeline` diff --git a/docs/how-to/poll-changes.rst b/docs/user/how-to/poll-changes.rst similarity index 100% rename from docs/how-to/poll-changes.rst rename to docs/user/how-to/poll-changes.rst diff --git a/docs/user/how-to/run-container.rst b/docs/user/how-to/run-container.rst new file mode 100644 index 000000000..7285ef9b9 --- /dev/null +++ b/docs/user/how-to/run-container.rst @@ -0,0 +1,15 @@ +Run in a container +================== + +Pre-built containers with pandablocks and its dependencies already +installed are available on `Github Container Registry +`_. + +Starting the container +---------------------- + +To pull the container from github container registry and run:: + + $ docker run ghcr.io/PandABlocks/PandABlocks-client:main --version + +To get a released version, use a numbered release instead of ``main``. diff --git a/docs/user/index.rst b/docs/user/index.rst new file mode 100644 index 000000000..c315ff2d1 --- /dev/null +++ b/docs/user/index.rst @@ -0,0 +1,67 @@ +User Guide +========== + +Documentation is split into four categories, also accessible from links in the +side-bar. + +.. grid:: 2 + :gutter: 4 + + .. grid-item-card:: :material-regular:`directions_walk;3em` + + .. toctree:: + :caption: Tutorials + :maxdepth: 1 + + tutorials/installation + tutorials/commandline-hdf + tutorials/control + tutorials/load-save + + +++ + + Tutorials for installation and typical usage. New users start here. + + .. grid-item-card:: :material-regular:`directions;3em` + + .. toctree:: + :caption: How-to Guides + :maxdepth: 1 + + how-to/run-container + how-to/introspect-panda + how-to/library-hdf + how-to/poll-changes + + +++ + + Practical step-by-step guides for the more experienced user. + + .. grid-item-card:: :material-regular:`info;3em` + + .. toctree:: + :caption: Explanations + :maxdepth: 1 + + explanations/docs-structure + explanations/performance + explanations/sans-io + + +++ + + Explanations of how the library works and why it works that way. + + .. grid-item-card:: :material-regular:`menu_book;3em` + + .. 
toctree:: + :caption: Reference + :maxdepth: 1 + + reference/api + reference/changelog + reference/contributing + ../genindex + + +++ + + Technical reference material including APIs and release notes. diff --git a/docs/reference/api.rst b/docs/user/reference/api.rst similarity index 100% rename from docs/reference/api.rst rename to docs/user/reference/api.rst diff --git a/docs/reference/appendix.rst b/docs/user/reference/appendix.rst similarity index 100% rename from docs/reference/appendix.rst rename to docs/user/reference/appendix.rst diff --git a/docs/reference/changelog.rst b/docs/user/reference/changelog.rst similarity index 100% rename from docs/reference/changelog.rst rename to docs/user/reference/changelog.rst diff --git a/docs/user/reference/contributing.rst b/docs/user/reference/contributing.rst new file mode 100644 index 000000000..ac7b6bcf3 --- /dev/null +++ b/docs/user/reference/contributing.rst @@ -0,0 +1 @@ +.. include:: ../../CONTRIBUTING.rst diff --git a/docs/tutorials/commandline-hdf.rst b/docs/user/tutorials/commandline-hdf.rst similarity index 98% rename from docs/tutorials/commandline-hdf.rst rename to docs/user/tutorials/commandline-hdf.rst index fc433d2f7..28b5f0cec 100644 --- a/docs/tutorials/commandline-hdf.rst +++ b/docs/user/tutorials/commandline-hdf.rst @@ -51,7 +51,7 @@ mode so that you can read partial acquisitions before they are complete. In the repository ``examples/plot_counter_hdf.py`` is an example of reading the file, listing the datasets, and plotting the counters: -.. literalinclude:: ../../examples/plot_counter_hdf.py +.. literalinclude:: ../../../examples/plot_counter_hdf.py Running it on ``/tmp/panda-capture-1.h5`` will show the three counter values: diff --git a/docs/tutorials/control.rst b/docs/user/tutorials/control.rst similarity index 100% rename from docs/tutorials/control.rst rename to docs/user/tutorials/control.rst diff --git a/docs/tutorials/installation.rst b/docs/user/tutorials/installation.rst similarity index 73% rename from docs/tutorials/installation.rst rename to docs/user/tutorials/installation.rst index 5038e2f7c..41131e63b 100644 --- a/docs/tutorials/installation.rst +++ b/docs/user/tutorials/installation.rst @@ -1,11 +1,5 @@ -Installation Tutorial -===================== - -.. note:: - - For installation inside DLS, please see the internal documentation on - ``dls-python3`` and ``pipenv``. Although these instructions will work - inside DLS, they are intended for external use. +Installation +============ Check your version of python ---------------------------- @@ -13,7 +7,8 @@ Check your version of python You will need python 3.7 or later. You can check your version of python by typing into a terminal:: - python3 --version + $ python3 --version + Create a virtual environment ---------------------------- @@ -21,8 +16,8 @@ Create a virtual environment It is recommended that you install into a “virtual environment” so this installation will not interfere with any existing Python software:: - python3 -m venv /path/to/venv - source /path/to/venv/bin/activate + $ python3 -m venv /path/to/venv + $ source /path/to/venv/bin/activate Installing the library diff --git a/docs/tutorials/load-save.rst b/docs/user/tutorials/load-save.rst similarity index 96% rename from docs/tutorials/load-save.rst rename to docs/user/tutorials/load-save.rst index 20a972c7b..5134c40f6 100644 --- a/docs/tutorials/load-save.rst +++ b/docs/user/tutorials/load-save.rst @@ -22,7 +22,7 @@ fields. e.g. the first few lines of the tutorial save file look like this: -.. 
literalinclude:: ../../pandablocks/saves/tutorial.sav +.. literalinclude:: ../../../src/pandablocks/saves/tutorial.sav :lines: 1-12 Load diff --git a/docs/tutorials/tutorial_layout.png b/docs/user/tutorials/tutorial_layout.png similarity index 100% rename from docs/tutorials/tutorial_layout.png rename to docs/user/tutorials/tutorial_layout.png diff --git a/examples/introspect_panda.py b/examples/introspect_panda.py index 29b2a9eb3..15112c421 100644 --- a/examples/introspect_panda.py +++ b/examples/introspect_panda.py @@ -9,7 +9,6 @@ async def introspect(): # Create a client and connect the control and data ports async with AsyncioClient(sys.argv[1]) as client: - # Get the list of all blocks in the PandA block_info = await client.send(GetBlockInfo()) # Find and print all fields for each block diff --git a/pandablocks/__init__.py b/pandablocks/__init__.py deleted file mode 100644 index 7cf448dad..000000000 --- a/pandablocks/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from pandablocks._version_git import __version__ - -__all__ = ["__version__"] diff --git a/pandablocks/_version_git.py b/pandablocks/_version_git.py deleted file mode 100644 index ec811d1bf..000000000 --- a/pandablocks/_version_git.py +++ /dev/null @@ -1,97 +0,0 @@ -# Compute a version number from a git repo or archive - -# This file is released into the public domain. Generated by: -# versiongit-1.0 (https://github.com/dls-controls/versiongit) -import os -import re -import sys -from subprocess import STDOUT, CalledProcessError, check_output - -# These will be filled in if git archive is run or by setup.py cmdclasses -GIT_REFS = "$Format:%D$" -GIT_SHA1 = "$Format:%h$" - -# Git describe gives us sha1, last version-like tag, and commits since then -CMD = "git describe --tags --dirty --always --long --match=[0-9]*[-.][0-9]*" - - -def get_version_from_git(path=None): - """Try to parse version from git describe, fallback to git archive tags""" - tag, plus, suffix = "0.0", "untagged", "" - if not GIT_SHA1.startswith("$"): - # git archive or the cmdclasses below have filled in these strings - sha1 = GIT_SHA1 - for ref_name in GIT_REFS.split(", "): - if ref_name.startswith("tag: "): - # git from 1.8.3 onwards labels archive tags "tag: TAGNAME" - tag, plus = ref_name[5:], "0" - else: - if path is None: - # If no path to git repo, choose the directory this file is in - path = os.path.dirname(os.path.abspath(__file__)) - # output is TAG-NUM-gHEX[-dirty] or HEX[-dirty] - try: - cmd_out = check_output(CMD.split(), stderr=STDOUT, cwd=path) - except Exception as e: - sys.stderr.write("%s: %s\n" % (type(e).__name__, str(e))) - if isinstance(e, CalledProcessError): - sys.stderr.write("-> %s" % e.output.decode()) - return "0.0+unknown", None, e - else: - out = cmd_out.decode().strip() - if out.endswith("-dirty"): - out = out[:-6] - suffix = ".dirty" - if "-" in out: - # There is a tag, extract it and the other pieces - match = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", out) - tag, plus, sha1 = match.groups() - else: - # No tag, just sha1 - sha1 = out - # Replace dashes in tag for dots - tag = tag.replace("-", ".") - if plus != "0" or suffix: - # Not on a tag, add additional info - tag = "%(tag)s+%(plus)s.g%(sha1)s%(suffix)s" % locals() - return tag, sha1, None - - -__version__, git_sha1, git_error = get_version_from_git() - - -def get_cmdclass(build_py=None, sdist=None): - """Create cmdclass dict to pass to setuptools.setup that will write a - _version_static.py file in our resultant sdist, wheel or egg""" - if build_py is None: - from 
setuptools.command.build_py import build_py - if sdist is None: - from setuptools.command.sdist import sdist - - def make_version_static(base_dir, pkg): - vg = os.path.join(base_dir, pkg.split(".")[0], "_version_git.py") - if os.path.isfile(vg): - lines = open(vg).readlines() - with open(vg, "w") as f: - for line in lines: - # Replace GIT_* with static versions - if line.startswith("GIT_SHA1 = "): - f.write("GIT_SHA1 = '%s'\n" % git_sha1) - elif line.startswith("GIT_REFS = "): - f.write("GIT_REFS = 'tag: %s'\n" % __version__) - else: - f.write(line) - - class BuildPy(build_py): - def run(self): - build_py.run(self) - for pkg in self.packages: - make_version_static(self.build_lib, pkg) - - class Sdist(sdist): - def make_release_tree(self, base_dir, files): - sdist.make_release_tree(self, base_dir, files) - for pkg in self.distribution.packages: - make_version_static(base_dir, pkg) - - return dict(build_py=BuildPy, sdist=Sdist) diff --git a/pyproject.toml b/pyproject.toml index 64253b60b..7fd9d918a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,115 @@ [build-system] -# Pin versions compatible with dls-python3 for reproducible wheels -requires = ["setuptools==44.1.1", "wheel==0.33.1"] +requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"] build-backend = "setuptools.build_meta" + +[project] +description = "A Python client to control and data ports of the PandABlocks TCP server" +name = "pandablocks" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] +dependencies = ["typing-extensions;python_version<'3.8'", "numpy", "click"] +dynamic = ["version"] +license.file = "Apache License 2.0" +readme = "README.rst" +requires-python = ">=3.7" + +[project.optional-dependencies] +h5py = ["h5py", "matplotlib"] +dev = [ + # A dev install will require [h5py] packages too + "pandablocks[h5py]", + "black", + "mypy", + "mock", + "types-mock", + "atomicwrites", + "typed-ast", + "pipdeptree", + "pre-commit", + "pydata-sphinx-theme>=0.12", + "pytest-cov", + "pytest-asyncio", + "ruff", + "sphinx-autobuild", + "sphinx-copybutton", + "sphinx-design", + "tox-direct", + "types-mock", +] + +[project.scripts] +pandablocks = "pandablocks.cli:cli" + +[project.urls] +GitHub = "https://github.com/PandABlocks/Pandablocks-client" + +[[project.authors]] # Further authors may be added by duplicating this section +email = "tom.cobb@diamond.ac.uk" +name = "Tom Cobb" + + +[tool.setuptools_scm] +write_to = "src/pandablocks/_version.py" + +[tool.mypy] +ignore_missing_imports = true # Ignore missing stubs in imported modules + +[tool.pytest.ini_options] +# Run pytest with all our checkers, and don't spam us with massive tracebacks on error +addopts = """ + --tb=native -vv --doctest-modules --doctest-glob="*.rst" + --cov=src/pandablocks --cov-report term --cov-report xml:cov.xml + """ +asyncio_mode = "auto" +# Doctest python code in docs, python code in src docstrings, test functions in tests +testpaths = "docs src tests" + +[tool.coverage.run] +data_file = "/tmp/pandablocks.coverage" + +[tool.coverage.paths] +# Tests are run from installed location, map back to the src directory +source = ["src", "**/site-packages/"] + +# tox must currently be configured via an embedded ini string +# See: 
https://github.com/tox-dev/tox/issues/999 +[tool.tox] +legacy_tox_ini = """ +[tox] +skipsdist=True + +[testenv:{pre-commit,mypy,pytest,docs}] +# Don't create a virtualenv for the command, requires tox-direct plugin +direct = True +passenv = * +allowlist_externals = + pytest + pre-commit + mypy + sphinx-build + sphinx-autobuild +commands = + pytest: pytest {posargs} + mypy: mypy src tests {posargs} + pre-commit: pre-commit run --all-files {posargs} + docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html +""" + + +[tool.ruff] +src = ["src", "tests"] +line-length = 88 +select = [ + "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 + "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e + "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f + "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w + "I001", # isort +] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 29e341b89..000000000 --- a/setup.cfg +++ /dev/null @@ -1,70 +0,0 @@ -[metadata] -name = pandablocks -description = A Python client to control and data ports of the PandABlocks TCP server -url = https://github.com/PandABlocks/PandABlocks-client -author = Tom Cobb -author_email = tom.cobb@diamond.ac.uk -license = Apache License 2.0 -long_description = file: README.rst -long_description_content_type = text/x-rst -classifiers = - Development Status :: 4 - Beta - License :: OSI Approved :: Apache Software License - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - -[options] -packages = find: -install_requires = - numpy - click - importlib-metadata <5.0 # 5.0 deprecated a lot of interfaces that various modules rely on - -[options.extras_require] -hdf5 = - matplotlib - h5py - -[options.entry_points] -# Include a command line script -console_scripts = - pandablocks = pandablocks.cli:cli - -[options.packages.find] -# Don't include our tests directory in the distribution -exclude = tests - -[options.package_data] -pandablocks = - saves/*.sav - -[mypy] -# Ignore missing stubs for modules we use -ignore_missing_imports = True - -[isort] -profile=black -float_to_top=true -skip=setup.py,conf.py,build - -[flake8] -# Make flake8 respect black's line length (default 88), -max-line-length = 88 -extend-ignore = - E203, # See https://github.com/PyCQA/pycodestyle/issues/373 - F811, # support typing.overload decorator - F722, # allow Annotated[typ, some_func("some string")] - -[tool:pytest] -# Run pytest with all our checkers, and don't spam us with massive tracebacks on error -addopts = - --tb=native -vv --flake8 --black --mypy --doctest-modules --doctest-glob="*.rst" - --cov=pandablocks --cov-report term --cov-report xml:cov.xml -# Enables all discovered async tests and fixtures to be automatically marked as async, even if -# they don't have a specific marker https://github.com/pytest-dev/pytest-asyncio#auto-mode -asyncio_mode = auto - -[coverage:run] -# This is covered in the versiongit test suite so exclude it here -omit = */_version_git.py diff --git a/setup.py b/setup.py deleted file mode 100644 index d11d75416..000000000 --- a/setup.py +++ /dev/null @@ -1,17 +0,0 @@ -import os -import sys - -from setuptools import setup - -# Place the directory containing _version_git on the path -for path, _, filenames in os.walk(os.path.dirname(os.path.abspath(__file__))): - if "_version_git.py" in filenames: - sys.path.append(path) - break - -from _version_git import 
__version__, get_cmdclass # type: ignore # noqa isort:skip - - -# Setup information is stored in setup.cfg but this function call -# is still necessary. -setup(cmdclass=get_cmdclass(), version=__version__) diff --git a/src/pandablocks/__init__.py b/src/pandablocks/__init__.py new file mode 100644 index 000000000..457ddb1f8 --- /dev/null +++ b/src/pandablocks/__init__.py @@ -0,0 +1,11 @@ +import sys + +if sys.version_info < (3, 8): + from importlib_metadata import version # noqa +else: + from importlib.metadata import version # noqa + +__version__ = version("pandablocks") +del version + +__all__ = ["__version__"] diff --git a/pandablocks/__main__.py b/src/pandablocks/__main__.py similarity index 100% rename from pandablocks/__main__.py rename to src/pandablocks/__main__.py diff --git a/pandablocks/_control.py b/src/pandablocks/_control.py similarity index 100% rename from pandablocks/_control.py rename to src/pandablocks/_control.py diff --git a/pandablocks/_exchange.py b/src/pandablocks/_exchange.py similarity index 100% rename from pandablocks/_exchange.py rename to src/pandablocks/_exchange.py diff --git a/pandablocks/asyncio.py b/src/pandablocks/asyncio.py similarity index 100% rename from pandablocks/asyncio.py rename to src/pandablocks/asyncio.py diff --git a/pandablocks/blocking.py b/src/pandablocks/blocking.py similarity index 100% rename from pandablocks/blocking.py rename to src/pandablocks/blocking.py diff --git a/pandablocks/cli.py b/src/pandablocks/cli.py similarity index 100% rename from pandablocks/cli.py rename to src/pandablocks/cli.py diff --git a/pandablocks/commands.py b/src/pandablocks/commands.py similarity index 99% rename from pandablocks/commands.py rename to src/pandablocks/commands.py index abb063df9..d273a66ee 100644 --- a/pandablocks/commands.py +++ b/src/pandablocks/commands.py @@ -424,7 +424,6 @@ class GetFieldInfo(Command[Dict[str, FieldInfo]]): ] = field(init=False, repr=False, default_factory=dict) def __post_init__(self): - # Map a (type, subtype) to a method that returns the appropriate # subclasss of FieldInfo, and a list of all the Commands to request. # Note that fields that do not have additional attributes are not listed. 
diff --git a/pandablocks/commands.py b/src/pandablocks/commands.py
similarity index 99%
rename from pandablocks/commands.py
rename to src/pandablocks/commands.py
index abb063df9..d273a66ee 100644
--- a/pandablocks/commands.py
+++ b/src/pandablocks/commands.py
@@ -424,7 +424,6 @@ class GetFieldInfo(Command[Dict[str, FieldInfo]]):
     ] = field(init=False, repr=False, default_factory=dict)

     def __post_init__(self):
-
         # Map a (type, subtype) to a method that returns the appropriate
         # subclasss of FieldInfo, and a list of all the Commands to request.
         # Note that fields that do not have additional attributes are not listed.
@@ -472,7 +471,6 @@ def _get_desc(self, field_name: str) -> GetLine:
     def _uint(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, maximum = yield from _execute_commands(
             self._get_desc(field_name),
             GetLine(f"{self.block}1.{field_name}.MAX"),
@@ -484,7 +482,6 @@ def _uint(
     def _scalar(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, units, scale, offset = yield from _execute_commands(
             self._get_desc(field_name),
             GetLine(f"{self.block}.{field_name}.UNITS"),
@@ -501,7 +498,6 @@ def _scalar(
     def _subtype_time(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, units_labels = yield from _execute_commands(
             self._get_desc(field_name),
             GetMultiline(f"*ENUMS.{self.block}.{field_name}.UNITS"),
@@ -514,7 +510,6 @@ def _subtype_time(
     def _enum(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, labels = yield from _execute_commands(
             self._get_desc(field_name),
             GetMultiline(f"*ENUMS.{self.block}.{field_name}"),
@@ -527,7 +522,6 @@ def _enum(
     def _time(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, units, min = yield from _execute_commands(
             self._get_desc(field_name),
             GetMultiline(f"*ENUMS.{self.block}.{field_name}.UNITS"),
@@ -541,7 +535,6 @@ def _time(
     def _bit_out(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, capture_word, offset = yield from _execute_commands(
             self._get_desc(field_name),
             GetLine(f"{self.block}1.{field_name}.CAPTURE_WORD"),
@@ -557,7 +550,6 @@ def _bit_out(
     def _bit_mux(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, max_delay, labels = yield from _execute_commands(
             self._get_desc(field_name),
             GetLine(f"{self.block}1.{field_name}.MAX_DELAY"),
@@ -573,7 +565,6 @@ def _bit_mux(
     def _pos_mux(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, labels = yield from _execute_commands(
             self._get_desc(field_name),
             GetMultiline(f"*ENUMS.{self.block}.{field_name}"),
@@ -585,7 +576,6 @@ def _pos_mux(
     def _table(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         # Ignore the ROW_WORDS attribute as it's new and won't be present on all PandAs,
         # and there's no easy way to try it and catch an error while also running other
         # Get commands at the same time
@@ -655,7 +645,6 @@ def _table(
     def _pos_out(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, capture_labels = yield from _execute_commands(
             self._get_desc(field_name),
             GetMultiline(f"*ENUMS.{self.block}.{field_name}.CAPTURE"),
@@ -667,7 +656,6 @@ def _pos_out(
     def _ext_out(
         self, field_name: str, field_type: str, field_subtype: Optional[str]
     ) -> _FieldGeneratorType:
-
         desc, capture_labels = yield from _execute_commands(
             self._get_desc(field_name),
             GetMultiline(f"*ENUMS.{self.block}.{field_name}.CAPTURE"),
@@ -845,7 +833,6 @@ def execute(self) -> ExchangeGenerator[Changes]:
         multivalue_get_commands: List[Tuple[str, GetMultiline]] = []
         for line in ex.multiline:
             if line[-1] == "<":
-
                 if self.get_multiline:
                     field = line[0:-1]
                     multivalue_get_commands.append((field, GetMultiline(field)))
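The commands.py hunks above all touch the same family of helpers: each field-info method is a generator that batches its Get requests through _execute_commands and unpacks the replies in one exchange. The following is a rough, self-contained illustration of that batching pattern using hypothetical names; it is not the pandablocks API:

    from typing import Generator, List, Tuple

    Batch = Generator[Tuple[str, ...], List[str], List[str]]

    def execute_batch(*requests: str) -> Batch:
        # Hand all requests to the caller in one round trip; replies come back in order.
        replies = yield requests
        return replies

    def uint_info(block: str, field: str) -> Generator[Tuple[str, ...], List[str], dict]:
        # Loosely mirrors the _uint helper: ask for the description and MAX together.
        desc, maximum = yield from execute_batch(
            f"*DESC.{block}.{field}?",
            f"{block}1.{field}.MAX?",
        )
        return {"description": desc, "max": int(maximum)}
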
diff --git a/pandablocks/connections.py b/src/pandablocks/connections.py
similarity index 100%
rename from pandablocks/connections.py
rename to src/pandablocks/connections.py
diff --git a/pandablocks/hdf.py b/src/pandablocks/hdf.py
similarity index 100%
rename from pandablocks/hdf.py
rename to src/pandablocks/hdf.py
diff --git a/pandablocks/responses.py b/src/pandablocks/responses.py
similarity index 100%
rename from pandablocks/responses.py
rename to src/pandablocks/responses.py
diff --git a/pandablocks/saves/tutorial.sav b/src/pandablocks/saves/tutorial.sav
similarity index 100%
rename from pandablocks/saves/tutorial.sav
rename to src/pandablocks/saves/tutorial.sav
diff --git a/pandablocks/utils.py b/src/pandablocks/utils.py
similarity index 100%
rename from pandablocks/utils.py
rename to src/pandablocks/utils.py
diff --git a/tests/conftest.py b/tests/conftest.py
index 60f02aba5..afb76cf25 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -231,7 +231,6 @@ def fast_dump_expected():


 class DummyServer:
-
     # Flag for useful debug output when writing tests
     # for diagnosing mismatching sent data.
     debug = False
diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py
index 1d49d3685..9e3902a8d 100644
--- a/tests/test_asyncio.py
+++ b/tests/test_asyncio.py
@@ -38,7 +38,7 @@ async def test_asyncio_data(
 ):
     if not disarmed:
         # simulate getting the data without the END marker as if arm was not pressed
-        fast_dump = map(lambda x: x.split(b"END")[0], fast_dump)
+        fast_dump = (x.split(b"END")[0] for x in fast_dump)
         fast_dump_expected = list(fast_dump_expected)[:-1]
     dummy_server_async.data = fast_dump
     events = []
diff --git a/tests/test_utils.py b/tests/test_utils.py
index f11e7218e..e9424b34c 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -230,13 +230,13 @@ def table_data_1() -> List[str]:

 @pytest.fixture
 def table_2_np_arrays() -> Dict[str, UnpackedArray]:
-    table: Dict[str, UnpackedArray] = dict(
-        REPEATS=np.array([1, 0], dtype=np.uint32),
-        TRIGGER=["Immediate", "Immediate"],
-        POSITION=np.array([-20, 2**31 - 1], dtype=np.int32),
-        TIME1=np.array([12, 2**32 - 1], dtype=np.uint32),
-        TIME2=np.array([32, 1], dtype=np.uint32),
-    )
+    table: Dict[str, UnpackedArray] = {
+        "REPEATS": np.array([1, 0], dtype=np.uint32),
+        "TRIGGER": ["Immediate", "Immediate"],
+        "POSITION": np.array([-20, 2**31 - 1], dtype=np.int32),
+        "TIME1": np.array([12, 2**32 - 1], dtype=np.uint32),
+        "TIME2": np.array([32, 1], dtype=np.uint32),
+    }
     table["OUTA1"] = np.array([0, 1], dtype=np.uint8)
     table["OUTA2"] = np.array([1, 0], dtype=np.uint8)