diff --git a/.copier-answers.yml b/.copier-answers.yml
new file mode 100644
index 00000000..c499717c
--- /dev/null
+++ b/.copier-answers.yml
@@ -0,0 +1,15 @@
+# Changes here will be overwritten by Copier
+_commit: 1.2.0
+_src_path: gh:DiamondLightSource/python-copier-template
+author_email: tom.cobb@diamond.ac.uk
+author_name: Tom Cobb
+component_owner: group:default/sscc
+description: Specify step and flyscan paths in a serializable, efficient and Pythonic
+ way
+distribution_name: scanspec
+docker: true
+docs_type: sphinx
+git_platform: github.com
+github_org: dls-controls
+package_name: scanspec
+repo_name: scanspec
diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
deleted file mode 100644
index 0266d11e..00000000
--- a/.devcontainer/Dockerfile
+++ /dev/null
@@ -1,37 +0,0 @@
-# This file is for use as a devcontainer and a runtime container
-#
-# The devcontainer should use the build target and run as root with podman
-# or docker with user namespaces.
-#
-FROM python:3.11 as build
-
-ARG PIP_OPTIONS
-
-# Add any system dependencies for the developer/build environment here e.g.
-# RUN apt-get update && apt-get upgrade -y && \
-# apt-get install -y --no-install-recommends \
-# desired-packages \
-# && rm -rf /var/lib/apt/lists/*
-
-# set up a virtual environment and put it in PATH
-RUN python -m venv /venv
-ENV PATH=/venv/bin:$PATH
-
-# Copy any required context for the pip install over
-COPY . /context
-WORKDIR /context
-
-# install python package into /venv
-RUN pip install ${PIP_OPTIONS}
-
-FROM python:3.11-slim as runtime
-
-# Add apt-get system dependecies for runtime here if needed
-
-# copy the virtual environment from the build stage and put it in PATH
-COPY --from=build /venv/ /venv/
-ENV PATH=/venv/bin:$PATH
-
-# change this entrypoint if it is not the same as the repo
-ENTRYPOINT ["scanspec"]
-CMD ["--version"]
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index f5e71fe9..79b85ff4 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -2,53 +2,45 @@
{
"name": "Python 3 Developer Container",
"build": {
- "dockerfile": "Dockerfile",
- "target": "build",
- // Only upgrade pip, we will install the project below
- "args": {
- "PIP_OPTIONS": "--upgrade pip"
- }
+ "dockerfile": "../Dockerfile",
+ "target": "developer"
},
"remoteEnv": {
+ // Allow X11 apps to run inside the container
"DISPLAY": "${localEnv:DISPLAY}"
},
- // Add the URLs of features you want added when the container is built.
- "features": {
- "ghcr.io/devcontainers/features/common-utils:1": {
- "username": "none",
- "upgradePackages": false
- }
- },
- // Set *default* container specific settings.json values on container create.
- "settings": {
- "python.defaultInterpreterPath": "/venv/bin/python"
- },
"customizations": {
"vscode": {
+ // Set *default* container specific settings.json values on container create.
+ "settings": {
+ "python.defaultInterpreterPath": "/venv/bin/python"
+ },
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-python.python",
+ "github.vscode-github-actions",
"tamasfe.even-better-toml",
"redhat.vscode-yaml",
- "ryanluker.vscode-coverage-gutters"
+ "ryanluker.vscode-coverage-gutters",
+ "charliermarsh.ruff",
+ "ms-azuretools.vscode-docker"
]
}
},
- // Make sure the files we are mapping into the container exist on the host
- "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'",
+ "features": {
+ // Some default things like git config
+ "ghcr.io/devcontainers/features/common-utils:2": {
+ "upgradePackages": false
+ }
+ },
"runArgs": [
+ // Allow the container to access the host X11 display and EPICS CA
"--net=host",
- "--security-opt=label=type:container_runtime_t"
- ],
- "mounts": [
- "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind",
- "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind",
- // map in home directory - not strictly necessary but useful
- "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached"
+    // Make sure SELinux does not block access to host filesystems like /tmp
+ "--security-opt=label=disable"
],
- // make the workspace folder the same inside and outside of the container
- "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind",
- "workspaceFolder": "${localWorkspaceFolder}",
+ // Mount the parent as /workspaces so we can pip install peers as editable
+ "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind",
// After the container is created, install the python project in editable form
- "postCreateCommand": "pip install -e .[dev]"
+ "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install"
}
\ No newline at end of file
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 00000000..58db3d99
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,27 @@
+# Contribute to the project
+
+Contributions and issues are most welcome! All issues and pull requests are
+handled through [GitHub](https://github.com/dls-controls/scanspec/issues). Also, please check for any existing issues before
+filing a new one. If you have a great idea but it involves big changes, please
+file a ticket before making a pull request! We want to make sure you don't spend
+your time coding something that might not fit the scope of the project.
+
+## Issue or Discussion?
+
+Github also offers [discussions](https://github.com/dls-controls/scanspec/discussions) as a place to ask questions and share ideas. If
+your issue is open ended and it is not obvious when it can be "closed", please
+raise it as a discussion instead.
+
+## Code Coverage
+
+While 100% code coverage does not make a library bug-free, it significantly
+reduces the number of easily caught bugs! Please make sure coverage remains the
+same or is improved by a pull request!
+
+## Developer Information
+
+It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits the project's needs.
+
+This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects.
+
+For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/1.2.0/how-to.html).
diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst
deleted file mode 100644
index a2fa3ae6..00000000
--- a/.github/CONTRIBUTING.rst
+++ /dev/null
@@ -1,35 +0,0 @@
-Contributing to the project
-===========================
-
-Contributions and issues are most welcome! All issues and pull requests are
-handled through GitHub_. Also, please check for any existing issues before
-filing a new one. If you have a great idea but it involves big changes, please
-file a ticket before making a pull request! We want to make sure you don't spend
-your time coding something that might not fit the scope of the project.
-
-.. _GitHub: https://github.com/dls-controls/scanspec/issues
-
-Issue or Discussion?
---------------------
-
-Github also offers discussions_ as a place to ask questions and share ideas. If
-your issue is open ended and it is not obvious when it can be "closed", please
-raise it as a discussion instead.
-
-.. _discussions: https://github.com/dls-controls/scanspec/discussions
-
-Code coverage
--------------
-
-While 100% code coverage does not make a library bug-free, it significantly
-reduces the number of easily caught bugs! Please make sure coverage remains the
-same or is improved by a pull request!
-
-Developer guide
----------------
-
-The `Developer Guide`_ contains information on setting up a development
-environment, running the tests and what standards the code and documentation
-should follow.
-
-.. _Developer Guide: https://diamondlightsource.github.io/scanspec/main/developer/how-to/contribute.html
diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml
index 25a146d1..d33e0805 100644
--- a/.github/actions/install_requirements/action.yml
+++ b/.github/actions/install_requirements/action.yml
@@ -1,58 +1,34 @@
name: Install requirements
-description: Run pip install with requirements and upload resulting requirements
+description: Install a version of python then call pip install and report what was installed
inputs:
- requirements_file:
- description: Name of requirements file to use and upload
- required: true
- install_options:
+ python-version:
+ description: Python version to install, default is from Dockerfile
+ default: "dev"
+ pip-install:
description: Parameters to pass to pip install
- required: true
- python_version:
- description: Python version to install
- default: "3.x"
+ default: "$([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e .[dev]"
runs:
using: composite
-
steps:
- - name: Setup python
- uses: actions/setup-python@v4
- with:
- python-version: ${{ inputs.python_version }}
-
- - name: Pip install
- run: |
- touch ${{ inputs.requirements_file }}
- # -c uses requirements.txt as constraints, see 'Validate requirements file'
- pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }}
- shell: bash
-
- - name: Create lockfile
+ - name: Get version of python
run: |
- mkdir -p lockfiles
- pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }}
- # delete the self referencing line and make sure it isn't blank
- sed -i '/file:/d' lockfiles/${{ inputs.requirements_file }}
+ PYTHON_VERSION="${{ inputs.python-version }}"
+ if [ $PYTHON_VERSION == "dev" ]; then
+ PYTHON_VERSION=$(sed -n "s/ARG PYTHON_VERSION=//p" Dockerfile)
+ fi
+ echo "PYTHON_VERSION=$PYTHON_VERSION" >> "$GITHUB_ENV"
shell: bash
- - name: Upload lockfiles
- uses: actions/upload-artifact@v3
+ - name: Setup python
+ uses: actions/setup-python@v5
with:
- name: lockfiles
- path: lockfiles
+ python-version: ${{ env.PYTHON_VERSION }}
- # This eliminates the class of problems where the requirements being given no
- # longer match what the packages themselves dictate. E.g. In the rare instance
- # where I install some-package which used to depend on vulnerable-dependency
- # but now uses good-dependency (despite being nominally the same version)
- # pip will install both if given a requirements file with -r
- - name: If requirements file exists, check it matches pip installed packages
- run: |
- if [ -s ${{ inputs.requirements_file }} ]; then
- if ! diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then
- echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive"
- exit 1
- fi
- fi
+ - name: Install packages
+ run: pip install ${{ inputs.pip-install }}
shell: bash
+ - name: Report what was installed
+ run: pip freeze
+ shell: bash
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index fb7c6ee6..184ba363 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -9,8 +9,16 @@ updates:
directory: "/"
schedule:
interval: "weekly"
+ groups:
+ actions:
+ patterns:
+ - "*"
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
+ groups:
+ dev-dependencies:
+ patterns:
+ - "*"
diff --git a/.github/pages/index.html b/.github/pages/index.html
index 661903e0..80f0a009 100644
--- a/.github/pages/index.html
+++ b/.github/pages/index.html
@@ -2,10 +2,10 @@
- Redirecting to master branch
+ Redirecting to main branch
\ No newline at end of file
diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py
index 39c12772..e2c8e6f6 100755
--- a/.github/pages/make_switcher.py
+++ b/.github/pages/make_switcher.py
@@ -24,7 +24,7 @@ def get_sorted_tags_list() -> List[str]:
return report_output(stdout, "Tags list")
-def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]:
+def get_versions(ref: str, add: Optional[str]) -> List[str]:
"""Generate the file containing the list of all GitHub Pages builds."""
# Get the directories (i.e. builds) from the GitHub Pages branch
try:
@@ -36,9 +36,6 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st
# Add and remove from the list of builds
if add:
builds.add(add)
- if remove:
- assert remove in builds, f"Build '{remove}' not in {sorted(builds)}"
- builds.remove(remove)
# Get a sorted list of tags
tags = get_sorted_tags_list()
@@ -59,26 +56,22 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st
def write_json(path: Path, repository: str, versions: str):
org, repo_name = repository.split("/")
struct = [
- dict(version=version, url=f"https://{org}.github.io/{repo_name}/{version}/")
+ {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"}
for version in versions
]
text = json.dumps(struct, indent=2)
print(f"JSON switcher:\n{text}")
- path.write_text(text)
+ path.write_text(text, encoding="utf-8")
def main(args=None):
parser = ArgumentParser(
- description="Make a versions.txt file from gh-pages directories"
+ description="Make a versions.json file from gh-pages directories"
)
parser.add_argument(
"--add",
help="Add this directory to the list of existing directories",
)
- parser.add_argument(
- "--remove",
- help="Remove this directory from the list of existing directories",
- )
parser.add_argument(
"repository",
help="The GitHub org and repository name: ORG/REPO",
@@ -91,7 +84,7 @@ def main(args=None):
args = parser.parse_args(args)
# Write the versions file
- versions = get_versions("origin/gh-pages", args.add, args.remove)
+ versions = get_versions("origin/gh-pages", args.add)
write_json(args.output, args.repository, versions)
diff --git a/.github/workflows/_check.yml b/.github/workflows/_check.yml
new file mode 100644
index 00000000..a6139c19
--- /dev/null
+++ b/.github/workflows/_check.yml
@@ -0,0 +1,27 @@
+on:
+ workflow_call:
+ outputs:
+ branch-pr:
+ description: The PR number if the branch is in one
+ value: ${{ jobs.pr.outputs.branch-pr }}
+
+jobs:
+ pr:
+ runs-on: "ubuntu-latest"
+ outputs:
+ branch-pr: ${{ steps.script.outputs.result }}
+ steps:
+ - uses: actions/github-script@v7
+ id: script
+ if: github.event_name == 'push'
+ with:
+ script: |
+ const prs = await github.rest.pulls.list({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ head: context.repo.owner + ':${{ github.ref_name }}'
+ })
+ if (prs.data.length) {
+ console.log(`::notice ::Skipping CI on branch push as it is already run in PR #${prs.data[0]["number"]}`)
+ return prs.data[0]["number"]
+ }
diff --git a/.github/workflows/_container.yml b/.github/workflows/_container.yml
new file mode 100644
index 00000000..4857ee9e
--- /dev/null
+++ b/.github/workflows/_container.yml
@@ -0,0 +1,56 @@
+on:
+ workflow_call:
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ # Need this to get version number from last tag
+ fetch-depth: 0
+
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Log in to GitHub Docker Registry
+ if: github.event_name != 'pull_request'
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and export to Docker local cache
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ # Need load and tags so we can test it below
+ load: true
+ tags: tag_for_testing
+
+ - name: Test cli works in cached runtime image
+ run: docker run --rm tag_for_testing --version
+
+ - name: Create tags for publishing image
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ghcr.io/${{ github.repository }}
+ tags: |
+ type=ref,event=tag
+ type=raw,value=latest
+
+ - name: Push cached image to container registry
+ if: github.ref_type == 'tag'
+ uses: docker/build-push-action@v5
+ # This does not build the image again, it will find the image in the
+ # Docker cache and publish it
+ with:
+ context: .
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/_dist.yml b/.github/workflows/_dist.yml
new file mode 100644
index 00000000..b1c4c93c
--- /dev/null
+++ b/.github/workflows/_dist.yml
@@ -0,0 +1,36 @@
+on:
+ workflow_call:
+
+jobs:
+ build:
+ runs-on: "ubuntu-latest"
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ # Need this to get version number from last tag
+ fetch-depth: 0
+
+ - name: Build sdist and wheel
+ run: >
+ export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) &&
+ pipx run build
+
+ - name: Upload sdist and wheel as artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: dist
+ path: dist
+
+ - name: Check for packaging errors
+ run: pipx run twine check --strict dist/*
+
+ - name: Install produced wheel
+ uses: ./.github/actions/install_requirements
+ with:
+ pip-install: dist/*.whl
+
+ - name: Test module --version works using the installed wheel
+ # If more than one module in src/ replace with module name to test
+ run: python -m $(ls --hide='*.egg-info' src | head -1) --version
diff --git a/.github/workflows/docs.yml b/.github/workflows/_docs.yml
similarity index 67%
rename from .github/workflows/docs.yml
rename to .github/workflows/_docs.yml
index 1c6efca1..40446e33 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/_docs.yml
@@ -1,38 +1,39 @@
-name: Docs CI
-
on:
- push:
- pull_request:
+ workflow_call:
jobs:
- docs:
- if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
+ build:
runs-on: ubuntu-latest
steps:
- name: Avoid git conflicts when tag and branch pushed at same time
- if: startsWith(github.ref, 'refs/tags')
+ if: github.ref_type == 'tag'
run: sleep 60
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
# Need this to get version number from last tag
fetch-depth: 0
- name: Install system packages
- # Can delete this if you don't use graphviz in your docs
run: sudo apt-get install graphviz
- name: Install python packages
uses: ./.github/actions/install_requirements
- with:
- requirements_file: requirements-dev-3.x.txt
- install_options: -e .[dev,service,plotting]
- name: Build docs
run: tox -e docs
+ - name: Remove environment.pickle
+ run: rm build/html/.doctrees/environment.pickle
+
+ - name: Upload built docs artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: docs
+ path: build
+
- name: Sanitize ref name for docs version
run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV
@@ -43,11 +44,11 @@ jobs:
run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json
- name: Publish Docs to gh-pages
- if: github.event_name == 'push' && github.actor != 'dependabot[bot]'
+ if: github.ref_type == 'tag' || github.ref_name == 'main'
# We pin to the SHA, not the tag, for security reasons.
# https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
- uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0
+ uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: .github/pages
- keep_files: true
+ keep_files: true
\ No newline at end of file
diff --git a/.github/workflows/_pypi.yml b/.github/workflows/_pypi.yml
new file mode 100644
index 00000000..f2ead1bc
--- /dev/null
+++ b/.github/workflows/_pypi.yml
@@ -0,0 +1,22 @@
+on:
+ workflow_call:
+ secrets:
+ PYPI_TOKEN:
+ required: true
+
+jobs:
+ upload:
+ runs-on: ubuntu-latest
+ environment: release
+
+ steps:
+ - name: Download dist artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: dist
+ path: dist
+
+ - name: Publish to PyPI using trusted publishing
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ password: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml
new file mode 100644
index 00000000..b49fa7dc
--- /dev/null
+++ b/.github/workflows/_release.yml
@@ -0,0 +1,32 @@
+on:
+ workflow_call:
+
+jobs:
+ artifacts:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ merge-multiple: true
+
+ - name: Zip up docs
+ run: |
+ set -vxeuo pipefail
+ if [ -d html ]; then
+ mv html $GITHUB_REF_NAME
+ zip -r docs.zip $GITHUB_REF_NAME
+ rm -rf $GITHUB_REF_NAME
+ fi
+
+ - name: Create GitHub Release
+ # We pin to the SHA, not the tag, for security reasons.
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
+ uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15
+ with:
+ prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }}
+ files: "*"
+ generate_release_notes: true
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml
new file mode 100644
index 00000000..f652d414
--- /dev/null
+++ b/.github/workflows/_test.yml
@@ -0,0 +1,62 @@
+on:
+ workflow_call:
+ inputs:
+ python-version:
+ type: string
+ description: The version of python to install
+ required: true
+ runs-on:
+ type: string
+ description: The runner to run this job on
+ required: true
+ secrets:
+ CODECOV_TOKEN:
+ required: true
+
+env:
+ # https://github.com/pytest-dev/pytest/issues/2042
+ PY_IGNORE_IMPORTMISMATCH: "1"
+
+jobs:
+ run:
+ runs-on: ${{ inputs.runs-on }}
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ # Need this to get version number from last tag
+ fetch-depth: 0
+
+ - if: inputs.python-version == 'dev'
+ name: Install dev versions of python packages
+ uses: ./.github/actions/install_requirements
+
+ - if: inputs.python-version == 'dev'
+ name: Write the requirements as an artifact
+ run: pip freeze --exclude-editable > /tmp/dev-requirements.txt
+
+ - if: inputs.python-version == 'dev'
+ name: Upload dev-requirements.txt
+ uses: actions/upload-artifact@v4
+ with:
+ name: dev-requirements
+ path: /tmp/dev-requirements.txt
+
+ - if: inputs.python-version != 'dev'
+ name: Install latest versions of python packages
+ uses: ./.github/actions/install_requirements
+ with:
+ python-version: ${{ inputs.python-version }}
+ pip-install: ".[dev]"
+
+ - name: Run tests
+ run: tox -e tests
+
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v4
+ with:
+ name: ${{ inputs.python-version }}/${{ inputs.runs-on }}
+ files: cov.xml
+ env:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.github/workflows/_tox.yml b/.github/workflows/_tox.yml
new file mode 100644
index 00000000..a13536d3
--- /dev/null
+++ b/.github/workflows/_tox.yml
@@ -0,0 +1,22 @@
+on:
+ workflow_call:
+ inputs:
+ tox:
+ type: string
+ description: What to run under tox
+ required: true
+
+
+jobs:
+ run:
+ runs-on: "ubuntu-latest"
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Install python packages
+ uses: ./.github/actions/install_requirements
+
+ - name: Run tox
+ run: tox -e ${{ inputs.tox }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 00000000..7e17ec93
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,68 @@
+name: CI
+
+on:
+ push:
+ pull_request:
+
+jobs:
+ check:
+ uses: ./.github/workflows/_check.yml
+
+ lint:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_tox.yml
+ with:
+ tox: pre-commit,type-checking
+
+ test:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ strategy:
+ matrix:
+ runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest
+ python-version: ["3.8", "3.9", "3.10", "3.11"]
+ include:
+ # Include one that runs in the dev environment
+ - runs-on: "ubuntu-latest"
+ python-version: "dev"
+ fail-fast: false
+ uses: ./.github/workflows/_test.yml
+ with:
+ runs-on: ${{ matrix.runs-on }}
+ python-version: ${{ matrix.python-version }}
+ secrets:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+
+ container:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_container.yml
+ permissions:
+ packages: write
+
+ docs:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_docs.yml
+
+ dist:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_dist.yml
+
+ pypi:
+ if: github.ref_type == 'tag'
+ needs: dist
+ uses: ./.github/workflows/_pypi.yml
+ permissions:
+ id-token: write
+ secrets:
+ PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+
+ release:
+ if: github.ref_type == 'tag'
+ needs: [dist, docs]
+ uses: ./.github/workflows/_release.yml
+ permissions:
+ contents: write
diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml
deleted file mode 100644
index c0fd956d..00000000
--- a/.github/workflows/code.yml
+++ /dev/null
@@ -1,210 +0,0 @@
-name: Code CI
-
-on:
- push:
- pull_request:
- schedule:
- # Run weekly to check latest versions of dependencies
- - cron: "0 8 * * WED"
-env:
- # The target python version, which must match the Dockerfile version
- CONTAINER_PYTHON: "3.11"
-
-jobs:
- lint:
- # pull requests are a duplicate of a branch push if within the same repo.
- if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout
- uses: actions/checkout@v3
-
- - name: Install python packages
- uses: ./.github/actions/install_requirements
- with:
- requirements_file: requirements-dev-3.x.txt
- install_options: -e .[dev]
-
- - name: Lint
- run: tox -e pre-commit,mypy
-
- - name: Install minimum python version
- uses: actions/setup-python@v2
- with:
- python-version: ${{ matrix.python }}
-
- test:
- if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
- strategy:
- fail-fast: false
- matrix:
- os: ["ubuntu-latest"] # can add windows-latest, macos-latest
- python: ["3.9", "3.10", "3.11"]
- install: ["-e .[dev,service,plotting]"]
- # Make one version be non-editable to test both paths of version code
- include:
- - os: "ubuntu-latest"
- python: "3.8"
- install: ".[dev,service,plotting]"
-
- runs-on: ${{ matrix.os }}
- env:
- # https://github.com/pytest-dev/pytest/issues/2042
- PY_IGNORE_IMPORTMISMATCH: "1"
-
- steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- # Need this to get version number from last tag
- fetch-depth: 0
-
- - name: Install python packages
- uses: ./.github/actions/install_requirements
- with:
- python_version: ${{ matrix.python }}
- requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt
- install_options: ${{ matrix.install }}
-
- - name: List dependency tree
- run: pipdeptree
-
- - name: Run tests
- run: pytest
-
- - name: Upload coverage to Codecov
- uses: codecov/codecov-action@v3
- with:
- name: ${{ matrix.python }}/${{ matrix.os }}
- files: cov.xml
-
- dist:
- if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository
- runs-on: "ubuntu-latest"
-
- steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- # Need this to get version number from last tag
- fetch-depth: 0
-
- - name: Build sdist and wheel
- run: |
- export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \
- pipx run build
-
- - name: Upload sdist and wheel as artifacts
- uses: actions/upload-artifact@v3
- with:
- name: dist
- path: dist
-
- - name: Check for packaging errors
- run: pipx run twine check --strict dist/*
-
- - name: Install python packages
- uses: ./.github/actions/install_requirements
- with:
- python_version: ${{env.CONTAINER_PYTHON}}
- requirements_file: requirements.txt
- install_options: dist/*.whl
-
- - name: Test module --version works using the installed wheel
- # If more than one module in src/ replace with module name to test
- run: python -m $(ls src | head -1) --version
-
- container:
- needs: [lint, dist, test]
- runs-on: ubuntu-latest
-
- permissions:
- contents: read
- packages: write
-
- steps:
- - name: Checkout
- uses: actions/checkout@v3
-
- # image names must be all lower case
- - name: Generate image repo name
- run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV
-
- - name: Download wheel and lockfiles
- uses: actions/download-artifact@v3
- with:
- path: .devcontainer
-
- - name: Log in to GitHub Docker Registry
- if: github.event_name != 'pull_request'
- uses: docker/login-action@v2
- with:
- registry: ghcr.io
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Docker meta
- id: meta
- uses: docker/metadata-action@v4
- with:
- images: ${{ env.IMAGE_REPOSITORY }}
- tags: |
- type=ref,event=tag
- type=raw,value=latest
-
- - name: Set up Docker Buildx
- id: buildx
- uses: docker/setup-buildx-action@v2
-
- - name: Build runtime image
- uses: docker/build-push-action@v3
- with:
- build-args: |
- PIP_OPTIONS=-r lockfiles/requirements.txt dist/*.whl
- push: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }}
- load: ${{ ! (github.event_name == 'push' && startsWith(github.ref, 'refs/tags')) }}
- tags: ${{ steps.meta.outputs.tags }}
- labels: ${{ steps.meta.outputs.labels }}
- context: .devcontainer
- # If you have a long docker build, uncomment the following to turn on caching
- # For short build times this makes it a little slower
- #cache-from: type=gha
- #cache-to: type=gha,mode=max
-
- - name: Test cli works in runtime image
- run: docker run ${{ env.IMAGE_REPOSITORY }} --version
-
- release:
- # upload to PyPI and make a release on every tag
- needs: [lint, dist, test]
- if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }}
- runs-on: ubuntu-latest
- env:
- HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }}
-
- steps:
- - uses: actions/download-artifact@v3
-
- - name: Fixup blank lockfiles
- # Github release artifacts can't be blank
- run: for f in lockfiles/*; do [ -s $f ] || echo '# No requirements' >> $f; done
-
- - name: Github Release
- # We pin to the SHA, not the tag, for security reasons.
- # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
- uses: softprops/action-gh-release@1e07f4398721186383de40550babbdf2b84acfc5 # v0.1.14
- with:
- prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }}
- files: |
- dist/*
- lockfiles/*
- generate_release_notes: true
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Publish to PyPI
- if: ${{ env.HAS_PYPI_TOKEN }}
- uses: pypa/gh-action-pypi-publish@release/v1
- with:
- password: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml
deleted file mode 100644
index a67e1881..00000000
--- a/.github/workflows/docs_clean.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: Docs Cleanup CI
-
-# delete branch documentation when a branch is deleted
-# also allow manually deleting a documentation version
-on:
- delete:
- workflow_dispatch:
- inputs:
- version:
- description: "documentation version to DELETE"
- required: true
- type: string
-
-jobs:
- remove:
- if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch'
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- ref: gh-pages
-
- - name: removing documentation for branch ${{ github.event.ref }}
- if: ${{ github.event_name != 'workflow_dispatch' }}
- run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV
-
- - name: manually removing documentation version ${{ github.event.inputs.version }}
- if: ${{ github.event_name == 'workflow_dispatch' }}
- run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV
-
- - name: Sanitize ref name for docs version
- run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV
-
- - name: update index and push changes
- run: |
- rm -r $DOCS_VERSION
- python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json
- git config --global user.name 'GitHub Actions Docs Cleanup CI'
- git config --global user.email 'GithubActionsCleanup@noreply.github.com'
- git commit -am "Removing redundant docs version $DOCS_VERSION"
- git push
diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml
deleted file mode 100644
index 02d8df4c..00000000
--- a/.github/workflows/linkcheck.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-name: Link Check
-
-on:
- workflow_dispatch:
- schedule:
- # Run weekly to check URL links still resolve
- - cron: "0 8 * * WED"
-
-jobs:
- docs:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout
- uses: actions/checkout@v3
-
- - name: Install python packages
- uses: ./.github/actions/install_requirements
- with:
- requirements_file: requirements-dev-3.x.txt
- install_options: -e .[dev]
-
- - name: Check links
- run: tox -e docs build -- -b linkcheck
diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml
new file mode 100644
index 00000000..e2a0fd1b
--- /dev/null
+++ b/.github/workflows/periodic.yml
@@ -0,0 +1,13 @@
+name: Periodic
+
+on:
+ workflow_dispatch:
+ schedule:
+ # Run weekly to check URL links still resolve
+ - cron: "0 8 * * WED"
+
+jobs:
+ linkcheck:
+ uses: ./.github/workflows/_tox.yml
+ with:
+ tox: docs build -- -b linkcheck
diff --git a/.gitignore b/.gitignore
index 9fbb6bfe..2593ec75 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,7 +8,6 @@ __pycache__/
# Distribution / packaging
.Python
env/
-.venv
build/
develop-eggs/
dist/
@@ -67,3 +66,5 @@ venv*
# further build artifacts
lockfiles/
+# ruff cache
+.ruff_cache/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5e270b08..5a4cbf7b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v2.3.0
+ rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-yaml
@@ -8,16 +8,16 @@ repos:
- repo: local
hooks:
- - id: black
- name: Run black
- stages: [commit]
+ - id: ruff
+ name: lint with ruff
language: system
- entry: black --check --diff
+ entry: ruff check --force-exclude
types: [python]
+ require_serial: true
- - id: flake8
- name: Run flake8
- stages: [commit]
+ - id: ruff-format
+ name: format with ruff
language: system
- entry: flake8
+ entry: ruff format --force-exclude
types: [python]
+ require_serial: true
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index 81922991..66ad6324 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -1,9 +1,5 @@
{
"recommendations": [
"ms-vscode-remote.remote-containers",
- "ms-python.python",
- "tamasfe.even-better-toml",
- "redhat.vscode-yaml",
- "ryanluker.vscode-coverage-gutters"
]
}
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 14815264..df954ee5 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -14,12 +14,6 @@
"debug-test"
],
"console": "integratedTerminal",
- "env": {
- // The default config in setup.cfg's "[tool:pytest]" adds coverage.
- // Cannot have coverage and debugging at the same time.
- // https://github.com/microsoft/vscode-python/issues/693
- "PYTEST_ADDOPTS": "--no-cov"
- },
},
{
"name": "Python (Current File)",
diff --git a/.vscode/settings.json b/.vscode/settings.json
index b73c801b..c129d991 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,16 +1,11 @@
{
- "python.linting.pylintEnabled": false,
- "python.linting.flake8Enabled": true,
- "python.linting.mypyEnabled": true,
- "python.linting.pydocstyleEnabled": true,
- "python.linting.enabled": true,
- "python.testing.pytestArgs": [],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
- "python.formatting.provider": "black",
- "python.languageServer": "Pylance",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
- "source.organizeImports": true
- }
+ "source.organizeImports": "explicit"
+ },
+ "[python]": {
+ "editor.defaultFormatter": "charliermarsh.ruff",
+ },
}
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..5be05df7
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,29 @@
+# The devcontainer should use the developer target and run as root with podman
+# or docker with user namespaces.
+ARG PYTHON_VERSION=3.11
+FROM python:${PYTHON_VERSION} as developer
+
+# Add any system dependencies for the developer/build environment here
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ graphviz \
+ && rm -rf /var/lib/apt/lists/*
+
+# Set up a virtual environment and put it in PATH
+RUN python -m venv /venv
+ENV PATH=/venv/bin:$PATH
+
+# The build stage installs the context into the venv
+FROM developer as build
+COPY . /context
+WORKDIR /context
+RUN pip install .
+
+# The runtime stage copies the built venv into a slim runtime container
+FROM python:${PYTHON_VERSION}-slim as runtime
+# Add apt-get system dependencies for runtime here if needed
+COPY --from=build /venv/ /venv/
+ENV PATH=/venv/bin:$PATH
+
+# Change this entrypoint if it is not the same as the repo name
+ENTRYPOINT ["scanspec"]
+CMD ["--version"]
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..98f45f39
--- /dev/null
+++ b/README.md
@@ -0,0 +1,74 @@
+
+
+[![CI](https://github.com/dls-controls/scanspec/actions/workflows/ci.yml/badge.svg)](https://github.com/dls-controls/scanspec/actions/workflows/ci.yml)
+[![Coverage](https://codecov.io/gh/dls-controls/scanspec/branch/main/graph/badge.svg)](https://codecov.io/gh/dls-controls/scanspec)
+[![PyPI](https://img.shields.io/pypi/v/scanspec.svg)](https://pypi.org/project/scanspec)
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+
+# scanspec
+
+Specify step and flyscan paths in a serializable, efficient and Pythonic way using combinations of:
+- Specs like Line or Spiral
+- Optionally Snaking
+- Zip, Product and Concat to compose
+- Masks with multiple Regions to restrict
+
+Serialize the Spec rather than the expanded Path and reconstruct it on the
+server. It can then be iterated over like a [cycler][], or a stack of scan Frames
+can be produced and expanded Paths created to consume chunk by chunk.
+
+[cycler]: https://matplotlib.org/cycler/
+
+Source | <https://github.com/dls-controls/scanspec>
+:---: | :---:
+PyPI | `pip install scanspec`
+Documentation | <https://dls-controls.github.io/scanspec>
+Releases | <https://github.com/dls-controls/scanspec/releases>
+
+An example ScanSpec of a 2D snaked grid flyscan inside a circle spending 0.4s at
+each point:
+
+```python
+from scanspec.specs import Line, fly
+from scanspec.regions import Circle
+
+grid = Line(y, 2.1, 3.8, 12) * ~Line(x, 0.5, 1.5, 10)
+spec = fly(grid, 0.4) & Circle(x, y, 1.0, 2.8, radius=0.5)
+```
+
+Which when plotted looks like:
+
+![plot][]
+
+Scan points can be iterated through directly for convenience:
+
+```python
+for point in spec.midpoints():
+ print(point)
+# ...
+# {'y': 3.1818181818181817, 'x': 0.8333333333333333, 'DURATION': 0.4}
+# {'y': 3.1818181818181817, 'x': 0.7222222222222222, 'DURATION': 0.4}
+```
+
+or a Path created from the stack of Frames and chunks of a given length
+consumed from it for performance:
+
+```python
+from scanspec.core import Path
+
+stack = spec.calculate()
+len(stack[0]) # 44
+stack[0].axes() # ['y', 'x', 'DURATION']
+
+path = Path(stack, start=5, num=30)
+chunk = path.consume(10)
+chunk.midpoints # {'x': , 'y': , 'DURATION': }
+chunk.upper # bounds are same dimensionality as positions
+```
+
+
+
+See https://dls-controls.github.io/scanspec for more detailed documentation.
+
+[plot]: https://raw.githubusercontent.com/dls-controls/scanspec/master/docs/images/plot_spec.png
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 5104c256..00000000
--- a/README.rst
+++ /dev/null
@@ -1,91 +0,0 @@
-scanspec
-========
-
-|code_ci| |docs_ci| |coverage| |pypi_version| |license|
-
-Specify step and flyscan Paths using combinations of:
-
-- Specs like Line or Spiral
-- Optionally Snaking
-- Zip, Product and Concat to compose
-- Masks with multiple Regions to restrict
-
-Serialize the Spec rather than the expanded Path and reconstruct it on the
-server. It can them be iterated over like a cycler_, or a stack of scan Frames
-can be produced and expanded Paths created to consume chunk by chunk.
-
-.. _cycler: https://matplotlib.org/cycler/
-
-============== ==============================================================
-PyPI ``pip install scanspec``
-Source code https://github.com/dls-controls/scanspec
-Documentation https://dls-controls.github.io/scanspec
-Releases https://github.com/dls-controls/scanspec/releases
-============== ==============================================================
-
-An example ScanSpec of a 2D snaked grid flyscan inside a circle spending 0.4s at
-each point looks like:
-
-.. code-block:: python
-
- from scanspec.specs import Line, fly
- from scanspec.regions import Circle
-
- grid = Line(y, 2.1, 3.8, 12) * ~Line(x, 0.5, 1.5, 10)
- spec = fly(grid, 0.4) & Circle(x, y, 1.0, 2.8, radius=0.5)
-
-|plot|
-
-You can then either iterate through the scan positions directly for convenience:
-
-.. code:: python
-
- for point in spec.midpoints():
- print(point)
- # ...
- # {'y': 3.1818181818181817, 'x': 0.8333333333333333, 'DURATION': 0.4}
- # {'y': 3.1818181818181817, 'x': 0.7222222222222222, 'DURATION': 0.4}
-
-or create a Path from the stack of Frames and consume chunks of a given length
-from it for performance:
-
-.. code:: python
-
- from scanspec.core import Path
-
- stack = spec.calculate()
- len(stack[0]) # 44
- stack[0].axes() # ['y', 'x', 'DURATION']
-
- path = Path(stack, start=5, num=30)
- chunk = path.consume(10)
- chunk.midpoints # {'x': , 'y': , 'DURATION': }
- chunk.upper # bounds are same dimensionality as positions
-
-.. |code_ci| image:: https://github.com/dls-controls/scanspec/actions/workflows/code.yml/badge.svg?branch=master
- :target: https://github.com/dls-controls/scanspec/actions/workflows/code.yml
- :alt: Code CI
-
-.. |docs_ci| image:: https://github.com/dls-controls/scanspec/actions/workflows/docs.yml/badge.svg?branch=master
- :target: https://github.com/dls-controls/scanspec/actions/workflows/docs.yml
- :alt: Docs CI
-
-.. |coverage| image:: https://codecov.io/gh/dls-controls/scanspec/branch/master/graph/badge.svg
- :target: https://codecov.io/gh/dls-controls/scanspec
- :alt: Test Coverage
-
-.. |pypi_version| image:: https://img.shields.io/pypi/v/scanspec.svg
- :target: https://pypi.org/project/scanspec
- :alt: Latest PyPI version
-
-.. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg
- :target: https://opensource.org/licenses/Apache-2.0
- :alt: Apache License
-
-..
- Anything below this line is used when viewing README.rst and will be replaced
- when included in index.rst
-
-.. |plot| image:: https://raw.githubusercontent.com/dls-controls/scanspec/master/docs/images/plot_spec.png
-
-See https://dls-controls.github.io/scanspec for more detailed documentation.
diff --git a/catalog-info.yaml b/catalog-info.yaml
new file mode 100644
index 00000000..2c93efd0
--- /dev/null
+++ b/catalog-info.yaml
@@ -0,0 +1,10 @@
+apiVersion: backstage.io/v1alpha1
+kind: Component
+metadata:
+ name: scanspec
+ title: scanspec
+ description: Specify step and flyscan paths in a serializable, efficient and Pythonic way
+spec:
+ type: documentation
+ lifecycle: experimental
+ owner: group:default/sscc
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
index 399592f3..5bee558b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -50,8 +50,13 @@
"sphinx_copybutton",
# For the card element
"sphinx_design",
+ # So we can write markdown files
+ "myst_parser",
]
+# So we can use the ::: syntax
+myst_enable_extensions = ["colon_fence"]
+
# If true, Sphinx will warn about all references where the target cannot
# be found.
nitpicky = True
@@ -94,7 +99,7 @@
# A dictionary for users defined type aliases that maps a type name to the
# full-qualified object name.
-autodoc_type_aliases = dict(AxesPoints="scanspec.core.AxesPoints")
+autodoc_type_aliases = {"AxesPoints": "scanspec.core.AxesPoints"}
# Include source in plot directive by default
plot_include_source = True
@@ -106,9 +111,6 @@
# role, that is, for text marked up `like this`
default_role = "any"
-# The suffix of source filenames.
-source_suffix = ".rst"
-
# The master toctree document.
master_doc = "index"
@@ -122,20 +124,13 @@
# This means you can link things like `str` and `asyncio` to the relevant
# docs in the python documentation.
-intersphinx_mapping = dict(
- python=("https://docs.python.org/3/", None),
- numpy=("https://numpy.org/doc/stable/", None),
-)
-
-# Common links that should be available on every page
-rst_epilog = """
-.. _Diamond Light Source: http://www.diamond.ac.uk
-.. _black: https://github.com/psf/black
-.. _flake8: https://flake8.pycqa.org/en/latest/
-.. _isort: https://github.com/PyCQA/isort
-.. _mypy: http://mypy-lang.org/
-.. _pre-commit: https://pre-commit.com/
-"""
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3/", None),
+ "numpy": ("https://numpy.org/doc/stable/", None),
+}
+
+# A dictionary of graphviz graph attributes for inheritance diagrams.
+inheritance_graph_attrs = {"rankdir": "TB"}
# Ignore localhost links for periodic check that links in docs are valid
linkcheck_ignore = [r"http://localhost:\d+/"]
@@ -151,12 +146,11 @@
# a list of builtin themes.
#
html_theme = "pydata_sphinx_theme"
-github_repo = project
+github_repo = "scanspec"
github_user = "dls-controls"
switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json"
-# Don't check switcher if it doesn't exist, but warn in a non-failing way
-check_switcher = requests.get(switcher_json).ok
-if not check_switcher:
+switcher_exists = requests.get(switcher_json).ok
+if not switcher_exists:
print(
"*** Can't read version switcher, is GitHub pages enabled? \n"
" Once Docs CI job has successfully run once, set the "
@@ -166,40 +160,43 @@
)
# Theme options for pydata_sphinx_theme
-html_theme_options = dict(
- logo=dict(
- text=project,
- ),
- use_edit_page_button=True,
- github_url=f"https://github.com/{github_user}/{github_repo}",
- icon_links=[
- dict(
- name="PyPI",
- url=f"https://pypi.org/project/{project}",
- icon="fas fa-cube",
- )
- ],
- switcher=dict(
- json_url=switcher_json,
- version_match=version,
- ),
- check_switcher=check_switcher,
- navbar_end=["theme-switcher", "icon-links", "version-switcher"],
- external_links=[
- dict(
- name="Release Notes",
- url=f"https://github.com/{github_user}/{github_repo}/releases",
- )
+# We don't check switcher because there are 3 possible states for a repo:
+# 1. New project, docs are not published so there is no switcher
+# 2. Existing project with latest skeleton, switcher exists and works
+# 3. Existing project with old skeleton that makes broken switcher,
+# switcher exists but is broken
+# Point 3 makes checking switcher difficult, because the updated skeleton
+# will fix the switcher at the end of the docs workflow, but never gets a chance
+# to complete as the docs build warns and fails.
+html_theme_options = {
+ "logo": {
+ "text": project,
+ },
+ "use_edit_page_button": True,
+ "github_url": f"https://github.com/{github_user}/{github_repo}",
+ "icon_links": [
+ {
+ "name": "PyPI",
+ "url": f"https://pypi.org/project/{project}",
+ "icon": "fas fa-cube",
+ }
],
-)
+ "switcher": {
+ "json_url": switcher_json,
+ "version_match": version,
+ },
+ "check_switcher": False,
+ "navbar_end": ["theme-switcher", "icon-links", "version-switcher"],
+ "navigation_with_keys": False,
+}
# A dictionary of values to pass into the template engine’s context for all pages
-html_context = dict(
- github_user=github_user,
- github_repo=project,
- github_version=version,
- doc_path="docs",
-)
+html_context = {
+ "github_user": github_user,
+ "github_repo": project,
+ "github_version": version,
+ "doc_path": "docs",
+}
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
@@ -209,4 +206,4 @@
# Logo
html_logo = "images/scanspec-logo.svg"
-html_favicon = "images/scanspec-logo.ico"
+html_favicon = html_logo
diff --git a/docs/explanations.md b/docs/explanations.md
new file mode 100644
index 00000000..73ab289b
--- /dev/null
+++ b/docs/explanations.md
@@ -0,0 +1,10 @@
+# Explanations
+
+Explanations of how it works and why it works that way.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+explanations/*
+```
diff --git a/docs/explanations.rst b/docs/explanations.rst
deleted file mode 100644
index c9b1be15..00000000
--- a/docs/explanations.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-:orphan:
-
-Explanations
-============
-
-Explanation of how the library works and why it works that way.
-
-.. toctree::
- :caption: Explanations
-
- explanations/technical-terms
- explanations/why-stack-frames
- explanations/why-squash-can-change-path
diff --git a/docs/explanations/decisions.md b/docs/explanations/decisions.md
new file mode 100644
index 00000000..0533b98d
--- /dev/null
+++ b/docs/explanations/decisions.md
@@ -0,0 +1,12 @@
+# Architectural Decision Records
+
+Architectural decisions are made throughout a project's lifetime. As a way of keeping track of these decisions, we record these decisions in Architecture Decision Records (ADRs) listed below.
+
+```{toctree}
+:glob: true
+:maxdepth: 1
+
+decisions/*
+```
+
+For more information on ADRs see this [blog by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions).
diff --git a/docs/explanations/decisions/0001-record-architecture-decisions.md b/docs/explanations/decisions/0001-record-architecture-decisions.md
new file mode 100644
index 00000000..44d234ef
--- /dev/null
+++ b/docs/explanations/decisions/0001-record-architecture-decisions.md
@@ -0,0 +1,18 @@
+# 1. Record architecture decisions
+
+## Status
+
+Accepted
+
+## Context
+
+We need to record the architectural decisions made on this project.
+
+## Decision
+
+We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions).
+
+## Consequences
+
+See Michael Nygard's article, linked above. To create new ADRs we will copy and
+paste from existing ones.
diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0002-switched-to-python-copier-template.md
new file mode 100644
index 00000000..66fe5d8b
--- /dev/null
+++ b/docs/explanations/decisions/0002-switched-to-python-copier-template.md
@@ -0,0 +1,28 @@
+# 2. Adopt python-copier-template for project structure
+
+## Status
+
+Accepted
+
+## Context
+
+We should use the following [python-copier-template](https://github.com/DiamondLightSource/python-copier-template).
+The template will ensure consistency in developer
+environments and package management.
+
+## Decision
+
+We have switched to using the template.
+
+## Consequences
+
+This module will use a fixed set of tools as developed in `python-copier-template`
+and can pull from this template to update the packaging to the latest techniques.
+
+As such, the developer environment may have changed, the following could be
+different:
+
+- linting
+- formatting
+- pip venv setup
+- CI/CD
diff --git a/docs/explanations/decisions/COPYME b/docs/explanations/decisions/COPYME
new file mode 100644
index 00000000..b466c792
--- /dev/null
+++ b/docs/explanations/decisions/COPYME
@@ -0,0 +1,19 @@
+# 3. Short descriptive title
+
+Date: Today's date
+
+## Status
+
+Accepted
+
+## Context
+
+Background to allow us to make the decision, to show how we arrived at our conclusions.
+
+## Decision
+
+What decision we made.
+
+## Consequences
+
+What we will do as a result of this decision.
diff --git a/docs/genindex.md b/docs/genindex.md
new file mode 100644
index 00000000..73f1191b
--- /dev/null
+++ b/docs/genindex.md
@@ -0,0 +1,3 @@
+# Index
+
+
diff --git a/docs/how-to.md b/docs/how-to.md
new file mode 100644
index 00000000..6b161417
--- /dev/null
+++ b/docs/how-to.md
@@ -0,0 +1,10 @@
+# How-to Guides
+
+Practical step-by-step guides for the more experienced user.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+how-to/*
+```
diff --git a/docs/how-to.rst b/docs/how-to.rst
deleted file mode 100644
index 9efa4f2d..00000000
--- a/docs/how-to.rst
+++ /dev/null
@@ -1,12 +0,0 @@
-:orphan:
-
-How-to Guides
-=============
-
-Practical step-by-step guides for the more experienced user.
-
-.. toctree::
- :caption: How-to Guides
-
- how-to/iterate-a-spec
- how-to/serialize-a-spec
diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md
new file mode 100644
index 00000000..f9c4ca1d
--- /dev/null
+++ b/docs/how-to/contribute.md
@@ -0,0 +1,2 @@
+```{include} ../../.github/CONTRIBUTING.md
+```
\ No newline at end of file
diff --git a/docs/how-to/run-container.md b/docs/how-to/run-container.md
new file mode 100644
index 00000000..579c1cd0
--- /dev/null
+++ b/docs/how-to/run-container.md
@@ -0,0 +1,14 @@
+# Run in a container
+
+Pre-built containers with scanspec and its dependencies already
+installed are available on [Github Container Registry](https://ghcr.io/dls-controls/scanspec).
+
+## Starting the container
+
+To pull the container from github container registry and run:
+
+```
+$ docker run ghcr.io/dls-controls/scanspec:main --version
+```
+
+To get a released version, use a numbered release instead of `main`.
diff --git a/docs/images/scanspec-logo.ico b/docs/images/scanspec-logo.ico
deleted file mode 100644
index 75349e66..00000000
Binary files a/docs/images/scanspec-logo.ico and /dev/null differ
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 00000000..a6f58e33
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,58 @@
+---
+html_theme.sidebar_secondary.remove: true
+---
+
+```{include} ../README.md
+:end-before:
+
+::::{grid} 2
+:gutter: 4
+
+:::{grid-item-card} {material-regular}`directions_walk;2em`
+```{toctree}
+:maxdepth: 2
+tutorials
+```
++++
+Tutorials for installation and typical usage. New users start here.
+:::
+
+:::{grid-item-card} {material-regular}`directions;2em`
+```{toctree}
+:maxdepth: 2
+how-to
+```
++++
+Practical step-by-step guides for the more experienced user.
+:::
+
+:::{grid-item-card} {material-regular}`info;2em`
+```{toctree}
+:maxdepth: 2
+explanations
+```
++++
+Explanations of how it works and why it works that way.
+:::
+
+:::{grid-item-card} {material-regular}`menu_book;2em`
+```{toctree}
+:maxdepth: 2
+reference
+```
++++
+Technical reference material including APIs and release notes.
+:::
+
+::::
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 2f712348..00000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,45 +0,0 @@
-.. include:: ../README.rst
- :end-before: when included in index.rst
-
-.. |plot| image:: images/plot_spec.png
-
-How the documentation is structured
------------------------------------
-
-Documentation is split into four categories, also accessible from links in the
-side-bar.
-
-.. grid:: 4
-
- .. grid-item-card:: :material-regular:`person;4em`
- :link: tutorials
- :link-type: doc
-
- Tutorials
-
- .. grid-item-card:: :material-regular:`code;4em`
- :link: how-to
- :link-type: doc
-
- How-To
-
- .. grid-item-card:: :material-regular:`person;4em`
- :link: explanations
- :link-type: doc
-
- Explanations
-
- .. grid-item-card:: :material-regular:`code;4em`
- :link: reference
- :link-type: doc
-
- Reference
-
-
-.. toctree::
- :hidden:
-
- tutorials
- how-to
- explanations
- reference
diff --git a/docs/reference.md b/docs/reference.md
new file mode 100644
index 00000000..da17b81e
--- /dev/null
+++ b/docs/reference.md
@@ -0,0 +1,12 @@
+# Reference
+
+Technical reference material including APIs and release notes.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+reference/*
+genindex
+Release Notes <https://github.com/dls-controls/scanspec/releases>
+```
diff --git a/docs/reference.rst b/docs/reference.rst
deleted file mode 100644
index 211d6ff4..00000000
--- a/docs/reference.rst
+++ /dev/null
@@ -1,19 +0,0 @@
-:orphan:
-
-Reference
-=========
-
-Practical step-by-step guides for the more experienced user.
-
-.. toctree::
- :caption: Reference
-
- reference/api
- reference/rest_api
- reference/contributing
- Releases
- Index
-
-..
- Index link above is a hack to make genindex.html a relative link
- https://stackoverflow.com/a/31820846
diff --git a/docs/reference/api.rst b/docs/reference/api.md
similarity index 67%
rename from docs/reference/api.rst
rename to docs/reference/api.md
index 8820f371..6a6204e2 100644
--- a/docs/reference/api.rst
+++ b/docs/reference/api.md
@@ -1,32 +1,38 @@
-API
-===
+# API
+```{eval-rst}
.. automodule:: scanspec
``scanspec``
------------
+```
The top level scanspec module contains a number of packages that can be used
from code:
-- `scanspec.core`: Core classes like `Frames` and `Path`
-- `scanspec.specs`: `Spec` and its subclasses
-- `scanspec.regions`: `Region` and its subclasses
-- `scanspec.plot`: `plot_spec` to visualize a scan
-- `scanspec.service`: Defines queries and field structure in REST such as `MidpointsResponse`
+- [](#scanspec.core): Core classes like [](#Frames) and [](#Path)
+- [](#scanspec.specs): [](#Spec) and its subclasses
+- [](#scanspec.regions): [](#Region) and its subclasses
+- [](#scanspec.plot): [](#plot_spec) to visualize a scan
+- [](#scanspec.service): Defines queries and field structure in REST such as [](#MidpointsResponse)
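+
+For example, a minimal sketch combining these modules (`fly` and
+`get_constant_duration` are documented under `scanspec.specs` below):
+
+```python
+from scanspec.specs import Line, fly, get_constant_duration
+
+# A 5 point fly scan along "x", with 0.1 seconds per frame
+spec = fly(Line("x", 0, 1, 5), 0.1)
+frames = spec.calculate()  # list of Frames, one per dimension
+assert get_constant_duration(frames) == 0.1
+```
+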
+```{eval-rst}
.. data:: scanspec.__version__
:type: str
Version number as calculated by https://github.com/dls-controls/versiongit
+```
+```{eval-rst}
.. automodule:: scanspec.core
:members:
``scanspec.core``
-----------------
+```
+```{eval-rst}
.. automodule:: scanspec.specs
:members:
@@ -36,7 +42,9 @@ from code:
.. inheritance-diagram:: scanspec.specs
:top-classes: scanspec.specs.Spec
:parts: 1
+```
+```{eval-rst}
.. automodule:: scanspec.regions
:members:
@@ -46,15 +54,20 @@ from code:
.. inheritance-diagram:: scanspec.regions
:top-classes: scanspec.regions.Region
:parts: 1
+```
+```{eval-rst}
.. automodule:: scanspec.plot
:members:
``scanspec.plot``
-----------------
+```
+```{eval-rst}
.. automodule:: scanspec.service
:members:
``scanspec.service``
--------------------
+```
diff --git a/docs/reference/contributing.rst b/docs/reference/contributing.rst
deleted file mode 100644
index 65b72c8e..00000000
--- a/docs/reference/contributing.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. include:: ../../.github/CONTRIBUTING.rst
diff --git a/docs/reference/rest_api.rst b/docs/reference/rest_api.md
similarity index 75%
rename from docs/reference/rest_api.rst
rename to docs/reference/rest_api.md
index c0ec4eca..f54aabcb 100644
--- a/docs/reference/rest_api.rst
+++ b/docs/reference/rest_api.md
@@ -1,6 +1,7 @@
-REST API
-========
+# REST API
The endpoints of the REST service are documented below.
+```{eval-rst}
.. openapi:: ../../schema.json
+```
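+
+For example, once the service is running you might check a spec against the
+`/valid` endpoint with something like the following. This is only a sketch: it
+assumes the service is listening on localhost:8000 (uvicorn's default), and the
+authoritative request body shape is given by the schema above.
+
+```
+# illustrative request body - see the schema above for the exact format
+$ curl -X POST http://localhost:8000/valid \
+    -H "Content-Type: application/json" \
+    -d '{"type": "Line", "axis": "x", "start": 0, "stop": 1, "num": 5}'
+```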
diff --git a/docs/tutorials.md b/docs/tutorials.md
new file mode 100644
index 00000000..b88db528
--- /dev/null
+++ b/docs/tutorials.md
@@ -0,0 +1,12 @@
+# Tutorials
+
+Tutorials for installation and typical usage. New users start here.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+tutorials/installation
+tutorials/creating-a-spec
+tutorials/rest-service
+```
diff --git a/docs/tutorials.rst b/docs/tutorials.rst
deleted file mode 100644
index e30e8d8d..00000000
--- a/docs/tutorials.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-:orphan:
-
-Tutorials
-=========
-
-Tutorials for installation, library and commandline usage. New users start here.
-
-.. toctree::
- :caption: Tutorials
-
- tutorials/installation
- tutorials/creating-a-spec
- tutorials/rest-service
diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md
new file mode 100644
index 00000000..3e004ec0
--- /dev/null
+++ b/docs/tutorials/installation.md
@@ -0,0 +1,55 @@
+# Installation
+
+## Check your version of python
+
+You will need python 3.8 or later. You can check your version of python by
+typing into a terminal:
+
+```
+$ python3 --version
+```
+
+## Create a virtual environment
+
+It is recommended that you install into a “virtual environment” so this
+installation will not interfere with any existing Python software:
+
+```
+$ python3 -m venv /path/to/venv
+$ source /path/to/venv/bin/activate
+```
+
+## Installing the library
+
+You can now use `pip` to install the library and its dependencies:
+
+```
+$ python3 -m pip install scanspec
+```
+
+If you require a feature that is not currently released you can also install
+from github:
+
+```
+$ python3 -m pip install git+https://github.com/dls-controls/scanspec.git
+```
+
+If you need to do any plotting (recommended), you should install the `plotting` extra:
+
+```
+$ python3 -m pip install scanspec[plotting]
+```
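+
+With the `plotting` extra installed you can, for example, plot a spec straight
+from the commandline (a sketch only; the creating-a-spec tutorial covers the
+exact expression syntax):
+
+```
+$ scanspec plot 'Line("x", 0, 10, 5)'
+```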
+
+If you need to run scanspec as a REST service for generating points, you should
+install the `service` extra:
+
+```
+$ python3 -m pip install scanspec[service]
+```
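+
+With the `service` extra installed you can then start the REST service locally
+(a sketch, assuming the `service` subcommand described in the rest-service
+tutorial):
+
+```
+$ scanspec service
+```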
+
+The library should now be installed and the commandline interface on your path.
+You can check the version that has been installed by typing:
+
+```
+$ scanspec --version
+```
diff --git a/docs/tutorials/installation.rst b/docs/tutorials/installation.rst
deleted file mode 100644
index 49c7ba65..00000000
--- a/docs/tutorials/installation.rst
+++ /dev/null
@@ -1,47 +0,0 @@
-Installation
-============
-
-Check your version of python
-----------------------------
-
-You will need python 3.8 or later. You can check your version of python by
-typing into a terminal::
-
- $ python3 --version
-
-
-Create a virtual environment
-----------------------------
-
-It is recommended that you install into a “virtual environment” so this
-installation will not interfere with any existing Python software::
-
- $ python3 -m venv /path/to/venv
- $ source /path/to/venv/bin/activate
-
-
-Installing the library
-----------------------
-
-You can now use ``pip`` to install the library::
-
- python3 -m pip install scanspec
-
-If you require a feature that is not currently released you can also install
-from github::
-
- python3 -m pip install git+git://github.com/dls-controls/scanspec.git
-
-If you need to do any plotting (recommended), you should install the ``plotting`` extra::
-
- python3 -m pip install scanspec[plotting]
-
-If you need to run scanspec as a REST service for generating points, you should
-install the ``service`` extra::
-
- python3 -m pip install scanspec[service]
-
-The library should now be installed and the commandline interface on your path.
-You can check the version that has been installed by typing::
-
- scanspec --version
diff --git a/pyproject.toml b/pyproject.toml
index c2032fbd..9a0e14ba 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[build-system]
-requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"]
+requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2"]
build-backend = "setuptools.build_meta"
[project]
@@ -7,6 +7,7 @@ name = "scanspec"
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
@@ -17,12 +18,12 @@ dependencies = [
"numpy>=1.19.3",
"click==8.1.3",
"pydantic<2.0",
- "typing_extensions"
+ "typing_extensions",
]
dynamic = ["version"]
license.file = "LICENSE"
-readme = "README.rst"
-requires-python = ">=3.8"
+readme = "README.md"
+requires-python = ">=3.7"
[project.optional-dependencies]
# Plotting
@@ -32,21 +33,22 @@ plotting = [
"matplotlib>=3.2.2",
]
# REST service support
-service = [
- "fastapi==0.99",
- "uvicorn"
-]
+service = ["fastapi==0.99", "uvicorn"]
# For development tests/docs
dev = [
- "black==22.3.0",
- "mypy<1",
- "flake8-isort",
- "Flake8-pyproject",
+ # This syntax is supported since pip 21.2
+ # https://github.com/pypa/pip/issues/10393
+ "scanspec[plotting]",
+ "scanspec[service]",
+ "copier",
+ "mypy",
+ "myst-parser",
"pipdeptree",
"pre-commit",
"pydata-sphinx-theme>=0.12",
+ "pytest",
"pytest-cov",
- "pydocstyle",
+ "ruff",
"sphinx-autobuild",
"sphinx-copybutton",
"sphinx-design",
@@ -54,6 +56,7 @@ dev = [
"tox-direct",
"types-mock",
"httpx",
+ "myst-parser",
]
[project.scripts]
@@ -71,37 +74,12 @@ name = "Tom Cobb"
write_to = "src/scanspec/_version.py"
[tool.mypy]
-plugins = "pydantic.mypy"
ignore_missing_imports = true # Ignore missing stubs in imported modules
-[tool.isort]
-float_to_top = true
-profile = "black"
-
-[tool.flake8]
-extend-ignore = [
- "E203", # See https://github.com/PyCQA/pycodestyle/issues/373
- "F811", # support typing.overload decorator
- "F722", # allow Annotated[typ, some_func("some string")]
-]
-max-line-length = 88 # Respect black's line length (default 88),
-exclude = [".tox", "venv"]
-
-[tool.pydocstyle]
-convention = "google"
-add-ignore = [
- "D100", # Ignore missing docstring in public modules
- "D102", # Ignore missing docstring in public method; See: https://github.com/PyCQA/pydocstyle/issues/309
- "D104", # Ignore missing docstring in public packages
- "D105", # Ignore missing docstring in magic method
- "D107", # Ignore missing docstring in __init__
-]
-
[tool.pytest.ini_options]
# Run pytest with all our checkers, and don't spam us with massive tracebacks on error
addopts = """
--tb=native -vv --doctest-modules --doctest-glob="*.rst"
- --cov=scanspec --cov-report term --cov-report xml:cov.xml
"""
# https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings
filterwarnings = "error"
@@ -122,19 +100,39 @@ legacy_tox_ini = """
[tox]
skipsdist=True
-[testenv:{pre-commit,mypy,pytest,docs}]
+[testenv:{pre-commit,type-checking,tests,docs}]
# Don't create a virtualenv for the command, requires tox-direct plugin
direct = True
passenv = *
-allowlist_externals =
- pytest
+allowlist_externals =
+ pytest
pre-commit
mypy
sphinx-build
sphinx-autobuild
commands =
- pytest: pytest {posargs}
- mypy: mypy src tests {posargs}
pre-commit: pre-commit run --all-files {posargs}
+ type-checking: mypy src tests {posargs}
+ tests: pytest --cov=scanspec --cov-report term --cov-report xml:cov.xml {posargs}
docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html
"""
+
+[tool.ruff]
+src = ["src", "tests"]
+line-length = 88
+
+[tool.ruff.lint]
+extend-select = [
+ "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b
+ "C4", # flake8-comprehensions - https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4
+ "E", # pycodestyle errors - https://docs.astral.sh/ruff/rules/#error-e
+ "F", # pyflakes rules - https://docs.astral.sh/ruff/rules/#pyflakes-f
+ "W", # pycodestyle warnings - https://docs.astral.sh/ruff/rules/#warning-w
+ "I", # isort - https://docs.astral.sh/ruff/rules/#isort-i
+ "UP", # pyupgrade - https://docs.astral.sh/ruff/rules/#pyupgrade-up
+]
+# We use pydantic, so don't upgrade to py3.10 syntax yet
+pyupgrade.keep-runtime-typing = true
+ignore = [
+ "B008", # We use function calls in service arguments
+]
diff --git a/src/scanspec/__init__.py b/src/scanspec/__init__.py
index dee838fe..b99257eb 100644
--- a/src/scanspec/__init__.py
+++ b/src/scanspec/__init__.py
@@ -1,6 +1,4 @@
-from importlib.metadata import version
-
-__version__ = version("scanspec")
-del version
+from . import regions, specs
+from ._version import __version__
__all__ = ["__version__", "specs", "regions"]
diff --git a/src/scanspec/__main__.py b/src/scanspec/__main__.py
index d8ac7575..cd9cfea2 100644
--- a/src/scanspec/__main__.py
+++ b/src/scanspec/__main__.py
@@ -1,6 +1,5 @@
from scanspec import cli
-# test with:
-# pipenv run python -m scanspec
+# test with: python -m scanspec
if __name__ == "__main__":
cli.cli()
diff --git a/src/scanspec/cli.py b/src/scanspec/cli.py
index 69b48fc1..7c0e4de3 100644
--- a/src/scanspec/cli.py
+++ b/src/scanspec/cli.py
@@ -16,7 +16,7 @@
["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"], case_sensitive=False
),
)
-@click.version_option(prog_name="scanspec")
+@click.version_option(prog_name="scanspec", message="%(version)s")
@click.pass_context
def cli(ctx, log_level: str):
"""Top level scanspec command line interface."""
diff --git a/src/scanspec/core.py b/src/scanspec/core.py
index 1d5877bf..5e77f164 100644
--- a/src/scanspec/core.py
+++ b/src/scanspec/core.py
@@ -135,7 +135,6 @@ def _discriminated_union_of_subclasses(
discriminator: str,
config: Optional[Type[BaseConfig]] = None,
) -> Union[Type, Callable[[Type], Type]]:
-
super_cls._ref_classes = set()
super_cls._model = None
@@ -254,8 +253,8 @@ def __init__(
# Need to calculate gap as not passed one
# We have a gap if upper[i] != lower[i+1] for any axes
axes_gap = [
- np.roll(u, 1) != l
- for u, l in zip(self.upper.values(), self.lower.values())
+ np.roll(upper, 1) != lower
+ for upper, lower in zip(self.upper.values(), self.lower.values())
]
self.gap = np.logical_or.reduce(axes_gap)
# Check all axes and ordering are the same
@@ -264,11 +263,11 @@ def __init__(
f"{list(self.midpoints)} != {list(self.lower)} != {list(self.upper)}"
)
# Check all lengths are the same
- lengths = set(
+ lengths = {
len(arr)
for d in (self.midpoints, self.lower, self.upper)
for arr in d.values()
- )
+ }
lengths.add(len(self.gap))
assert len(lengths) <= 1, f"Mismatching lengths {list(lengths)}"
@@ -372,10 +371,10 @@ def zip_gap(gaps: Sequence[np.ndarray]) -> np.ndarray:
def _merge_frames(
*stack: Frames[Axis],
- dict_merge=Callable[[Sequence[AxesPoints[Axis]]], AxesPoints[Axis]],
+ dict_merge=Callable[[Sequence[AxesPoints[Axis]]], AxesPoints[Axis]], # type: ignore
gap_merge=Callable[[Sequence[np.ndarray]], Optional[np.ndarray]],
) -> Frames[Axis]:
- types = set(type(fs) for fs in stack)
+ types = {type(fs) for fs in stack}
assert len(types) == 1, f"Mismatching types for {stack}"
cls = types.pop()
diff --git a/src/scanspec/plot.py b/src/scanspec/plot.py
index c5fb1a1d..07db1949 100644
--- a/src/scanspec/plot.py
+++ b/src/scanspec/plot.py
@@ -1,10 +1,10 @@
from itertools import cycle
-from typing import Any, Dict, Iterator, List
+from typing import Any, Dict, Iterator, List, Optional
import numpy as np
from matplotlib import colors, patches
from matplotlib import pyplot as plt
-from mpl_toolkits.mplot3d import proj3d
+from mpl_toolkits.mplot3d import Axes3D, proj3d
from scipy import interpolate
from .core import Path
@@ -45,7 +45,10 @@ def _plot_arrow(axes, arrays: List[np.ndarray]):
head = [a[-1] for a in reversed(arrays)]
tail = [a[-1] - (a[-1] - a[-2]) * 0.1 for a in reversed(arrays)]
axes.annotate(
- "", head[:2], tail[:2], arrowprops=dict(color="lightgrey", arrowstyle="-|>")
+ "",
+ head[:2],
+ tail[:2],
+ arrowprops={"color": "lightgrey", "arrowstyle": "-|>"},
)
elif len(arrays) == 3:
arrows = [a[-2:] for a in reversed(arrays)]
@@ -71,7 +74,7 @@ def _plot_spline(axes, ranges, arrays: List[np.ndarray], index_colours: Dict[int
t /= t[-1]
# Scale the arrays so splines don't favour larger scaled axes
tck, _ = interpolate.splprep(scaled_arrays, k=2, s=0)
- starts = sorted(list(index_colours))
+ starts = sorted(index_colours)
stops = starts[1:] + [len(arrays[0]) - 1]
for start, stop in zip(starts, stops):
tnew = np.linspace(t[start], t[stop], num=1001)
@@ -82,7 +85,7 @@ def _plot_spline(axes, ranges, arrays: List[np.ndarray], index_colours: Dict[int
yield unscaled_splines
-def plot_spec(spec: Spec[Any]):
+def plot_spec(spec: Spec[Any], title: Optional[str] = None):
"""Plot a spec, drawing the path taken through the scan.
Uses a different colour for each frame, grey for the turnarounds, and
@@ -105,7 +108,7 @@ def plot_spec(spec: Spec[Any]):
# Setup axes
if ndims > 2:
plt.figure(figsize=(6, 6))
- plt_axes = plt.axes(projection="3d")
+ plt_axes: Axes3D = plt.axes(projection="3d")
plt_axes.grid(False)
plt_axes.set_zlabel(axes[-3])
plt_axes.set_ylabel(axes[-2])
@@ -121,7 +124,8 @@ def plot_spec(spec: Spec[Any]):
plt_axes.set_xlabel(axes[-1])
# Title with dimension sizes
- plt.title(", ".join(f"Dim[{' '.join(d.axes())} len={len(d)}]" for d in dims))
+ title = title or ", ".join(f"Dim[{' '.join(d.axes())} len={len(d)}]" for d in dims)
+ plt.title(title)
# Plot any Regions
if ndims <= 2:
@@ -143,7 +147,7 @@ def plot_spec(spec: Spec[Any]):
height = region.y_radius * 2
angle = region.angle
plt_axes.add_patch(
- patches.Ellipse(xy, width, height, angle, fill=False)
+ patches.Ellipse(xy, width, height, angle=angle, fill=False)
)
elif isinstance(region, Polygon):
# *xy_verts* is a numpy array with shape Nx2.
diff --git a/src/scanspec/service.py b/src/scanspec/service.py
index 73288c47..64865fed 100644
--- a/src/scanspec/service.py
+++ b/src/scanspec/service.py
@@ -124,7 +124,7 @@ class SmallestStepResponse:
@app.post("/valid", response_model=ValidResponse)
def valid(
- spec: Spec = Body(..., examples=[_EXAMPLE_SPEC])
+ spec: Spec = Body(..., examples=[_EXAMPLE_SPEC]),
) -> Union[ValidResponse, JSONResponse]:
"""Validate wether a ScanSpec can produce a viable scan.
@@ -144,7 +144,7 @@ def midpoints(
request: PointsRequest = Body(
...,
examples=[_EXAMPLE_POINTS_REQUEST],
- )
+ ),
) -> MidpointsResponse:
"""Generate midpoints from a scanspec.
@@ -172,7 +172,7 @@ def bounds(
request: PointsRequest = Body(
...,
examples=[_EXAMPLE_POINTS_REQUEST],
- )
+ ),
) -> BoundsResponse:
"""Generate bounds from a scanspec.
@@ -199,7 +199,7 @@ def gap(
spec: Spec = Body(
...,
examples=[_EXAMPLE_SPEC],
- )
+ ),
) -> GapResponse:
"""Generate gaps from a scanspec.
@@ -221,7 +221,7 @@ def gap(
@app.post("/smalleststep", response_model=SmallestStepResponse)
def smallest_step(
- spec: Spec = Body(..., examples=[_EXAMPLE_SPEC])
+ spec: Spec = Body(..., examples=[_EXAMPLE_SPEC]),
) -> SmallestStepResponse:
"""Calculate the smallest step in a scan, both absolutely and per-axis.
diff --git a/src/scanspec/specs.py b/src/scanspec/specs.py
index 948e4879..0576f22e 100644
--- a/src/scanspec/specs.py
+++ b/src/scanspec/specs.py
@@ -128,7 +128,7 @@ def concat(self, other: Spec) -> Concat[Axis]:
def serialize(self) -> Mapping[str, Any]:
"""Serialize the spec to a dictionary."""
- return asdict(self)
+ return asdict(self) # type: ignore
@classmethod
def deserialize(cls, obj):
@@ -744,3 +744,31 @@ def step(spec: Spec[Axis], duration: float, num: int = 1) -> Spec[Axis]:
spec = step(Line("x", 1, 2, 3), 0.1)
"""
return spec * Static.duration(duration, num)
+
+
+def get_constant_duration(frames: List[Frames]) -> Optional[float]:
+ """
+ Returns the duration of a number of ScanSpec frames, if known and consistent.
+
+ Args:
+ frames (List[Frames]): A number of Frame objects
+
+ Returns:
+ duration (float): if all frames have a consistent duration
+ None: otherwise
+
+ """
+ duration_frame = [
+ f for f in frames if DURATION in f.axes() and len(f.midpoints[DURATION])
+ ]
+ if len(duration_frame) != 1 or len(duration_frame[0]) < 1:
+ # Either no frame has DURATION axis,
+ # the frame with a DURATION axis has 0 points,
+ # or multiple frames have DURATION axis
+ return None
+ durations = duration_frame[0].midpoints[DURATION]
+ first_duration = durations[0]
+ if np.any(durations != first_duration):
+ # Not all durations are the same
+ return None
+ return first_duration
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 2dac12b4..9f3641fa 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -1,12 +1,15 @@
import pathlib
import subprocess
import sys
+from typing import List, cast
from unittest.mock import patch
import matplotlib.pyplot as plt
import numpy as np
import pytest
from click.testing import CliRunner
+from matplotlib.patches import Rectangle
+from matplotlib.text import Annotation
from scanspec import __version__, cli
from scanspec.plot import _Arrow3D
@@ -58,9 +61,9 @@ def test_plot_1D_line() -> None:
# End
assert_min_max_2d(lines[3], 2.5, 2.5, 0, 0)
# Arrows
- texts = axes.texts
+ texts = cast(List[Annotation], axes.texts)
assert len(texts) == 1
- assert texts[0].xy == [0.5, 0]
+ assert tuple(texts[0].xy) == (0.5, 0)
def test_plot_1D_line_snake_repeat() -> None:
@@ -83,10 +86,10 @@ def test_plot_1D_line_snake_repeat() -> None:
# End
assert_min_max_2d(lines[4], 1, 1, 0, 0)
# Arrows
- texts = axes.texts
+ texts = cast(List[Annotation], axes.texts)
assert len(texts) == 2
- assert texts[0].xy == [1, 0]
- assert texts[1].xy == pytest.approx([2, 0])
+ assert tuple(texts[0].xy) == (1, 0)
+ assert tuple(texts[1].xy) == pytest.approx([2, 0])
def test_plot_1D_step() -> None:
@@ -107,9 +110,9 @@ def test_plot_1D_step() -> None:
# End
assert_min_max_2d(lines[3], 2, 2, 0, 0)
# Arrows
- texts = axes.texts
+ texts = cast(List[Annotation], axes.texts)
assert len(texts) == 1
- assert texts[0].xy == [2, 0]
+ assert tuple(texts[0].xy) == (2, 0)
def test_plot_2D_line() -> None:
@@ -134,10 +137,10 @@ def test_plot_2D_line() -> None:
# End
assert_min_max_2d(lines[6], 0.5, 0.5, 3, 3)
# Arrows
- texts = axes.texts
+ texts = cast(List[Annotation], axes.texts)
assert len(texts) == 2
- assert texts[0].xy == [0.5, 2]
- assert texts[1].xy == pytest.approx([2.5, 3])
+ assert tuple(texts[0].xy) == (0.5, 2)
+ assert tuple(texts[1].xy) == pytest.approx([2.5, 3])
def test_plot_2D_line_rect_region() -> None:
@@ -161,18 +164,19 @@ def test_plot_2D_line_rect_region() -> None:
# End
assert_min_max_2d(lines[5], 1.5, 1.5, 2, 2)
# Arrows
- texts = axes.texts
+ texts = cast(List[Annotation], axes.texts)
assert len(texts) == 2
- assert texts[0].xy == [-0.5, 1.5]
- assert texts[1].xy == [-0.5, 2]
+ assert tuple(texts[0].xy) == (-0.5, 1.5)
+ assert tuple(texts[1].xy) == (-0.5, 2)
# Regions
patches = axes.patches
assert len(patches) == 1
- assert type(patches[0]).__name__ == "Rectangle"
- assert patches[0].xy == (0, 1.1)
- assert patches[0].get_height() == 1.0
- assert patches[0].get_width() == 1.5
- assert patches[0].angle == 30
+ p = patches[0]
+ assert isinstance(p, Rectangle)
+ assert p.get_xy() == (0, 1.1)
+ assert p.get_height() == 1.0
+ assert p.get_width() == 1.5
+ assert p.angle == 30
def test_plot_3D_line() -> None:
@@ -239,7 +243,4 @@ def test_schema() -> None:
def test_cli_version():
cmd = [sys.executable, "-m", "scanspec", "--version"]
- assert (
- subprocess.check_output(cmd).decode().strip()
- == f"scanspec, version {__version__}"
- )
+ assert subprocess.check_output(cmd).decode().strip() == __version__
diff --git a/tests/test_specs.py b/tests/test_specs.py
index b99d9ee4..51885058 100644
--- a/tests/test_specs.py
+++ b/tests/test_specs.py
@@ -1,3 +1,4 @@
+import re
from typing import Any, Tuple
import pytest
@@ -16,6 +17,7 @@
Static,
Zip,
fly,
+ get_constant_duration,
step,
)
@@ -685,9 +687,71 @@ def test_multiple_statics_with_grid():
),
],
)
def test_dimension_info(
spec: Spec, expected_shape: Tuple[int, ...], expected_axes: Tuple[Tuple[str, ...]]
):
dimension_info = spec.dimension_info()
assert expected_shape == dimension_info.shape
assert expected_axes == dimension_info.axes
+
+
+def test_shape(spec: Spec, expected_shape: Tuple[int, ...]):
+ assert expected_shape == spec.shape()
+
+
+def test_single_frame_single_point():
+ spec = Static.duration(0.1)
+ assert get_constant_duration(spec.calculate()) == 0.1
+
+
+def test_consistent_points():
+ spec = Static.duration(0.1).concat(Static.duration(0.1))
+ assert get_constant_duration(spec.calculate()) == 0.1
+
+
+def test_inconsistent_points():
+ spec = Static.duration(0.1).concat(Static.duration(0.2))
+ assert get_constant_duration(spec.calculate()) is None
+
+
+def test_frame_with_multiple_axes():
+ spec = Static.duration(0.1).zip(Line.bounded("x", 0, 0, 1))
+ frames = spec.calculate()
+ assert len(frames) == 1
+ assert get_constant_duration(frames) == 0.1
+
+
+def test_inconsistent_frame_with_multiple_axes():
+ spec = (
+ Static.duration(0.1)
+ .concat(Static.duration(0.2))
+ .zip(Line.bounded("x", 0, 0, 2))
+ )
+ frames = spec.calculate()
+ assert len(frames) == 1
+ assert get_constant_duration(frames) is None
+
+
+def test_non_static_spec_duration():
+ spec = Line.bounded(DURATION, 0, 0, 3)
+ frames = spec.calculate()
+ assert len(frames) == 1
+ assert get_constant_duration(frames) == 0
+
+
+def test_multiple_duration_frames():
+ spec = (
+ Static.duration(0.1)
+ .concat(Static.duration(0.2))
+ .zip(Line.bounded(DURATION, 0, 0, 2))
+ )
+ with pytest.raises(
+ AssertionError, match=re.escape("Zipping would overwrite axes ['DURATION']")
+ ):
+ spec.calculate()
+ spec = ( # TODO: refactor when https://github.com/dls-controls/scanspec/issues/90
+ Static.duration(0.1) * Line.bounded(DURATION, 0, 0, 2)
+ )
+ frames = spec.calculate()
+ assert len(frames) == 2
+ assert get_constant_duration(frames) is None