diff --git a/.dae-devops/Makefile b/.dae-devops/Makefile new file mode 100644 index 0000000..e778471 --- /dev/null +++ b/.dae-devops/Makefile @@ -0,0 +1,72 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +# --------------------------------------------------------------------- +# These make targets are designed to be called from the command line and from .gitlab-ci.yml. +# --------------------------------------------------------------------- + +# I put the package_pip artifacts into this place for now until I can use the corporate internal pipserver. +# Maybe /dls_sw/work/python3/RHEL7-x86_64/distributions would be a better place? +PIP_FIND_LINKS = /dls_sw/apps/bxflow/artifacts + +# I got these from https://gitlab.diamond.ac.uk/controls/reports/ci_templates/-/blob/master/defaults.yml. +# Directory where docs are published. +DOCS_PUBLISH_ROOT = /dls/cs-www/reports/gitlab-ci/chimpflow + +# --------------------------------------------------------------------- +# These are called from the default before_script in the gitlab CI. +# That means it is normally called before any other job. +# It installs things not covered by pyproject.toml. + +prepare_git_dependencies: + chmod a+x .dae-devops/prepare_git_dependencies.sh + .dae-devops/prepare_git_dependencies.sh + +# --------------------------------------------------------------------- +# The validate stage makes sure code is ready to package and release. + +validate: \ + validate_pre_commit \ + validate_mypy \ + validate_pytest + echo "validation complete" + +validate_pre_commit: + tox -q -e pre-commit + +validate_mypy: + tox -q -e mypy + +validate_pytest: + tox -q -e pytest + +validate_docs: + tox -q -e docs + +# --------------------------------------------------------------------- +# The pip stage packages and publishes for pip. 
+ +package: \ + package_pip + +package_pip: + pip install pipx twine + pipx run build + +# --------------------------------------------------------------------- +# The publish stage publishes things which have already been built or packaged. + +publish: \ + publish_pip \ + publish_docs + +publish_pip: + cp -v -p dist/*.whl $(PIP_FIND_LINKS) + +publish_docs: + mkdir -p $(DOCS_PUBLISH_ROOT) + cp -r build/html/* $(DOCS_PUBLISH_ROOT) + + +# dae_devops_fingerprint e7dd1051580ffa0ed1b6a5a9ce3e6290 diff --git a/.dae-devops/docs/conventions.rst b/.dae-devops/docs/conventions.rst new file mode 100644 index 0000000..eab918d --- /dev/null +++ b/.dae-devops/docs/conventions.rst @@ -0,0 +1,34 @@ +.. # ********** Please don't edit this file! +.. # ********** It has been generated automatically by dae_devops version 0.5.2. +.. # ********** For repository_name chimpflow + +Naming conventions +======================================================================= + +Here are the naming conventions used within the source code. + +variables + lowercase, underscores + +constants + uppercase, underscores + +classes + camel case + +class methods + lowercase, underscores + +imports + require fully qualified package name (import does nothing) + + exception: top-level packages __init__.py will have __version__ defined in them + +python packages + lowercase, underscores + +repository + lowercase, hyphens + + +.. # dae_devops_fingerprint 1584b2ca3962a352d751931fba1af516 diff --git a/.dae-devops/docs/developing.rst b/.dae-devops/docs/developing.rst new file mode 100644 index 0000000..ddf8b10 --- /dev/null +++ b/.dae-devops/docs/developing.rst @@ -0,0 +1,39 @@ +.. # ********** Please don't edit this file! +.. # ********** It has been generated automatically by dae_devops version 0.5.2. +.. 
# ********** For repository_name chimpflow + +Developing +======================================================================= + +If you plan to make changes to the code in this repository, you can use the steps below. + +Clone the repository:: + + $ git clone https://github.com/diamondlightsource/chimpflow/chimpflow.git + +It is recommended that you install into a virtual environment so this +installation will not interfere with any existing Python software. +Make sure to have at least python version 3.9 then:: + + $ python3 -m venv /scratch/$USER/myvenv + $ source /scratch/$USER/myvenv/bin/activate + $ pip install --upgrade pip + +Install the package in edit mode which will also install all its dependencies:: + + $ cd chimpflow + $ export PIP_FIND_LINKS=/dls_sw/apps/bxflow/artifacts + $ pip install -e .[dev] + +Now you may begin modifying the code. + +| + +If you plan to modify the docs, you will need to:: + + $ pip install -e .[docs] + + + + +.. # dae_devops_fingerprint 2778ced07515a8064c66e3978215aae0 diff --git a/.dae-devops/docs/devops.rst b/.dae-devops/docs/devops.rst new file mode 100644 index 0000000..825a266 --- /dev/null +++ b/.dae-devops/docs/devops.rst @@ -0,0 +1,46 @@ +.. # ********** Please don't edit this file! +.. # ********** It has been generated automatically by dae_devops version 0.5.2. +.. # ********** For repository_name chimpflow + +Devops +======================================================================= + +There exists a configuration file called ``.dae-devops/project.yaml``. + +This file defines the project information needed for CI/CD. + +It is parsed by the ``dae_devops.force`` command which creates these files: + +- pyproject.toml +- .gitlab-ci.yml +- .dae-devops/Makefile +- .dae-devops/docs/* + +Local CI/CD execution +----------------------------------------------------------------------- + +All the CI/CD ops which are run by the git server can be run at the command line. 
+ +Running these ops before pushing to the git server can make the turnaround quicker to fix things. + +Follow the steps in the Developing section. Then you can run the following commands. + +Validation of the code:: + + $ make -f .dae-devops/Makefile validate_pre_commit + $ make -f .dae-devops/Makefile validate_mypy + $ make -f .dae-devops/Makefile validate_pytest + $ make -f .dae-devops/Makefile validate_docs + +Packaging:: + + $ make -f .dae-devops/Makefile package_pip + +Publishing:: + + $ make -f .dae-devops/Makefile publish_pip + $ make -f .dae-devops/Makefile publish_docs + + + +.. # dae_devops_fingerprint d1782f75b785c0194b6666f894b338a6 diff --git a/.dae-devops/docs/docs_structure.rst b/.dae-devops/docs/docs_structure.rst new file mode 100644 index 0000000..628e446 --- /dev/null +++ b/.dae-devops/docs/docs_structure.rst @@ -0,0 +1,24 @@ +.. # ********** Please don't edit this file! +.. # ********** It has been generated automatically by dae_devops version 0.5.2. +.. # ********** For repository_name chimpflow + +About the documentation +----------------------- + + :material-regular:`format_quote;2em` + + The Grand Unified Theory of Documentation + + -- David Laing + +There is a secret that needs to be understood in order to write good software +documentation: there isn't one thing called *documentation*, there are four. + +They are: *tutorials*, *how-to guides*, *technical reference* and *explanation*. +They represent four different purposes or functions, and require four different +approaches to their creation. Understanding the implications of this will help +improve most documentation - often immensely. + +`More information on this topic. `_ + +.. # dae_devops_fingerprint b2ecceb2057aa129a1ee55c76c58e867 diff --git a/.dae-devops/docs/installing.rst b/.dae-devops/docs/installing.rst new file mode 100644 index 0000000..086d991 --- /dev/null +++ b/.dae-devops/docs/installing.rst @@ -0,0 +1,43 @@ +.. # ********** Please don't edit this file! +.. 
# ********** It has been generated automatically by dae_devops version 0.5.2. +.. # ********** For repository_name chimpflow + +Installing +======================================================================= + + +You will need python 3.9 or later. + +On a Diamond Light Source internal computer, you can achieve Python 3.9 by:: + + $ module load python/3.9 + +You can check your version of python by typing into a terminal:: + + $ python3 --version + +It is recommended that you install into a virtual environment so this +installation will not interfere with any existing Python software:: + + $ python3 -m venv /scratch/$USER/myvenv + $ source /scratch/$USER/myvenv/bin/activate + $ pip install --upgrade pip + + +You can now use ``pip`` to install the library and its dependencies:: + + $ export PIP_FIND_LINKS=/dls_sw/apps/bxflow/artifacts + $ python3 -m pip install chimpflow + +If you require a feature that is not currently released you can also install +from git:: + + $ python3 -m pip install git+https://github.com/diamondlightsource/chimpflow/chimpflow.git + +The library should now be installed and the commandline interface on your path. +You can check the version that has been installed by typing:: + + $ chimpflow --version + $ chimpflow --version-json + +.. # dae_devops_fingerprint f4209c13d354b457d8cc21ddf076df26 diff --git a/.dae-devops/docs/testing.rst b/.dae-devops/docs/testing.rst new file mode 100644 index 0000000..39021f9 --- /dev/null +++ b/.dae-devops/docs/testing.rst @@ -0,0 +1,38 @@ +.. # ********** Please don't edit this file! +.. # ********** It has been generated automatically by dae_devops version 0.5.2. +.. # ********** For repository_name chimpflow + +Testing +======================================================================= + +The package uses pytest for unit testing. + +If you want to run the tests, first get a copy of the code per the instructions in the Developing section. 
+ +Then you can run all tests by:: + + $ pytest + +Or this, which is the command used by the CI runner:: + + $ make -f .dae-devops/Makefile validate_pytest + +To run a single test you can do:: + + $ pytest tests/the_test_you_want.py + +If you want to see more output of the test while it's running you can do:: + + $ pytest -sv -ra --tb=line tests/the_test_you_want.py + +Each test will write files into its own directory:: + + /tmp/chimpflow/tests/.... + +The tests clear their directory when they start, but not when they finish. +This allows peeking in there to see what's been written by the test. + + + + +.. # dae_devops_fingerprint 3787294ef4eef4f87161e07806f1f417 diff --git a/.dae-devops/prepare_git_dependencies.sh b/.dae-devops/prepare_git_dependencies.sh new file mode 100644 index 0000000..2879c49 --- /dev/null +++ b/.dae-devops/prepare_git_dependencies.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +me=${BASH_SOURCE} +echo "${me}: installing 0 dependencies for repository_name chimpflow" + +function __install { + echo ${me}: "$@" + "$@" +} + + +# dae_devops_fingerprint 74a0f47063a55760445b6dba99ef5c79 \ No newline at end of file diff --git a/.dae-devops/project.yaml b/.dae-devops/project.yaml new file mode 100644 index 0000000..3cca941 --- /dev/null +++ b/.dae-devops/project.yaml @@ -0,0 +1,21 @@ +# Primary information needed for devops. +primary: + repository_name: chimpflow + package_name: chimpflow_lib + one_line_description: "XChem service to apply CHIMP detection for discovery of well center, crystal locations and suggested drop target." 
+ author: + name: David Erb + email: david.erb@diamond.ac.uk + project_urls: + GitLab: https://github.com/diamondlightsource/chimpflow + project_scripts: + chimpflow: "chimpflow_lib.__main__:main" + chimpflow.cli: "chimpflow_cli.main:main" + dependencies: + - type: pypi + list: + - xchembku + - dls_servbase + - dls_mainiac + - dls_multiconf + - dls_utilpack diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..36d74e3 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,43 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +# This file is for use as a devcontainer and a runtime container +# +# The devcontainer should use the build target and run as root with podman +# or docker with user namespaces. +# +FROM python:3.11 as build + +ARG PIP_OPTIONS + +# Add any system dependencies for the developer/build environment here e.g. +# RUN apt-get update && apt-get upgrade -y && \ +# apt-get install -y --no-install-recommends \ +# desired-packages \ +# && rm -rf /var/lib/apt/lists/* + +# set up a virtual environment and put it in PATH +RUN python -m venv /venv +ENV PATH=/venv/bin:$PATH + +# Copy any required context for the pip install over +COPY . 
/context +WORKDIR /context + +# install python package into /venv +RUN pip install ${PIP_OPTIONS} + +FROM python:3.11-slim as runtime + +# Add apt-get system dependecies for runtime here if needed + +# copy the virtual environment from the build stage and put it in PATH +COPY --from=build /venv/ /venv/ +ENV PATH=/venv/bin:$PATH + +# change this entrypoint if it is not the same as the repo +ENTRYPOINT ["chimpflow"] +CMD ["--version"] + +# dae_devops_fingerprint 5785b1775d09431a8a064cb67aa564cf diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..1690aff --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,60 @@ +// ********** Please don't edit this file! +// ********** It has been generated automatically by dae_devops version 0.5.2. +// ********** For repository_name chimpflow + +// For format details, see https://containers.dev/implementors/json_reference/ +{ + "name": "Python 3 Developer Container", + "build": { + "dockerfile": "Dockerfile", + "target": "build", + // Only upgrade pip, we will install the project below + "args": { + "PIP_OPTIONS": "--upgrade pip" + } + }, + "remoteEnv": { + "DISPLAY": "${localEnv:DISPLAY}" + }, + // Add the URLs of features you want added when the container is built. + "features": { + "ghcr.io/devcontainers/features/common-utils:1": { + "username": "none", + "upgradePackages": false + } + }, + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python" + }, + "customizations": { + "vscode": { + // Add the IDs of extensions you want installed when the container is created. 
+ "extensions": [ + "ms-python.python", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters" + ] + } + }, + // Make sure the files we are mapping into the container exist on the host + "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", + "runArgs": [ + "--net=host", + "--security-opt=label=type:container_runtime_t" + ], + "mounts": [ + "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", + "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", + // map in home directory - not strictly necessary but useful + "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" + ], + // make the workspace folder the same inside and outside of the container + "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", + "workspaceFolder": "${localWorkspaceFolder}", + // After the container is created, install the python project in editable form + "postCreateCommand": "pip install -e .[dev]" +} + +// dae_devops_fingerprint 045d41997d413a169ed082821ce0dccf diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst new file mode 100644 index 0000000..73d316f --- /dev/null +++ b/.github/CONTRIBUTING.rst @@ -0,0 +1,41 @@ +.. # ********** Please don't edit this file! +.. # ********** It has been generated automatically by dae_devops version 0.5.2. +.. # ********** For repository_name chimpflow + +Contributing to the project +=========================== + +Contributions and issues are most welcome! All issues and pull requests are +handled through GitHub_. Also, please check for any existing issues before +filing a new one. If you have a great idea but it involves big changes, please +file a ticket before making a pull request! We want to make sure you don't spend +your time coding something that might not fit the scope of the project. + +.. _GitHub: https://github.com/DiamondLightSource/chimpflow/issues + +Issue or Discussion? 
+-------------------- + +Github also offers discussions_ as a place to ask questions and share ideas. If +your issue is open ended and it is not obvious when it can be "closed", please +raise it as a discussion instead. + +.. _discussions: https://github.com/DiamondLightSource/chimpflow/discussions + +Code coverage +------------- + +While 100% code coverage does not make a library bug-free, it significantly +reduces the number of easily caught bugs! Please make sure coverage remains the +same or is improved by a pull request! + +Developer guide +--------------- + +The `Developer Guide`_ contains information on setting up a development +environment, running the tests and what standards the code and documentation +should follow. + +.. _Developer Guide: https://diamondlightsource.github.io/chimpflow/main/developer/how-to/contribute.html + +.. # dae_devops_fingerprint 303160a2b536f20a8c1a1ec134e82f71 diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml new file mode 100644 index 0000000..9f359a0 --- /dev/null +++ b/.github/actions/install_requirements/action.yml @@ -0,0 +1,64 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. 
+# ********** For repository_name chimpflow + +name: Install requirements +description: Run pip install with requirements and upload resulting requirements +inputs: + requirements_file: + description: Name of requirements file to use and upload + required: true + install_options: + description: Parameters to pass to pip install + required: true + python_version: + description: Python version to install + default: "3.x" + +runs: + using: composite + + steps: + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python_version }} + + - name: Pip install + run: | + touch ${{ inputs.requirements_file }} + # -c uses requirements.txt as constraints, see 'Validate requirements file' + pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} + shell: bash + + - name: Create lockfile + run: | + mkdir -p lockfiles + pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} + # delete the self referencing line and make sure it isn't blank + sed -i '/file:/d' lockfiles/${{ inputs.requirements_file }} + shell: bash + + - name: Upload lockfiles + uses: actions/upload-artifact@v3 + with: + name: lockfiles + path: lockfiles + + # This eliminates the class of problems where the requirements being given no + # longer match what the packages themselves dictate. E.g. In the rare instance + # where I install some-package which used to depend on vulnerable-dependency + # but now uses good-dependency (despite being nominally the same version) + # pip will install both if given a requirements file with -r + - name: If requirements file exists, check it matches pip installed packages + run: | + if [ -s ${{ inputs.requirements_file }} ]; then + if ! 
diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then + echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive" + exit 1 + fi + fi + shell: bash + + +# dae_devops_fingerprint e7e586c2794a8c6979edef0f62c7ba89 diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..696d4f2 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,22 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + +# dae_devops_fingerprint f732df8e00cb502c9de0f1a614c9351e diff --git a/.github/pages/index.html b/.github/pages/index.html new file mode 100644 index 0000000..4991518 --- /dev/null +++ b/.github/pages/index.html @@ -0,0 +1,17 @@ + + + + + + + + + Redirecting to main branch + + + + + + + + diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py new file mode 100644 index 0000000..cce0153 --- /dev/null +++ b/.github/pages/make_switcher.py @@ -0,0 +1,105 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. 
+# ********** For repository_name chimpflow + +import json +import logging +from argparse import ArgumentParser +from pathlib import Path +from subprocess import CalledProcessError, check_output +from typing import List, Optional + + +def report_output(stdout: bytes, label: str) -> List[str]: + ret = stdout.decode().strip().split("\n") + print(f"{label}: {ret}") + return ret + + +def get_branch_contents(ref: str) -> List[str]: + """Get the list of directories in a branch.""" + stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref]) + return report_output(stdout, "Branch contents") + + +def get_sorted_tags_list() -> List[str]: + """Get a list of sorted tags in descending order from the repository.""" + stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"]) + return report_output(stdout, "Tags list") + + +def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]: + """Generate the file containing the list of all GitHub Pages builds.""" + # Get the directories (i.e. 
builds) from the GitHub Pages branch + try: + builds = set(get_branch_contents(ref)) + except CalledProcessError: + builds = set() + logging.warning(f"Cannot get {ref} contents") + + # Add and remove from the list of builds + if add: + builds.add(add) + if remove: + assert remove in builds, f"Build '{remove}' not in {sorted(builds)}" + builds.remove(remove) + + # Get a sorted list of tags + tags = get_sorted_tags_list() + + # Make the sorted versions list from main branches and tags + versions: List[str] = [] + for version in ["master", "main"] + tags: + if version in builds: + versions.append(version) + builds.remove(version) + + # Add in anything that is left to the bottom + versions += sorted(builds) + print(f"Sorted versions: {versions}") + return versions + + +def write_json(path: Path, repository: str, versions: str): + org, repo_name = repository.split("/") + struct = [ + dict(version=version, url=f"https://{org}.github.io/{repo_name}/{version}/") + for version in versions + ] + text = json.dumps(struct, indent=2) + print(f"JSON switcher:\n{text}") + path.write_text(text) + + +def main(args=None): + parser = ArgumentParser( + description="Make a versions.txt file from gh-pages directories" + ) + parser.add_argument( + "--add", + help="Add this directory to the list of existing directories", + ) + parser.add_argument( + "--remove", + help="Remove this directory from the list of existing directories", + ) + parser.add_argument( + "repository", + help="The GitHub org and repository name: ORG/REPO", + ) + parser.add_argument( + "output", + type=Path, + help="Path of write switcher.json to", + ) + args = parser.parse_args(args) + + # Write the versions file + versions = get_versions("origin/gh-pages", args.add, args.remove) + write_json(args.output, args.repository, versions) + + +if __name__ == "__main__": + main() + +# dae_devops_fingerprint bf1e6514a2184d0710558edb524fb309 diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml new file mode 
100644 index 0000000..0258d15 --- /dev/null +++ b/.github/workflows/code.yml @@ -0,0 +1,215 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +name: Code CI + +on: + push: + pull_request: + schedule: + # Run weekly to check latest versions of dependencies + - cron: "0 8 * * WED" +env: + # The target python version, which must match the Dockerfile version + CONTAINER_PYTHON: "3.11" + +jobs: + lint: + # pull requests are a duplicate of a branch push if within the same repo. + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + requirements_file: requirements-dev-3.x.txt + install_options: -e .[dev,docs] + + - name: Lint + run: tox -e pre-commit,mypy + + test: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest"] # can add windows-latest, macos-latest + python: ["3.10", "3.11"] + install: ["-e .[dev,docs]"] + # Make one version be non-editable to test both paths of version code + include: + - os: "ubuntu-latest" + python: "3.9" + install: ".[dev,docs]" + + runs-on: ${{ matrix.os }} + env: + # https://github.com/pytest-dev/pytest/issues/2042 + PY_IGNORE_IMPORTMISMATCH: "1" + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + python_version: ${{ matrix.python }} + requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt + install_options: ${{ matrix.install }} + + - name: List dependency tree + run: pipdeptree 
+ + - name: Run tests + run: | + sudo apt install environment-modules + export MODULESHOME=/usr/share/modules + source $MODULESHOME/init/bash + pytest + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + name: ${{ matrix.python }}/${{ matrix.os }} + files: cov.xml + + dist: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Build sdist and wheel + run: | + export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ + pipx run build + + - name: Upload sdist and wheel as artifacts + uses: actions/upload-artifact@v3 + with: + name: dist + path: dist + + - name: Check for packaging errors + run: pipx run twine check --strict dist/* + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + python_version: ${{env.CONTAINER_PYTHON}} + requirements_file: requirements.txt + install_options: dist/*.whl + + - name: Test module --version works using the installed wheel + # If more than one module in src/ replace with module name to test + run: chimpflow --version-json + + container: + needs: [lint, dist, test] + runs-on: ubuntu-latest + + permissions: + contents: read + packages: write + + steps: + - name: Checkout + uses: actions/checkout@v3 + + # image names must be all lower case + - name: Generate image repo name + run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV + + - name: Download wheel and lockfiles + uses: actions/download-artifact@v3 + with: + path: .devcontainer + + - name: Log in to GitHub Docker Registry + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker meta + id: meta + 
uses: docker/metadata-action@v4 + with: + images: ${{ env.IMAGE_REPOSITORY }} + tags: | + type=ref,event=tag + type=raw,value=latest + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v2 + + - name: Build runtime image + uses: docker/build-push-action@v4 + with: + build-args: | + PIP_OPTIONS=-r lockfiles/requirements.txt dist/*.whl + push: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }} + load: ${{ ! (github.event_name == 'push' && startsWith(github.ref, 'refs/tags')) }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + context: .devcontainer + # If you have a long docker build, uncomment the following to turn on caching + # For short build times this makes it a little slower + #cache-from: type=gha + #cache-to: type=gha,mode=max + + - name: Test cli works in runtime image + run: docker run ${{ env.IMAGE_REPOSITORY }} --version + + release: + # upload to PyPI and make a release on every tag + needs: [lint, dist, test] + if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }} + runs-on: ubuntu-latest + env: + HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} + + steps: + - uses: actions/download-artifact@v3 + + - name: Fixup blank lockfiles + # Github release artifacts can't be blank + run: for f in lockfiles/*; do [ -s $f ] || echo '# No requirements' >> $f; done + + - name: Github Release + # We pin to the SHA, not the tag, for security reasons. 
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 + with: + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} + files: | + dist/* + lockfiles/* + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Publish to PyPI + if: ${{ env.HAS_PYPI_TOKEN }} + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.PYPI_TOKEN }} + +# dae_devops_fingerprint 618b4ddc2d21af6824e6444a1c40caa7 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..d45dc59 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,59 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +name: Docs CI + +on: + push: + pull_request: + +jobs: + docs: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: ubuntu-latest + + steps: + - name: Avoid git conflicts when tag and branch pushed at same time + if: startsWith(github.ref, 'refs/tags') + run: sleep 60 + + - name: Checkout + uses: actions/checkout@v3 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Install system packages + # Can delete this if you don't use graphviz in your docs + run: sudo apt-get install graphviz + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + requirements_file: requirements-dev-3.x.txt + install_options: -e .[dev,docs] + + - name: Build docs + run: tox -e docs + + - name: Sanitize ref name for docs version + run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV + + - name: Move to versioned directory + run: mv 
build/html .github/pages/$DOCS_VERSION + + - name: Write switcher.json + run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json + + - name: Publish Docs to gh-pages + if: github.event_name == 'push' && github.actor != 'dependabot[bot]' + # We pin to the SHA, not the tag, for security reasons. + # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: .github/pages + keep_files: true + +# dae_devops_fingerprint a146e4a2bb02a7386ed24898b5d29200 diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml new file mode 100644 index 0000000..aa6854b --- /dev/null +++ b/.github/workflows/docs_clean.yml @@ -0,0 +1,49 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. 
+# ********** For repository_name chimpflow + +name: Docs Cleanup CI + +# delete branch documentation when a branch is deleted +# also allow manually deleting a documentation version +on: + delete: + workflow_dispatch: + inputs: + version: + description: "documentation version to DELETE" + required: true + type: string + +jobs: + remove: + if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + ref: gh-pages + + - name: removing documentation for branch ${{ github.event.ref }} + if: ${{ github.event_name != 'workflow_dispatch' }} + run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV + + - name: manually removing documentation version ${{ github.event.inputs.version }} + if: ${{ github.event_name == 'workflow_dispatch' }} + run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV + + - name: Sanitize ref name for docs version + run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV + + - name: update index and push changes + run: | + rm -r $DOCS_VERSION + python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json + git config --global user.name 'GitHub Actions Docs Cleanup CI' + git config --global user.email 'GithubActionsCleanup@noreply.github.com' + git commit -am "Removing redundant docs version $DOCS_VERSION" + git push + +# dae_devops_fingerprint 9d06f858b84de2523c688330e687075b diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml new file mode 100644 index 0000000..fd86904 --- /dev/null +++ b/.github/workflows/linkcheck.yml @@ -0,0 +1,30 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. 
+# ********** For repository_name chimpflow + +name: Link Check + +on: + workflow_dispatch: + schedule: + # Run weekly to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + docs: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Install python packages + uses: ./.github/actions/install_requirements + with: + requirements_file: requirements-dev-3.x.txt + install_options: -e .[dev] + + - name: Check links + run: tox -e docs build -- -b linkcheck + +# dae_devops_fingerprint 918a51a02629220f46f3e8dad2e460eb diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7515401 --- /dev/null +++ b/.gitignore @@ -0,0 +1,76 @@ +# Temporary excel files. +~$*.xlsm +excel/lab36/echo/** + +# Local database +tt00000-0 + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +.venv +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg +**/_version.py + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +cov.xml +.pytest_cache/ +.mypy_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# likely venv names +.venv* +venv* + +# further build artifacts +lockfiles/ + diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..88b7e85 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,90 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. 
+# ********** For repository_name chimpflow + +variables: + # I put the package_pip artifacts int his place just until I can use the corporate internal pipserver. + # Maybe /dls_sw/work/python3/RHEL7-x86_64/distributions would be a better place? + PIP_FIND_LINKS: "/dls_sw/apps/bxflow/artifacts" + +default: + tags: + # Argus is the production Kubernetes. + - argus + image: + # Use official language image. + name: python:3.9 + before_script: + - df + - echo $PIP_FIND_LINKS + - pip install -e .[dev,docs] + # ----- + - git config --global credential.helper "/bin/bash /git_creds.sh" + - echo '#!/usr/bin/env bash' > /git_creds.sh + - echo "sleep 1" >> /git_creds.sh + - echo "echo username=gitlab-ci-token" >> /git_creds.sh + - echo "echo password=$CI_JOB_TOKEN" >> /git_creds.sh + # ----- + - make -f .dae-devops/Makefile prepare_git_dependencies + +stages: + - validate + - package + # Disabling the publish part until a runner has a place to publish to. + # - publish + +# --------------------------------------------------------------------- +# The validate stage makes sure code is ready to package and release. + +validate_pre_commit: + stage: validate + script: + - make -f .dae-devops/Makefile validate_pre_commit + +validate_mypy: + stage: validate + script: + - make -f .dae-devops/Makefile validate_mypy + +validate_pytest: + stage: validate + script: + - make -f .dae-devops/Makefile validate_pytest + +validate_docs: + stage: validate + script: + - make -f .dae-devops/Makefile validate_docs + artifacts: + paths: + - build/html + +# --------------------------------------------------------------------- +# The pip state packages and publishes for pip. + +package_pip: + stage: package + script: + - make -f .dae-devops/Makefile package_pip + artifacts: + paths: + - dist/* +# --------------------------------------------------------------------- +# The publish stage publishes stuff for general consumption. +# Will use artifacts from previous stages. 
+ +# Disabling the publish part until a runner has a place to publish to. +# It can be done manually from the command line using the make commands. +# publish_pip: +# stage: publish +# script: +# # The package_pip artifacts are in the dist folder. +# - make -f .dae-devops/Makefile publish_pip + +# publish_docs: +# stage: publish +# script: +# # The validate_docs artifacts are in the build/html folder. +# - make -f .dae-devops/Makefile publish_docs + +# dae_devops_fingerprint 8ef619190b6341d741696f65e16ef3d5 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..5e270b0 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,23 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.3.0 + hooks: + - id: check-added-large-files + - id: check-yaml + - id: check-merge-conflict + + - repo: local + hooks: + - id: black + name: Run black + stages: [commit] + language: system + entry: black --check --diff + types: [python] + + - id: flake8 + name: Run flake8 + stages: [commit] + language: system + entry: flake8 + types: [python] diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..8192299 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,9 @@ +{ + "recommendations": [ + "ms-vscode-remote.remote-containers", + "ms-python.python", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters" + ] +} \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..f8fcdb4 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Debug Unit Test", + "type": "python", + "request": "launch", + "justMyCode": false, + "program": "${file}", + "purpose": [ + "debug-test" + ], + "console": "integratedTerminal", + "env": { + // The default config in setup.cfg's "[tool:pytest]" adds coverage. + // Cannot have coverage and debugging at the same time. + // https://github.com/microsoft/vscode-python/issues/693 + "PYTEST_ADDOPTS": "--no-cov" + }, + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..2472acf --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,15 @@ +{ + "python.linting.pylintEnabled": false, + "python.linting.flake8Enabled": true, + "python.linting.mypyEnabled": true, + "python.linting.enabled": true, + "python.testing.pytestArgs": [], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.formatting.provider": "black", + "python.languageServer": "Pylance", + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true + } +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..946e69d --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,16 @@ +// See https://go.microsoft.com/fwlink/?LinkId=733558 +// for the documentation about the tasks.json format +{ + "version": "2.0.0", + "tasks": [ + { + "type": "shell", + "label": "Tests, lint and docs", + "command": "tox -p", + "options": { + "cwd": "${workspaceRoot}" + }, + "problemMatcher": [], + } + ] +} \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 
1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright {yyyy} {name of copyright owner}
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..0e9babb
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,30 @@
+VERSION = $(shell chimpflow --version)
+
+# Shell to use instead of /bin/sh.
+SHELL := /bin/bash
+
+# Run all commands in the target in a single shell.
+.ONESHELL:
+
+# ------------------------------------------------------------------
+# Install into conda.
+# To keep /home/$(USER)/.conda/pkgs from being used (and filling up disk quota) do this: +# conda config --add pkgs_dirs /scratch/kbp43231/conda/pkgs + +CONDA_PUBLIC_PREFIX = /dls_sw/apps/xchem/conda/envs/xchem_chimpflow/$(VERSION) +CONDA_LOCAL_PREFIX = /scratch/$(USER)/conda/envs/xchem_chimpflow +PYTHON_VERSION = 3.9 +CONDA_ACTIVATE = source $$(conda info --base)/etc/profile.d/conda.sh ; conda activate ; conda activate + +# Create the conda environment for local development. +create_local_conda: + rm -rf $(CONDA_LOCAL_PREFIX) + module load mamba && \ + mamba create -y --prefix $(CONDA_LOCAL_PREFIX) python=$(PYTHON_VERSION) + +# Install the packages into the conda environment. +provision_local_conda: + $(CONDA_ACTIVATE) $(CONDA_LOCAL_PREFIX) + mamba env update -f conda.yaml + + diff --git a/README.md b/README.md deleted file mode 100644 index a3f9375..0000000 --- a/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# xchem_chimpflow -Orchestrating chimp jobs triggered by incoming images. diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..e2b8101 --- /dev/null +++ b/README.rst @@ -0,0 +1,38 @@ +chimpflow +======================================================================= + +Orchestrating chimp crystal detection triggered by incoming images. + +Installation +----------------------------------------------------------------------- +:: + + pip install chimpflow + + chimpflow --version + +Documentation +----------------------------------------------------------------------- + +See https://www.cs.diamond.ac.uk/chimpflow for more detailed documentation. 
+
+Building and viewing the documents locally::
+
+    git clone git+https://gitlab.diamond.ac.uk/scisoft/bxflow/chimpflow.git
+    cd chimpflow
+    virtualenv /scratch/$USER/venv/chimpflow
+    source /scratch/$USER/venv/chimpflow/bin/activate
+    pip install -e .[dev]
+    make -f .dae-devops/Makefile validate_docs
+    browse to file:///scratch/$USER/venv/chimpflow/build/html/index.html
+
+Topics for further documentation:
+
+- TODO list of improvements
+- change log
+
+
+..
+    Anything below this line is used when viewing README.rst and will be replaced
+    when included in index.rst
+
diff --git a/conda.yaml b/conda.yaml
new file mode 100644
index 0000000..7ce0b63
--- /dev/null
+++ b/conda.yaml
@@ -0,0 +1,14 @@
+channels:
+  - pytorch
+  - conda-forge
+  - defaults
+dependencies:
+  - imageio
+  - matplotlib
+  - scikit-image
+  - pip
+  - pytorch
+  - torchvision
+  - yaml
+  - pip:
+      - albumentations
diff --git a/configurations/development.yaml b/configurations/development.yaml
new file mode 100644
index 0000000..778dcf3
--- /dev/null
+++ b/configurations/development.yaml
@@ -0,0 +1,74 @@
+type: chimpflow_lib.chimpflow_contexts.classic
+
+logging_settings:
+    console:
+        enabled: True
+        verbose: True
+    logfile:
+        enabled: True
+        directory: chimpflow
+    graypy:
+        enabled: False
+        host: 172.23.7.128
+        port: 12201
+        protocol: UDP
+
+# The external access bits.
+external_access_bits: + dls_servbase_database_filename: &DLS_BILLY_DATABASE_FILENAME /scratch/${USER}/chimpflow/databases/dls_servbase_dataface.sqlite + database_filename: &DATABASE_FILENAME /scratch/${USER}/chimpflow/databases/runtime/chimpflow.sqlite + dls_servbase_dataface_server: &DLS_BILLY_DATAFACE_SERVER http://*:27620 + dls_servbase_dataface_client: &DLS_BILLY_DATAFACE_CLIENT http://localhost:27620 + dataface_server: &CHIMPFLOW_DATAFACE_SERVER http://*:27621 + dataface_client: &CHIMPFLOW_DATAFACE_CLIENT http://localhost:27621 + collector_server: &COLLECTOR_SERVER http://*:27627 + collector_client: &COLLECTOR_CLIENT http://localhost:27627 + chimpflow_gui_server: &CHIMPFLOW_GUI_SERVER http://*:27622 + chimpflow_gui_client: &CHIMPFLOW_GUI_CLIENT http://127.0.0.1:27622 + +# The dls_servbase_dataface client/server composite. +dls_servbase_dataface_specification: &DLS_BILLY_DATAFACE_SPECIFICATION + type: "dls_servbase_lib.datafaces.aiohttp" + type_specific_tbd: + # The remote dataface server access. + aiohttp_specification: + server: *DLS_BILLY_DATAFACE_SERVER + client: *DLS_BILLY_DATAFACE_CLIENT + # The local implementation of the dataface. + actual_dataface_specification: + type: "dls_servbase_lib.datafaces.aiosqlite" + database: + type: "dls_servbase_lib.databases.aiosqlite" + filename: *DLS_BILLY_DATABASE_FILENAME + log_level: "WARNING" + +# The chimpflow_dataface client/server composite. +chimpflow_dataface_specification: &CHIMPFLOW_DATAFACE_SPECIFICATION + type: "chimpflow_lib.chimpflow_datafaces.aiohttp" + type_specific_tbd: + # The remote chimpflow_dataface server access. + aiohttp_specification: + server: *CHIMPFLOW_DATAFACE_SERVER + client: *CHIMPFLOW_DATAFACE_CLIENT + # The local implementation of the chimpflow_dataface. 
+ actual_chimpflow_dataface_specification: + type: "chimpflow_lib.chimpflow_datafaces.aiosqlite" + database: + type: "chimpflow_lib.chimpflow_databases.aiosqlite" + filename: *DATABASE_FILENAME + log_level: "WARNING" + +# The chimpflow_collector which scans for incoming images. +chimpflow_collector_specification: + type: "dls_chimpflow_lib.chimpflow_collectors.aiohttp" + type_specific_tbd: + aiohttp_specification: + server: *COLLECTOR_SERVER + client: *COLLECTOR_CLIENT + actual_collector_specification: + type: dls_chimpflow_lib.chimpflow_collectors.scrape_to_database + type_specific_tbd: + directories: + - "/27/chimpflow/tests/images" + - "/dls/science/groups/i04-1/software/luigi_pipeline/imager_pipe/SubwellImages" + recursive: True diff --git a/configurations/tutorial.yaml b/configurations/tutorial.yaml new file mode 100644 index 0000000..a3b47a0 --- /dev/null +++ b/configurations/tutorial.yaml @@ -0,0 +1,73 @@ +type: chimpflow_lib.chimpflow_contexts.classic + +logging_settings: + console: + enabled: True + verbose: False + logfile: + enabled: True + directory: chimpflow + graypy: + enabled: False + host: 172.23.7.128 + port: 12201 + protocol: UDP + +# The external access bits. +external_access_bits: + dls_servbase_database_filename: &DLS_BILLY_DATABASE_FILENAME ${CWD}/dls_servbase_dataface.sqlite + database_filename: &DATABASE_FILENAME ${CWD}/chimpflow.sqlite + dls_servbase_dataface_server: &DLS_BILLY_DATAFACE_SERVER http://*:27620 + dls_servbase_dataface_client: &DLS_BILLY_DATAFACE_CLIENT http://localhost:27620 + dataface_server: &CHIMPFLOW_DATAFACE_SERVER http://*:27621 + dataface_client: &CHIMPFLOW_DATAFACE_CLIENT http://localhost:27621 + collector_server: &COLLECTOR_SERVER http://*:27627 + collector_client: &COLLECTOR_CLIENT http://localhost:27627 + chimpflow_gui_server: &CHIMPFLOW_GUI_SERVER http://*:27622 + chimpflow_gui_client: &CHIMPFLOW_GUI_CLIENT http://127.0.0.1:27622 + +# The dls_servbase_dataface client/server composite. 
+dls_servbase_dataface_specification: &DLS_BILLY_DATAFACE_SPECIFICATION + type: "dls_servbase_lib.datafaces.aiohttp" + type_specific_tbd: + # The remote dataface server access. + aiohttp_specification: + server: *DLS_BILLY_DATAFACE_SERVER + client: *DLS_BILLY_DATAFACE_CLIENT + # The local implementation of the dataface. + actual_dataface_specification: + type: "dls_servbase_lib.datafaces.aiosqlite" + database: + type: "dls_servbase_lib.databases.aiosqlite" + filename: *DLS_BILLY_DATABASE_FILENAME + log_level: "WARNING" + +# The chimpflow_dataface client/server composite. +chimpflow_dataface_specification: &CHIMPFLOW_DATAFACE_SPECIFICATION + type: "chimpflow_lib.chimpflow_datafaces.aiohttp" + type_specific_tbd: + # The remote chimpflow_dataface server access. + aiohttp_specification: + server: *CHIMPFLOW_DATAFACE_SERVER + client: *CHIMPFLOW_DATAFACE_CLIENT + # The local implementation of the chimpflow_dataface. + actual_chimpflow_dataface_specification: + type: "chimpflow_lib.chimpflow_datafaces.aiosqlite" + database: + type: "chimpflow_lib.chimpflow_databases.aiosqlite" + filename: *DATABASE_FILENAME + log_level: "WARNING" + +# The chimpflow_collector which scans for incoming images. +chimpflow_collector_specification: + type: "dls_chimpflow_lib.chimpflow_collectors.aiohttp" + type_specific_tbd: + aiohttp_specification: + server: *COLLECTOR_SERVER + client: *COLLECTOR_CLIENT + actual_collector_specification: + type: dls_chimpflow_lib.chimpflow_collectors.scrape_to_database + type_specific_tbd: + directories: + - ${CWD}/example_images + recursive: True diff --git a/docs/_static/css/custom.css b/docs/_static/css/custom.css new file mode 100644 index 0000000..951e75d --- /dev/null +++ b/docs/_static/css/custom.css @@ -0,0 +1,18 @@ +/* ********** Please don't edit this file! */ +/* ********** It has been generated automatically by dae_devops version 0.5.2. 
*/ +/* ********** For repository_name chimpflow */ + +/* The theme normally has this, but I think it's ok to use the full width of the window in all @media sizes. + +@media (min-width: 960px) +.bd-page-width { + max-width: 88rem; +} + +*/ + +.bd-page-width { + max-width: 100%; +} + +/* dae_devops_fingerprint a9941a0b184be2345dd5b19aaac7ac1f */ diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..2096373 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,200 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +from pathlib import Path +from subprocess import check_output + +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html +from sphinx.domains.python import PythonDomain + +import chimpflow_lib + +# -- General configuration ------------------------------------------------ + +# General information about the project. +project = "chimpflow" + +# The full version, including alpha/beta/rc tags. +release = chimpflow_lib.__version__ + +# The short X.Y version. 
+if "+" in release: + # Not on a tag, use branch name + root = Path(__file__).absolute().parent.parent + git_branch = check_output("git branch --show-current".split(), cwd=root) + version = git_branch.decode().strip() +else: + version = release + +extensions = [ + # Use this for generating API docs + "sphinx.ext.autodoc", + # This can parse google style docstrings + "sphinx.ext.napoleon", + # For linking to external sphinx documentation + "sphinx.ext.intersphinx", + # Add links to source code in API docs + "sphinx.ext.viewcode", + # Adds the inheritance-diagram generation directive + "sphinx.ext.inheritance_diagram", + # Add a copy button to each code block + "sphinx_copybutton", + # For the card element + "sphinx_design", + # For command line tools autodoc. + "sphinxarg.ext", + # Create pages from jupyter notebooks + "nbsphinx", + "IPython.sphinxext.ipython_console_highlighting", +] + +# If true, Sphinx will warn about all references where the target cannot +# be found. +nitpicky = True + +# A list of (type, target) tuples (by default empty) that should be ignored when +# generating warnings in "nitpicky mode". Note that type should include the +# domain name if present. Example entries would be ('py:func', 'int') or +# ('envvar', 'LD_LIBRARY_PATH'). 
+nitpick_ignore = [("py:class", "numpy.ma.core.MaskedArray")] + +# Workaround for NewType as autodata, to be removed when issue is resolved +# see: https://github.com/sphinx-doc/sphinx/issues/9560 +assert PythonDomain.object_types["data"].roles == ("data", "obj") +PythonDomain.object_types["data"].roles = ("data", "class", "obj") +# Both the class’ and the __init__ method’s docstring are concatenated and +# inserted into the main body of the autoclass directive +autoclass_content = "both" + +# Order the members by the order they appear in the source code +autodoc_member_order = "bysource" + +# Don't inherit docstrings from baseclasses +autodoc_inherit_docstrings = False + +# Don't show the typehints in the function/method signature. +autodoc_typehints = "description" + +# Output graphviz directive produced images in a scalable format +graphviz_output_format = "svg" + +# The name of a reST role (builtin or Sphinx extension) to use as the default +# role, that is, for text marked up `like this` +default_role = "any" + +# The suffix of source filenames. +source_suffix = ".rst" + +# The master toctree document. +master_doc = "index" + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# These patterns also affect html_static_path and html_extra_path +exclude_patterns = ["_build"] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# This means you can link things like `str` and `asyncio` to the relevant +# docs in the python documentation. +intersphinx_mapping = dict( + python=("https://docs.python.org/3/", None), + numpy=("https://numpy.org/doc/stable/", None), +) + +# A dictionary of graphviz graph attributes for inheritance diagrams. +inheritance_graph_attrs = dict(rankdir="TB") + +# Common links that should be available on every page +rst_epilog = """ +.. _Diamond Light Source: http://www.diamond.ac.uk +.. 
_black: https://github.com/psf/black
+.. _flake8: https://flake8.pycqa.org/en/latest/
+.. _isort: https://github.com/PyCQA/isort
+.. _mypy: http://mypy-lang.org/
+.. _pre-commit: https://pre-commit.com/
+"""
+
+# Ignore localhost links for periodic check that links in docs are valid
+linkcheck_ignore = [r"http://localhost:\d+/"]
+
+# Set copy-button to ignore python and bash prompts
+# https://sphinx-copybutton.readthedocs.io/en/latest/use.html#using-regexp-prompt-identifiers
+copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: "
+copybutton_prompt_is_regexp = True
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = "pydata_sphinx_theme"
+
+# These folders are copied to the documentation's HTML output
+html_static_path = ["_static"]
+
+# Paths are either relative to html_static_path or fully qualified paths (eg. https://...)
+html_css_files = [
+    # Custom css to allow use of full window width in the browser.
+    "css/custom.css",
+]
+
+# Theme options for pydata_sphinx_theme
+html_theme_options = dict(
+    logo=dict(
+        text=project,
+    ),
+    # The repository lives on GitHub, so use the theme's github_url option
+    # (was gitlab_url with a duplicated ".../chimpflow/chimpflow" path).
+    github_url="https://github.com/diamondlightsource/chimpflow",
+    icon_links=[],
+    navbar_end=["theme-switcher", "icon-links"],
+)
+
+# A dictionary of values to pass into the template engine’s context for all pages
+html_context = dict(
+    doc_path="docs",
+)
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+html_show_sphinx = False
+
+# Disable the link to show the rst source.
+# I did this since it's noise on the page and most audience doesn't care to see the raw rst.
+html_show_sourcelink = False
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+html_show_copyright = False + +# Logo +html_logo = "images/dls-logo.svg" +html_favicon = "images/dls-favicon.ico" + + +def ultimateReplace(app, docname, source): + result = source[0] + for key in app.config.ultimate_replacements: + result = result.replace(key, app.config.ultimate_replacements[key]) + source[0] = result + + +# I got this from https://github.com/sphinx-doc/sphinx/issues/4054. +# It will allow the ${token} replacement in the rst documents. +ultimate_replacements = { + "$" + "{repository_name}": "chimpflow", + "$" + "{package_name}": "chimpflow_lib", + "$" + "{git_url}": "https://github.com/diamondlightsource/chimpflow", + "$" + "{python_version_at_least}": "3.9", +} + + +def setup(app): + app.add_config_value("ultimate_replacements", {}, True) + app.connect("source-read", ultimateReplace) + + +# dae_devops_fingerprint b80ddf31fa2e6cb77cfd3f049739c5c3 diff --git a/docs/images/dls-favicon.ico b/docs/images/dls-favicon.ico new file mode 100644 index 0000000..9a11f50 Binary files /dev/null and b/docs/images/dls-favicon.ico differ diff --git a/docs/images/dls-logo.svg b/docs/images/dls-logo.svg new file mode 100644 index 0000000..0af1a17 --- /dev/null +++ b/docs/images/dls-logo.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..7ae3783 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: ../README.rst + :end-before: when included in index.rst + +Source repository +----------------------------------------------------------------------- + +${git_url}/${repository_name} + +Documentation +----------------------------------------------------------------------- + +.. grid:: 2 + + .. grid-item-card:: :material-regular:`person;4em` + :link: user/index + :link-type: doc + + The User Guide contains documentation on how to install and use the software. + +.. 
toctree:: + :hidden: + + user/index diff --git a/docs/user/explanations/22-developing.rst b/docs/user/explanations/22-developing.rst new file mode 100644 index 0000000..735ac5c --- /dev/null +++ b/docs/user/explanations/22-developing.rst @@ -0,0 +1,4 @@ +.. + Use the file automatically generated by dae-devops. + +.. include:: ../../../.dae-devops/docs/developing.rst \ No newline at end of file diff --git a/docs/user/explanations/23-testing.rst b/docs/user/explanations/23-testing.rst new file mode 100644 index 0000000..35336eb --- /dev/null +++ b/docs/user/explanations/23-testing.rst @@ -0,0 +1,4 @@ +.. + Use the file automatically generated by dae-devops. + +.. include:: ../../../.dae-devops/docs/testing.rst \ No newline at end of file diff --git a/docs/user/explanations/24-devops.rst b/docs/user/explanations/24-devops.rst new file mode 100644 index 0000000..f426f7a --- /dev/null +++ b/docs/user/explanations/24-devops.rst @@ -0,0 +1,4 @@ +.. + Use the file automatically generated by dae-devops. + +.. include:: ../../../.dae-devops/docs/devops.rst \ No newline at end of file diff --git a/docs/user/explanations/25-docs-structure.rst b/docs/user/explanations/25-docs-structure.rst new file mode 100644 index 0000000..f25a09b --- /dev/null +++ b/docs/user/explanations/25-docs-structure.rst @@ -0,0 +1,18 @@ +About the documentation +----------------------- + + :material-regular:`format_quote;2em` + + The Grand Unified Theory of Documentation + + -- David Laing + +There is a secret that needs to be understood in order to write good software +documentation: there isn't one thing called *documentation*, there are four. + +They are: *tutorials*, *how-to guides*, *technical reference* and *explanation*. +They represent four different purposes or functions, and require four different +approaches to their creation. Understanding the implications of this will help +improve most documentation - often immensely. + +`More information on this topic. 
`_ diff --git a/docs/user/how-to/01-installing_development.rst b/docs/user/how-to/01-installing_development.rst new file mode 100644 index 0000000..d398810 --- /dev/null +++ b/docs/user/how-to/01-installing_development.rst @@ -0,0 +1,4 @@ +.. + Use the file automatically generated by dae-devops. + +.. include:: ../../../.dae-devops/docs/installing.rst \ No newline at end of file diff --git a/docs/user/index.rst b/docs/user/index.rst new file mode 100644 index 0000000..9e70993 --- /dev/null +++ b/docs/user/index.rst @@ -0,0 +1,58 @@ +User Guide +========== + +Documentation is split into four categories, also accessible from links in the side-bar. + +.. grid:: 2 + :gutter: 4 + + .. grid-item-card:: :material-regular:`directions_walk;3em` + + .. toctree:: + :caption: Tutorials + :maxdepth: 1 + + tutorials/tbd + + +++ + + Tutorials for installation and typical usage. New users start here. + + .. grid-item-card:: :material-regular:`directions;3em` + + .. toctree:: + :caption: How-to Guides + :maxdepth: 1 + + how-to/01-installing_development + + +++ + + Practical step-by-step guides for the more experienced user. + + .. grid-item-card:: :material-regular:`info;3em` + + .. toctree:: + :caption: Explanations + :maxdepth: 1 + + explanations/22-developing + explanations/23-testing + explanations/24-devops + explanations/25-docs-structure + + +++ + + Explanations of how the library works and why it works that way. + + .. grid-item-card:: :material-regular:`menu_book;3em` + + .. toctree:: + :caption: Reference + :maxdepth: 1 + + reference/api + + +++ + + Technical reference material including APIs and release notes. diff --git a/docs/user/reference/api.rst b/docs/user/reference/api.rst new file mode 100644 index 0000000..313507a --- /dev/null +++ b/docs/user/reference/api.rst @@ -0,0 +1,9 @@ +API +======================================================================= + +.. 
toctree::
+    :maxdepth: 1
+
+    api/command_line
+    api/classes
+    api/modules
diff --git a/docs/user/reference/api/classes.rst b/docs/user/reference/api/classes.rst
new file mode 100644
index 0000000..d96d810
--- /dev/null
+++ b/docs/user/reference/api/classes.rst
@@ -0,0 +1,5 @@
+Classes
+=======================================================================
+
+.. autoclass:: dls_multiconf_lib.multiconfs.Multiconfs
+    :members:
diff --git a/docs/user/reference/api/command_line.rst b/docs/user/reference/api/command_line.rst
new file mode 100644
index 0000000..85eeef3
--- /dev/null
+++ b/docs/user/reference/api/command_line.rst
@@ -0,0 +1,9 @@
+Command Line
+=======================================================================
+
+chimpflow
+-----------------------------------------------------------------------
+.. argparse::
+    :module: chimpflow_lib.__main__
+    :func: get_parser
+    :prog: chimpflow
diff --git a/docs/user/reference/api/modules.rst b/docs/user/reference/api/modules.rst
new file mode 100644
index 0000000..8ec9804
--- /dev/null
+++ b/docs/user/reference/api/modules.rst
@@ -0,0 +1,4 @@
+Functions
+=======================================================================
+.. automodule:: chimpflow_lib.version
+    :members:
diff --git a/docs/user/tutorials/tbd.rst b/docs/user/tutorials/tbd.rst
new file mode 100644
index 0000000..aae88a1
--- /dev/null
+++ b/docs/user/tutorials/tbd.rst
@@ -0,0 +1,4 @@
+Tutorials
+=======================================================================
+
+TBD
\ No newline at end of file
diff --git a/modulefiles/conda b/modulefiles/conda
new file mode 100644
index 0000000..ca77415
--- /dev/null
+++ b/modulefiles/conda
@@ -0,0 +1,57 @@
+#%Module -*- tcl -*-
+
+proc ModulesHelp { } {
+    puts stderr "\tSets up the environment for chimpflow commands."
+} + +module-whatis "sets up the environment for chimpflow commands" + +set mach $tcl_platform(machine) +set arch 64 + +if { [module-info mode load]} { + # load the default directories + if { ! [is-loaded global/directories] } { + module load global/directories + } +} + +set anaconda_version 4.6.14 +set pymajorversion 3.9 + +set PYTHON_HOME $env(softwaredir)/python/anaconda/$anaconda_version/$arch/envs/python$pymajorversion +set PYTHON_BASE_HOME $env(softwaredir)/python/anaconda/$anaconda_version/$arch + +setenv PYTHON_HOME $PYTHON_HOME +setenv PYTHON_BASE_HOME $PYTHON_BASE_HOME + +# The chimpflow conda environment we want to use. +set chimpflow_version 1.4.1 + +if { [module-info mode remove] && ![module-info mode switch3] } { + puts stdout "conda deactivate;" +} + +# Function to display versions. +set about_command "python -m chimpflow_cli.main --about" +set-alias "chimpflow_about" $about_command + +# Function to start all the services. +set-alias "chimpflow_start" "python -m chimpflow_cli.main start_services dls_servbase_dataface dataface collector gui" + +# Function to submit a workflow. +set-alias "chimpflow_submit" "python -m chimpflow_cli.main submit" + +# Function to tail the log file. +set-alias "chimpflow_logtail" "tail -f /tmp/logs/chimpflow/logformatter.log" + +# instructions based on https://github.com/conda/conda/blob/master/CHANGELOG.md#440-2017-12-20 +set source_command "source $PYTHON_BASE_HOME/etc/profile.d/conda.sh" +set conda_command "conda activate /dls_sw/apps/xchem/conda/envs/chimpflow/$chimpflow_version" + +if { [module-info mode load] || [module-info mode switch2] } { + puts stdout "$source_command;" + puts stdout "$conda_command;" + puts stderr "Chimpflow is loaded. Components are..." 
+ puts stdout "$about_command;" +} \ No newline at end of file diff --git a/modulefiles/paths b/modulefiles/paths new file mode 100644 index 0000000..1df926d --- /dev/null +++ b/modulefiles/paths @@ -0,0 +1,15 @@ +#%Module -*- tcl -*- + +proc ModulesHelp { } { + puts stderr "\tSets up the PYTHONPATH for chimpflow commands." +} + +module-whatis "sets up the PYTHONPATH for chimpflow commands" + +# The chimpflow pip environment we want to use. +set chimpflow_version 1.4.1 + +# Provide chimpflow runtime python modules. +# This is python 3.9, so must agree with whatever the science environment has installed. +prepend-path PYTHONPATH /dls_sw/apps/xchem/pippy_place/chimpflow/$chimpflow_version + \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..823b26a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,106 @@ +# ********** Please don't edit this file! +# ********** It has been generated automatically by dae_devops version 0.5.2. +# ********** For repository_name chimpflow + +[build-system] +requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "chimpflow" +classifiers = [ + "Development Status :: 3 - Alpha", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] +description = "XChem service to apply CHIMP detection to newly arrived Formulatrix Rockmaker images." 
+dependencies = ["xchembku", "dls_servbase", "dls_mainiac", "dls_multiconf", "dls_utilpack"]
+dynamic = ["version"]
+license.file = "LICENSE"
+readme = "README.rst"
+requires-python = ">=3.9"
+
+[project.optional-dependencies]
+dev = [
+    "black==22.12.0",
+    "mypy",
+    "flake8-isort",
+    "Flake8-pyproject",
+    "pipdeptree",
+    "pre-commit",
+    "pytest-cov",
+    "pydocstyle[toml]",
+    "tox-direct",
+    "types-mock",
+    "types-PyYAML",
+]
+docs = [
+    "pydata-sphinx-theme>=0.12",
+    "sphinx-argparse",
+    "sphinx-autobuild",
+    "sphinx-copybutton",
+    "sphinx-design",
+    "tox-direct",
+    "nbsphinx",
+    "pandoc",
+    "ipykernel",
+    "h5py",
+    "matplotlib",
+]
+
+[project.scripts]
+chimpflow = "chimpflow_lib.__main__:main"
+"chimpflow.cli" = "chimpflow_cli.main:main"
+
+[project.urls]
+GitHub = "https://github.com/diamondlightsource/chimpflow"
+
+[[project.authors]] # Further authors may be added by duplicating this section
+email = "david.erb@diamond.ac.uk"
+name = "David Erb"
+
+
+[tool.setuptools_scm]
+write_to = "src/chimpflow_lib/_version.py"
+
+[tool.mypy]
+ignore_missing_imports = true # Ignore missing stubs in imported modules
+
+[tool.isort]
+float_to_top = true
+profile = "black"
+
+[tool.flake8]
+extend-ignore = [
+    "E501", # Allow long lines that black doesn't fix.
+ "E203", # See https://github.com/PyCQA/pycodestyle/issues/373 + "F811", # support typing.overload decorator + "F722", # allow Annotated[typ, some_func("some string")] +] +max-line-length = 88 # Respect black's line length (default 88), +exclude = [".tox", "venv"] + + +[tool.pytest.ini_options] +# Run pytest with all our checkers, and don't spam us with massive tracebacks on error +addopts = " --tb=native -vv --doctest-modules --doctest-glob=\"*.rst\"\n --cov=chimpflow_lib --cov-report term --cov-report xml:cov.xml\n " +# https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings +filterwarnings = "error" +# Doctest python code in docs, python code in src docstrings, test functions in tests +testpaths = "docs src tests" + +[tool.coverage.run] +data_file = "/tmp/chimpflow_lib.coverage" + +[tool.coverage.paths] +# Tests are run from installed location, map back to the src directory +source = ["src", "**/site-packages/"] + +# tox must currently be configured via an embedded ini string +# See: https://github.com/tox-dev/tox/issues/999 +[tool.tox] +legacy_tox_ini = "[tox]\nskipsdist=True\n\n[testenv:{pre-commit,mypy,pytest,docs}]\n# Don't create a virtualenv for the command, requires tox-direct plugin\ndirect = True\npassenv = *\nallowlist_externals = \n pytest \n pre-commit\n mypy\n sphinx-build\n sphinx-autobuild\ncommands =\n pytest: pytest {posargs}\n mypy: mypy src tests {posargs}\n pre-commit: pre-commit run --all-files {posargs}\n docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html\n" + +# dae_devops_fingerprint fdee84354ecb2edb777827c3a91cef41 diff --git a/src/chimpflow_cli/__init__.py b/src/chimpflow_cli/__init__.py new file mode 100644 index 0000000..e44b73b --- /dev/null +++ b/src/chimpflow_cli/__init__.py @@ -0,0 +1,6 @@ +from importlib.metadata import version + +__version__ = version("chimpflow") +del version + +__all__ = ["__version__"] diff --git a/src/chimpflow_cli/main.py b/src/chimpflow_cli/main.py new file mode 100644 
index 0000000..0b6c37b --- /dev/null +++ b/src/chimpflow_cli/main.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python + +import argparse +import logging +import multiprocessing + +# Base class with methods supporting MaxIV command-line programs. +from dls_mainiac_lib.mainiac import Mainiac + +# The subcommands. +from chimpflow_cli.subcommands.start_services import StartServices + +# The package version. +from chimpflow_cli.version import meta as version_meta +from chimpflow_cli.version import version + +logger = logging.getLogger(__name__) + + +# -------------------------------------------------------------- +class Main(Mainiac): + def __init__(self, app_name): + super().__init__(app_name) + + # ---------------------------------------------------------- + def run(self): + """""" + + if self._args.subcommand == "start_services": + StartServices(self._args, self).run() + + else: + raise RuntimeError("unhandled subcommand %s" % (self._args.subcommand)) + + # ---------------------------------------------------------- + def build_parser(self, arglist=None): + """ + Method called from mainiac command line parsing. + Should return argparser for this program. + """ + + # Make a parser. 
+        parser = argparse.ArgumentParser(
+            description="XChem service to apply CHIMP detection to newly arrived Formulatrix Rockmaker images.",
+            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+        )
+
+        # --------------------------------------------------------------------
+
+        parser.add_argument(
+            "--log_name",
+            help="basis name for log",
+            type=str,
+            metavar="string",
+            dest="log_name",
+            default=None,
+        )
+
+        # --------------------------------------------------------------------
+        subparsers = parser.add_subparsers(help="subcommands", dest="subcommand")
+        subparsers.required = True
+
+        # --------------------------------------------------------------------
+        subparser = subparsers.add_parser("start_services", help="Start service(s).")
+        StartServices.add_arguments(subparser)
+
+        return parser
+
+    # --------------------------------------------------------------------------
+    def configure_logging(self, settings=None):
+        """
+        Configure runtime logging, override base class.
+        Presume that self._args is already set.
+        """
+
+        if self._args.log_name is None:
+            self._args.log_name = self._args.subcommand
+
+        # Name as it appears in logging.
+        multiprocessing.current_process().name = self._args.log_name
+
+        # Set mainiac's program name to include the subcommand.
+        self.program_name("%s/%s" % (self.program_name(), self._args.log_name))
+
+        # Enable the multiprocessing queue listener.
+        settings = {
+            "mpqueue": {"enabled": True},
+        }
+
+        # Let the base class do most of the work.
+        Mainiac.configure_logging(self, settings)
+
+        # Don't show specific asyncio debug.
+        logging.getLogger("asyncio").addFilter(_asyncio_logging_filter())
+
+        # Don't show matplotlib font debug.
+        logging.getLogger("matplotlib.font_manager").setLevel("INFO")
+
+        # Set filter on the ispyb logger to ignore the annoying NOTICE.
+ logging.getLogger("ispyb").addFilter(_ispyb_logging_filter()) + + # ---------------------------------------------------------- + def version(self): + """ + Method called from mainiac command line parsing. + Should return string in form of N.N.N. + """ + return version() + + # ---------------------------------------------------------- + def about(self): + """ + Method called from mainiac command line parsing. + Should return dict which can be serialized by json. + """ + + return {"versions": version_meta()} + + +# -------------------------------------------------------------------------------- +class _ispyb_logging_filter: + """ + Python logging filter to remove annoying traitlets messages. + These are not super useful to see all the time at the DEBUG level. + """ + + def filter(self, record): + + if record.msg.startswith( + "NOTICE: This code uses __future__ functionality in the ISPyB API." + ): + return 0 + + return 1 + + +# -------------------------------------------------------------------------------- +class _asyncio_logging_filter: + """ + Python logging filter to remove annoying asyncio messages. + These are not super useful to see all the time at the DEBUG level. + """ + + def filter(self, record): + + if "Using selector" in record.msg: + return 0 + + return 1 + + +# # -------------------------------------------------------------------------------- +# class _matplotlib_logging_filter: +# """ +# Python logging filter to remove annoying matplotlib messages. +# These are not super useful to see all the time at the INIT level. +# """ + +# def filter(self, record): +# if "loaded modules" in record.msg: +# return 0 + +# return 1 + + +# --------------------------------------------------------------- +def main(): + + # Instantiate the app. + main = Main("chimpflow_cli") + + # Configure the app from command line arguments. + main.parse_args_and_configure_logging() + + # Run the main wrapped in a try/catch. 
+ main.try_run_catch() + + +# --------------------------------------------------------------- +# From command line, invoke the main method. +if __name__ == "__main__": + main() diff --git a/src/chimpflow_cli/subcommands/__init__.py b/src/chimpflow_cli/subcommands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/chimpflow_cli/subcommands/base.py b/src/chimpflow_cli/subcommands/base.py new file mode 100644 index 0000000..c6280bd --- /dev/null +++ b/src/chimpflow_cli/subcommands/base.py @@ -0,0 +1,59 @@ +import logging +import os +import tempfile + +# Configurator. +from dls_multiconf_lib.multiconfs import Multiconfs, multiconfs_set_default + +# Utilities. +from dls_utilpack.visit import get_visit_year + +logger = logging.getLogger(__name__) + + +class Base: + """ + Base class for femtocheck subcommands. Handles details like configuration. + """ + + def __init__(self, args): + self._args = args + + self.__temporary_directory = None + + # ---------------------------------------------------------------------------------------- + def get_multiconf(self): + + chimpflow_multiconf = Multiconfs().build_object_from_environment() + + # For convenience, make a temporary directory for this test. + self.__temporary_directory = tempfile.TemporaryDirectory() + + # Make the temporary directory available to the multiconf. 
+ chimpflow_multiconf.substitute( + {"temporary_directory": self.__temporary_directory.name} + ) + + substitutions = { + "CWD": os.getcwd(), + "HOME": os.environ.get("HOME", "HOME"), + "USER": os.environ.get("USER", "USER"), + "PATH": os.environ.get("PATH", "PATH"), + "PYTHONPATH": os.environ.get("PYTHONPATH", "PYTHONPATH"), + } + + if hasattr(self._args, "visit") and self._args.visit != "VISIT": + BEAMLINE = os.environ.get("BEAMLINE") + if BEAMLINE is None: + raise RuntimeError("BEAMLINE environment variable is not defined") + year = get_visit_year(BEAMLINE, self._args.visit) + substitutions["BEAMLINE"] = BEAMLINE + substitutions["VISIT"] = self._args.visit + substitutions["YEAR"] = year + + chimpflow_multiconf.substitute(substitutions) + + # Set this as the default multiconf so it is available everywhere. + multiconfs_set_default(chimpflow_multiconf) + + return chimpflow_multiconf diff --git a/src/chimpflow_cli/subcommands/start_services.py b/src/chimpflow_cli/subcommands/start_services.py new file mode 100644 index 0000000..20545d2 --- /dev/null +++ b/src/chimpflow_cli/subcommands/start_services.py @@ -0,0 +1,94 @@ +import asyncio + +# Use standard logging in this module. +import logging + +# Base class for cli subcommands. +from chimpflow_cli.subcommands.base import Base + +# Context creator. +from chimpflow_lib.contexts.contexts import Contexts + +logger = logging.getLogger() + +# Specifications of services we can start, and their short names for parse args. +services = { + "chimpflow_collector_specification": "collector", +} + + +# -------------------------------------------------------------- +class StartServices(Base): + """ + Start one or more services and keep them running until ^C. + """ + + def __init__(self, args, mainiac): + super().__init__(args) + + self.__mainiac = mainiac + + # ---------------------------------------------------------------------------------------- + def run(self): + """ """ + + # Run in asyncio event loop. 
+ asyncio.run(self.__run_coro()) + + # ---------------------------------------------------------- + async def __run_coro(self): + """""" + + # Load the configuration. + chimpflow_multiconf = self.get_multiconf() + + # Let the multiconf know about any mpqueue logging. + # chimpflow_multiconf.set_logging_mpqueue(self.__mainiac.mpqueue) + + context_configuration = await chimpflow_multiconf.load() + + if "all" in self._args.service_names: + selected_service_names = [] + for _, service_name in services.items(): + selected_service_names.append(service_name) + else: + selected_service_names = self._args.service_names + + # Change all start_as to None, except the one we are starting. + for keyword, specification in context_configuration.items(): + if keyword in services: + service_name = services[keyword] + if service_name in selected_service_names: + specification["context"] = {"start_as": "process"} + + # Make a services context from the configuration. + context = Contexts().build_object(context_configuration) + + # Open the context (servers and clients). + async with context: + + try: + # Stay up until all processes are dead. + # TODO: Use asyncio wait or sentinel for all started processes to be dead. 
+ while True: + await asyncio.sleep(1.0) + if not await context.is_any_process_alive(): + logger.info("all processes have shutdown") + break + except KeyboardInterrupt: + pass + + # ---------------------------------------------------------- + def add_arguments(parser): + + services_list = list(services.values()) + + parser.add_argument( + help='"all" or any combination of {%s}' % (" ".join(services_list)), + nargs="+", + type=str, + metavar="service name(s)", + dest="service_names", + ) + + return parser diff --git a/src/chimpflow_cli/version.py b/src/chimpflow_cli/version.py new file mode 100644 index 0000000..19e3579 --- /dev/null +++ b/src/chimpflow_cli/version.py @@ -0,0 +1,36 @@ +import logging + +import dls_mainiac_lib.version + +import chimpflow_lib.version + +from . import __version__ + +logger = logging.getLogger(__name__) + + +# ---------------------------------------------------------- +def version(): + """ + Current version. + """ + + return __version__ + + +# ---------------------------------------------------------- +def meta(given_meta=None): + """ + Returns version information as a dict. + Adds version information to given meta, if any. 
+ """ + s = {} + s["chimpflow_cli"] = version() + s.update(chimpflow_lib.version.meta()) + s.update(dls_mainiac_lib.version.meta()) + + if given_meta is not None: + given_meta.update(s) + else: + given_meta = s + return given_meta diff --git a/src/chimpflow_lib/__init__.py b/src/chimpflow_lib/__init__.py new file mode 100644 index 0000000..67fe80b --- /dev/null +++ b/src/chimpflow_lib/__init__.py @@ -0,0 +1,6 @@ +from importlib.metadata import version + +__version__ = version("chimpflow") +del version + +__all__ = ["__version__"] diff --git a/src/chimpflow_lib/__main__.py b/src/chimpflow_lib/__main__.py new file mode 100644 index 0000000..ba9f986 --- /dev/null +++ b/src/chimpflow_lib/__main__.py @@ -0,0 +1,34 @@ +import json +from argparse import ArgumentParser + +from chimpflow_lib.version import meta, version + + +def get_parser(): + parser = ArgumentParser( + description="XChem GUI for manually targeting drop points for the Echo dispenser." + ) + parser.add_argument( + "--version", + action="version", + version=version(), + help="Print version string.", + ) + parser.add_argument( + "--version-json", + action="store_true", + help="Print version stack in json.", + ) + return parser + + +def main(args=None): + parser = get_parser() + args = parser.parse_args(args) + + if args.version_json: + print(json.dumps(meta(), indent=4)) + + +if __name__ == "__main__": + main() diff --git a/src/chimpflow_lib/base_aiohttp.py b/src/chimpflow_lib/base_aiohttp.py new file mode 100644 index 0000000..13d19e3 --- /dev/null +++ b/src/chimpflow_lib/base_aiohttp.py @@ -0,0 +1,14 @@ +import logging + +from dls_servbase_lib.base_aiohttp import BaseAiohttp as DlsServbaseBaseAiohttp + +logger = logging.getLogger(__name__) + + +# ------------------------------------------------------------------------------------------ +class BaseAiohttp(DlsServbaseBaseAiohttp): + """ + Object representing a a process which receives requests from aiohttp. 
+ """ + + pass diff --git a/src/chimpflow_lib/collectors/__init__.py b/src/chimpflow_lib/collectors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/chimpflow_lib/collectors/aiohttp.py b/src/chimpflow_lib/collectors/aiohttp.py new file mode 100644 index 0000000..85329c5 --- /dev/null +++ b/src/chimpflow_lib/collectors/aiohttp.py @@ -0,0 +1,173 @@ +import logging +import multiprocessing +import threading + +# Utilities. +from dls_utilpack.callsign import callsign +from dls_utilpack.require import require + +# Base class which maps flask tasks to methods. +from dls_utilpack.thing import Thing + +# Base class for an aiohttp server. +from chimpflow_lib.base_aiohttp import BaseAiohttp + +# Factory to make a Collector. +from chimpflow_lib.collectors.collectors import Collectors + +# Collector protocolj things. +from chimpflow_lib.collectors.constants import Commands, Keywords + +logger = logging.getLogger(__name__) + +thing_type = "chimpflow_lib.collectors.aiohttp" + + +# ------------------------------------------------------------------------------------------ +class Aiohttp(Thing, BaseAiohttp): + """ + Object representing a collector which receives tasks from aiohttp. 
    """

    # ----------------------------------------------------------------------------------------
    def __init__(self, specification=None, predefined_uuid=None):
        # Initialize both bases: Thing supplies identity/uuid handling,
        # BaseAiohttp supplies the http server/client plumbing.
        Thing.__init__(self, thing_type, specification, predefined_uuid=predefined_uuid)
        BaseAiohttp.__init__(
            self, specification["type_specific_tbd"]["aiohttp_specification"]
        )

        # The local (in-process) collector implementation; built in activate_coro().
        self.__actual_collector = None

    # ----------------------------------------------------------------------------------------
    def callsign(self):
        """Return a short human-readable identity for log messages."""
        return "%s %s" % ("Collector.Aiohttp", BaseAiohttp.callsign(self))

    # ----------------------------------------------------------------------------------------
    def activate_process(self):
        """Entry point when the server is started as a separate process."""

        try:
            multiprocessing.current_process().name = "collector"

            self.activate_process_base()

        except Exception as exception:
            logger.exception("exception in collector process", exc_info=exception)

    # ----------------------------------------------------------------------------------------
    def activate_thread(self, loop):
        """
        Called from inside a newly created thread.
        """

        try:
            threading.current_thread().name = "collector"

            self.activate_thread_base(loop)

        except Exception as exception:
            # NOTE(review): bare callsign(self) presumably refers to a module-level
            # dls_utilpack callsign import (not visible in this chunk) rather than
            # the callsign() method above -- confirm the import exists.
            logger.exception(
                f"unable to start {callsign(self)} thread", exc_info=exception
            )

    # ----------------------------------------------------------------------------------------
    async def activate_coro(self):
        """Build the local collector back-end, start it, then start the aiohttp server."""
        try:
            # Build a local collector for our back-end.
            self.__actual_collector = Collectors().build_object(
                self.specification()["type_specific_tbd"][
                    "actual_collector_specification"
                ]
            )

            # Get the local implementation started.
            await self.__actual_collector.activate()

            # ----------------------------------------------
            await BaseAiohttp.activate_coro_base(self)

        except Exception as exception:
            raise RuntimeError(
                "exception while starting collector server"
            ) from exception

    # ----------------------------------------------------------------------------------------
    async def direct_shutdown(self):
        """Stop the local collector implementation, then the server listener."""

        # ----------------------------------------------
        if self.__actual_collector is not None:
            # Disconnect our local dataface connection, i.e. the one which holds the database connection.
            await self.__actual_collector.deactivate()

        # ----------------------------------------------
        # Let the base class stop the server listener.
        await self.base_direct_shutdown()

    # ----------------------------------------------------------------------------------------
    # From http client, request server to submit task for execution.

    # async def fire(self, message):
    #     """"""
    #     # Build a local collector for our client side.
    #     actual_collector = Collectors().build_object(
    #         self.specification()["type_specific_tbd"][
    #             "actual_collector_specification"
    #         ]
    #     )

    #     logger.debug(f"[DMOTF] firing actual {callsign(actual_collector)}")
    #     await actual_collector.fire(message)
    #     logger.debug("[DMOTF] firing complete")

    # ----------------------------------------------------------------------------------------
    async def fire(self, message):
        """Client side: forward the fire request to the server over http."""
        return await self.__send_protocolj("fire", message)

    # ----------------------------------------------------------------------------------------
    async def __send_protocolj(self, function, *args, **kwargs):
        """Marshal a remote call (function name plus args) into a protocolj request."""

        return await self.client_protocolj(
            {
                Keywords.COMMAND: Commands.EXECUTE,
                Keywords.PAYLOAD: {
                    "function": function,
                    "args": args,
                    "kwargs": kwargs,
                },
            },
        )

    # ----------------------------------------------------------------------------------------
    async def __do_locally(self, function, args, kwargs):
        """Server side: look up the named method on the local collector and await it."""

        # logger.info(describe("function", function))
        # logger.info(describe("args", args))
        # logger.info(describe("kwargs", kwargs))

        function = getattr(self.__actual_collector, function)

        response = await function(*args, **kwargs)

        return response

    # ----------------------------------------------------------------------------------------
    async def dispatch(self, request_dict, opaque):
        """Server side: route an incoming EXECUTE request to the local implementation."""

        command = require("request json", request_dict, Keywords.COMMAND)

        if command == Commands.EXECUTE:
            payload = require("request json", request_dict, Keywords.PAYLOAD)
            response = await self.__do_locally(
                payload["function"], payload["args"], payload["kwargs"]
            )
        else:
            raise RuntimeError("invalid command %s" % (command))

        return response
diff --git a/src/chimpflow_lib/collectors/base.py b/src/chimpflow_lib/collectors/base.py
new file mode 100644
index 0000000..5edd957
--- /dev/null
+++ b/src/chimpflow_lib/collectors/base.py
@@ -0,0 +1,52 @@
import inspect
import logging

from dls_utilpack.import_class import import_class

# Base class which maps flask tasks to methods.
from dls_utilpack.thing import Thing

logger = logging.getLogger(__name__)


# ------------------------------------------------------------------------------------------
class Base(Thing):
    """
    Object representing a collector which receives triggers from aiohttp.
    """

    # ----------------------------------------------------------------------------------------
    def __init__(self, thing_type, specification=None, predefined_uuid=None):
        Thing.__init__(self, thing_type, specification, predefined_uuid=predefined_uuid)

    # ----------------------------------------------------------------------------------------
    async def job_was_deleted(self, news_payload):
        # Default is a no-op; subclasses may react to job-deletion news.
        pass

    # ------------------------------------------------------------------------------------------
    async def trigger(self, workflow_filename_classname, **workflow_constructor_kwargs):
        """Handle request to submit task for execution."""

        logger.debug(f"[DMOTF] triggering workflow from {workflow_filename_classname}")

        class_object = import_class(workflow_filename_classname)

        logger.debug("[DMOTF] constructing")

        # Construct the workflow instance.
        workflow = class_object(**workflow_constructor_kwargs)

        logger.debug("[DMOTF] building")

        # Let the workflow build itself.
        # build() may be sync or async depending on the workflow class.
        if inspect.iscoroutinefunction(workflow.build):
            await workflow.build()
        else:
            workflow.build()

        logger.debug("[DMOTF] starting")

        # Commit workflow to the database and enable it for scheduling.
        await workflow.start()

        logger.debug("[DMOTF] started")
diff --git a/src/chimpflow_lib/collectors/collectors.py b/src/chimpflow_lib/collectors/collectors.py
new file mode 100644
index 0000000..a86f41d
--- /dev/null
+++ b/src/chimpflow_lib/collectors/collectors.py
@@ -0,0 +1,79 @@
# Use standard logging in this module.
import logging

# Class managing list of things.
from dls_utilpack.things import Things

# Exceptions.
from chimpflow_lib.exceptions import NotFound

logger = logging.getLogger(__name__)

# -----------------------------------------------------------------------------------------
# Process-wide default collector instance (set by the collector Context).
__default_collector = None


def collectors_set_default(collector):
    global __default_collector
    __default_collector = collector


def collectors_get_default():
    global __default_collector
    if __default_collector is None:
        raise RuntimeError("collectors_get_default instance is None")
    return __default_collector


class Collectors(Things):
    """
    List of available collectors.
    """

    # ----------------------------------------------------------------------------------------
    def __init__(self, name="collectors"):
        Things.__init__(self, name)

    # ----------------------------------------------------------------------------------------
    def build_object(self, specification, predefined_uuid=None):
        """Instantiate a collector from its specification dict (keyed by "type")."""

        collector_class = self.lookup_class(specification["type"])

        try:
            collector_object = collector_class(
                specification, predefined_uuid=predefined_uuid
            )
        except Exception as exception:
            raise RuntimeError(
                "unable to build collector object of class %s"
                % (collector_class.__name__)
            ) from exception

        return collector_object

    # ----------------------------------------------------------------------------------------
    def lookup_class(self, class_type):
        """Map a specification type string to a collector class (imported lazily)."""

        # NOTE(review): these type strings use a "dls_chimpflow_lib..." prefix while
        # the modules' own thing_type strings use "chimpflow_lib..." -- confirm
        # which spelling the configurations actually use.
        if class_type == "dls_chimpflow_lib.chimpflow_collectors.aiohttp":
            from chimpflow_lib.collectors.aiohttp import Aiohttp

            return Aiohttp

        elif class_type == "dls_chimpflow_lib.chimpflow_collectors.manual":
            from chimpflow_lib.collectors.manual import Manual

            return Manual

        elif class_type == "dls_chimpflow_lib.chimpflow_collectors.scrape_to_database":
            from chimpflow_lib.collectors.scrape_to_database import ScrapeToDatabase

            return ScrapeToDatabase

        else:
            # Fall back to dynamic lookup by the Things base class.
            try:
                RuntimeClass = Things.lookup_class(self, class_type)
                return RuntimeClass
            except NotFound:
                raise NotFound("unable to get collector class for %s" % (class_type))
diff --git a/src/chimpflow_lib/collectors/constants.py b/src/chimpflow_lib/collectors/constants.py
new file mode 100644
index 0000000..0537eaf
--- /dev/null
+++ b/src/chimpflow_lib/collectors/constants.py
@@ -0,0 +1,7 @@
class Keywords:
    # Protocol keys used in request dicts between client and server.
    COMMAND = "collectors::keywords::command"
    PAYLOAD = "collectors::keywords::payload"


class Commands:
    EXECUTE = "collectors::commands::execute"
diff --git a/src/chimpflow_lib/collectors/context.py b/src/chimpflow_lib/collectors/context.py
new file mode 100644
index 0000000..f60e332
--- /dev/null
+++ b/src/chimpflow_lib/collectors/context.py
@@ -0,0 +1,57 @@
import logging

# Things created in the context.
from chimpflow_lib.collectors.collectors import Collectors, collectors_set_default

# Base class for an asyncio context
from chimpflow_lib.contexts.base import Base as ContextBase

logger = logging.getLogger(__name__)


thing_type = "chimpflow_lib.collectors.context"


class Context(ContextBase):
    """
    Asyncio context for a collector object.
    On entering, it creates the object according to the specification (a dict).
    If configured, it starts the server as a coroutine, thread or process.
    On exiting, it commands the server to shut down and closes client connection.

    The enter and exit methods are exposed for use during testing.
    """

    # ----------------------------------------------------------------------------------------
    def __init__(self, specification):
        ContextBase.__init__(self, thing_type, specification)

    # ----------------------------------------------------------------------------------------
    async def aenter(self):
        """Build the collector object and start it in the configured mode."""

        # Build the object according to the specification.
        self.server = Collectors().build_object(self.specification())

        # If there is more than one collector, the last one defined will be the default.
        collectors_set_default(self.server)

        # Start the server in the configured mode; if "start_as" is absent the
        # object is built but not started.
        if self.context_specification.get("start_as") == "coro":
            await self.server.activate_coro()

        elif self.context_specification.get("start_as") == "thread":
            await self.server.start_thread()

        elif self.context_specification.get("start_as") == "process":
            await self.server.start_process()

    # ----------------------------------------------------------------------------------------
    async def aexit(self):
        """Shut the server down (if any) and clear the default collector."""

        if self.server is not None:
            # Put in request to shutdown the server.
            await self.server.client_shutdown()

        # Clear the global variable.  Important between pytests.
        collectors_set_default(None)
diff --git a/src/chimpflow_lib/collectors/manual.py b/src/chimpflow_lib/collectors/manual.py
new file mode 100644
index 0000000..aa8cee3
--- /dev/null
+++ b/src/chimpflow_lib/collectors/manual.py
@@ -0,0 +1,44 @@
import logging

# Base class for collector instances.
from chimpflow_lib.collectors.base import Base as CollectorBase

logger = logging.getLogger(__name__)

# NOTE(review): "popener" looks copy-pasted from another module -- confirm
# whether this should be "chimpflow_lib.collectors.manual".
thing_type = "chimpflow_lib.collectors.popener"


# ------------------------------------------------------------------------------------------
class Manual(CollectorBase):
    """
    Collector which does no automatic collection; workflows are triggered
    manually via fire().
    """

    # ----------------------------------------------------------------------------------------
    def __init__(self, specification, predefined_uuid=None):
        CollectorBase.__init__(
            self, thing_type, specification, predefined_uuid=predefined_uuid
        )

    # ----------------------------------------------------------------------------------------
    async def activate(self):
        """Nothing to start for a manual collector."""
        pass

    # ----------------------------------------------------------------------------------------
    async def deactivate(self):
        """Nothing to stop for a manual collector."""
        pass

    # ----------------------------------------------------------------------------------------
    async def fire(
        self,
        message,
    ):
        """Trigger the workflow described by the message dict."""

        await CollectorBase.trigger(
            self,
            message["workflow_filename_classname"],
            **message["workflow_constructor_kwargs"],
        )
diff --git a/src/chimpflow_lib/collectors/scrape_to_database.py b/src/chimpflow_lib/collectors/scrape_to_database.py
new file mode 100644
index 0000000..759e021
--- /dev/null
+++ b/src/chimpflow_lib/collectors/scrape_to_database.py
@@ -0,0 +1,188 @@
import asyncio
import glob
import logging
import os
import time

from dls_utilpack.callsign import callsign
from dls_utilpack.explain import explain2
from dls_utilpack.require import require
from PIL import Image
from xchembku_api.databases.constants import CrystalWellFieldnames, Tablenames

# Global dataface.
from xchembku_api.datafaces.datafaces import xchembku_datafaces_get_default

# Base class for collector instances.
from chimpflow_lib.collectors.base import Base as CollectorBase

logger = logging.getLogger(__name__)

thing_type = "chimpflow_lib.collectors.scrape_to_database"


# ------------------------------------------------------------------------------------------
class ScrapeToDatabase(CollectorBase):
    """
    Collector which periodically scrapes configured directories for image
    files and inserts a crystal-well record for each new file into the database.
+ """ + + # ---------------------------------------------------------------------------------------- + def __init__(self, specification, predefined_uuid=None): + CollectorBase.__init__( + self, thing_type, specification, predefined_uuid=predefined_uuid + ) + + s = f"{callsign(self)} specification", self.specification() + + type_specific_tbd = require(s, self.specification(), "type_specific_tbd") + self.__directories = require(s, type_specific_tbd, "directories") + self.__recursive = require(s, type_specific_tbd, "recursive") + + # We will use the dataface to discover previously processed files. + self.__dataface = xchembku_datafaces_get_default() + + # This flag will stop the ticking async task. + self.__keep_ticking = True + self.__tick_future = None + + # ---------------------------------------------------------------------------------------- + async def activate(self): + """""" + + # Get all the jobs ever done. + records = await self.__dataface.query( + f"SELECT {CrystalWellFieldnames.FILENAME} FROM {Tablenames.CRYSTAL_WELLS}" + ) + + # Make an initial list of the data labels associated with any job. + self.__known_filenames = [] + for record in records: + filename = record["filename"] + if filename not in self.__known_filenames: + self.__known_filenames.append(filename) + + logger.debug(f"activating with {len(records)} known filenames") + + # Poll periodically. + self.__tick_future = asyncio.get_event_loop().create_task(self.tick()) + + # ---------------------------------------------------------------------------------------- + async def deactivate(self): + """""" + + if self.__tick_future is not None: + # Set flag to stop the periodic ticking. + self.__keep_ticking = False + # Wait for the ticking to stop. + await self.__tick_future + + # ---------------------------------------------------------------------------------------- + async def tick(self): + """ + Periodic ticking to check for new work. 
        """

        while self.__keep_ticking:
            try:
                await self.scrape()
            except Exception as exception:
                # Log and keep ticking; one failed scrape must not stop polling.
                logger.error(explain2(exception, "scraping"), exc_info=exception)
            await asyncio.sleep(1.0)

    # ----------------------------------------------------------------------------------------
    async def scrape(self):
        """
        Scrape the directories looking for new files.
        """

        inserts = []

        # TODO: Use asyncio tasks to parallelize scraping directories.
        for directory in self.__directories:
            await self.scrape_directory(directory, inserts)

        # Flush any remaining inserts to the database.
        await self.flush_inserts(inserts)

    # ----------------------------------------------------------------------------------------
    async def scrape_directory(self, directory, inserts):
        """
        Scrape the directory looking for new files.
        """

        if not os.path.isdir(directory):
            return

        t0 = time.time()
        filenames = glob.glob(f"{directory}/**", recursive=self.__recursive)
        t1 = time.time()

        new_count = 0
        for filename in filenames:
            if os.path.isdir(filename):
                continue

            # NOTE(review): __known_filenames is a list, so this membership test
            # is O(n) per file; a set would scale better for large directories.
            if filename not in self.__known_filenames:
                # TODO: Use transaction to batch the insertions of collected images.
                await self.add_insert(filename, inserts)
                self.__known_filenames.append(filename)
                new_count = new_count + 1

        if new_count > 0:
            seconds = "%0.3f" % (t1 - t0)
            logger.info(
                f"from {directory} found {new_count} files"
                f" among {len(filenames)} total files in {seconds} seconds"
            )

    # ----------------------------------------------------------------------------------------
    async def add_insert(self, filename, inserts):
        """
        Add new insert for later flush.
+ """ + + if len(inserts) >= 1000: + await self.flush_inserts(inserts) + + error = None + target_position_x = None + target_position_y = None + try: + image = Image.open(filename) + + width, height = image.size + except Exception as exception: + error = str(exception) + + width = None + height = None + + # Add a new insert with the fields in the proper order. + inserts.append( + [filename, error, width, height, target_position_x, target_position_y] + ) + + # ---------------------------------------------------------------------------------------- + async def flush_inserts(self, inserts): + """ + Do the actual inserts by executemany. + """ + + if len(inserts) == 0: + return + + logger.debug(f"flushing {len(inserts)} inserts") + await self.__dataface.execute( + f"INSERT INTO {Tablenames.CRYSTAL_WELLS}" + f" ({CrystalWellFieldnames.FILENAME}," + f" {CrystalWellFieldnames.ERROR}," + f" {CrystalWellFieldnames.WIDTH}," + f" {CrystalWellFieldnames.HEIGHT}," + f" {CrystalWellFieldnames.TARGET_POSITION_X}," + f" {CrystalWellFieldnames.TARGET_POSITION_Y})" + " VALUES (?, ?, ?, ?, ?, ?)", + subs=inserts, + ) + + inserts.clear() diff --git a/src/chimpflow_lib/contexts/__init__.py b/src/chimpflow_lib/contexts/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/chimpflow_lib/contexts/base.py b/src/chimpflow_lib/contexts/base.py new file mode 100644 index 0000000..4d91bf1 --- /dev/null +++ b/src/chimpflow_lib/contexts/base.py @@ -0,0 +1,62 @@ +import logging + +# Utilities. +from dls_utilpack.callsign import callsign + +# Base class for a Thing which has a name and traits. 
+from dls_utilpack.thing import Thing + +logger = logging.getLogger(__name__) + + +class Base(Thing): + """ """ + + # ---------------------------------------------------------------------------------------- + def __init__(self, thing_type, specification=None, predefined_uuid=None): + Thing.__init__(self, thing_type, specification, predefined_uuid=predefined_uuid) + + # Reference to object which is a server, such as BaseAiohttp. + self.server = None + + self.context_specification = self.specification().get("context", {}) + + # ---------------------------------------------------------------------------------------- + async def is_process_started(self): + """""" + + if self.server is None: + raise RuntimeError(f"{callsign(self)} a process has not been defined") + + try: + return await self.server.is_process_started() + except Exception: + raise RuntimeError( + f"unable to determing process started for server {callsign(self.server)}" + ) + + # ---------------------------------------------------------------------------------------- + async def is_process_alive(self): + """""" + + if self.server is None: + raise RuntimeError(f"{callsign(self)} a process has not been defined") + + try: + return await self.server.is_process_alive() + except Exception: + raise RuntimeError( + f"unable to determing dead or alive for server {callsign(self.server)}" + ) + + # ---------------------------------------------------------------------------------------- + async def __aenter__(self): + """ """ + + await self.aenter() + + # ---------------------------------------------------------------------------------------- + async def __aexit__(self, type, value, traceback): + """ """ + + await self.aexit() diff --git a/src/chimpflow_lib/contexts/classic.py b/src/chimpflow_lib/contexts/classic.py new file mode 100644 index 0000000..80f3fe3 --- /dev/null +++ b/src/chimpflow_lib/contexts/classic.py @@ -0,0 +1,130 @@ +import logging + +# Contexts. 
from dls_utilpack.callsign import callsign

# Utilities.
from dls_utilpack.explain import explain

from chimpflow_lib.collectors.context import Context as CollectorContext

# Base class which maps flask requests to methods.
from chimpflow_lib.contexts.base import Base

logger = logging.getLogger(__name__)


thing_type = "chimpflow_lib.chimpflow_contexts.classic"


class Classic(Base):
    """
    Object representing all the possible contexts.
    """

    # ----------------------------------------------------------------------------------------
    def __init__(self, specification):
        Base.__init__(self, thing_type, specification)

        self.__collector = None

    # ----------------------------------------------------------------------------------------
    async def __dead_or_alive(self, context, dead, alive):
        # Classify a single sub-context's process into the dead or alive list.

        if context is not None:
            try:
                # A server was defined for this context?
                if await context.is_process_started():
                    if await context.is_process_alive():
                        alive.append(context)
                    else:
                        dead.append(context)
            except Exception:
                raise RuntimeError(
                    f"unable to determine dead or alive for context {callsign(context)}"
                )

    # ----------------------------------------------------------------------------------------
    async def __dead_or_alive_all(self):
        """
        Return two lists, one for dead and one for alive processes.
        TODO: Parallelize context process alive/dead checking.
        """

        dead = []
        alive = []

        await self.__dead_or_alive(self.__collector, dead, alive)

        return dead, alive

    # ----------------------------------------------------------------------------------------
    async def is_any_process_alive(self):
        """
        Check all configured processes, return if any alive.
        """
        dead, alive = await self.__dead_or_alive_all()

        # logger.debug(f"[PIDAL] {len(dead)} processes are dead, {len(alive)} are alive")

        return len(alive) > 0

    # ----------------------------------------------------------------------------------------
    async def is_any_process_dead(self):
        """
        Check all configured processes, return if any dead.
        """
        dead, alive = await self.__dead_or_alive_all()

        return len(dead) > 0

    # ----------------------------------------------------------------------------------------
    async def __aenter__(self):
        """ """
        logger.debug(f"entering {callsign(self)} context")

        try:

            try:
                specification = self.specification().get(
                    "chimpflow_collector_specification"
                )
                if specification is not None:
                    logger.debug(f"at entering position {callsign(self)} COLLECTOR")
                    self.__collector = CollectorContext(specification)
                    await self.__collector.aenter()
            except Exception as exception:
                raise RuntimeError(
                    explain(exception, f"creating {callsign(self)} collector context")
                )

        except Exception as exception:
            # Best-effort cleanup of anything already started before re-raising.
            await self.aexit()
            raise RuntimeError(explain(exception, f"entering {callsign(self)} context"))

        logger.debug(f"entered {callsign(self)} context")

    # ----------------------------------------------------------------------------------------
    async def __aexit__(self, type, value, traceback):
        """ """

        await self.aexit()

    # ----------------------------------------------------------------------------------------
    async def aexit(self):
        """ """

        logger.debug(f"exiting {callsign(self)} context")

        if self.__collector is not None:
            logger.debug(f"at exiting position {callsign(self)} COLLECTOR")
            try:
                await self.__collector.aexit()
            except Exception as exception:
                logger.error(
                    explain(exception, f"exiting {callsign(self.__collector)} context"),
                    exc_info=exception,
                )
            self.__collector = None

        logger.debug(f"exited {callsign(self)} context")
diff --git
a/src/chimpflow_lib/contexts/contexts.py b/src/chimpflow_lib/contexts/contexts.py new file mode 100644 index 0000000..006e659 --- /dev/null +++ b/src/chimpflow_lib/contexts/contexts.py @@ -0,0 +1,57 @@ +# Use standard logging in this module. +import logging + +import yaml + +# Class managing list of things. +from dls_utilpack.things import Things + +# Exceptions. +from chimpflow_lib.exceptions import NotFound + +logger = logging.getLogger(__name__) + +# ----------------------------------------------------------------------------------------- + + +class Contexts(Things): + """ + Context loader. + """ + + # ---------------------------------------------------------------------------------------- + def __init__(self, name=None): + Things.__init__(self, name) + + # ---------------------------------------------------------------------------------------- + def build_object(self, specification): + """""" + + if not isinstance(specification, dict): + with open(specification, "r") as yaml_stream: + specification = yaml.safe_load(yaml_stream) + + chimpflow_context_class = self.lookup_class(specification["type"]) + + try: + chimpflow_context_object = chimpflow_context_class(specification) + except Exception as exception: + raise RuntimeError( + "unable to build chimpflow_context object for type %s" + % (chimpflow_context_class) + ) from exception + + return chimpflow_context_object + + # ---------------------------------------------------------------------------------------- + def lookup_class(self, class_type): + """""" + + if class_type == "chimpflow_lib.chimpflow_contexts.classic": + from chimpflow_lib.contexts.classic import Classic + + return Classic + + raise NotFound( + "unable to get chimpflow_context class for type %s" % (class_type) + ) diff --git a/src/chimpflow_lib/envvar.py b/src/chimpflow_lib/envvar.py new file mode 100644 index 0000000..1ad3673 --- /dev/null +++ b/src/chimpflow_lib/envvar.py @@ -0,0 +1,37 @@ +import logging +import os + +logger = 
logging.getLogger(__name__) + + +class Envvar: + """Class which covers environment variables, with default values.""" + + CHIMPFLOW_CONFIGFILE = "CHIMPFLOW_CONFIGFILE" + XCHEM_BEFLOW_DATA = "XCHEM_BEFLOW_DATA" + XCHEM_BEFLOW_DLS_ROOT = "XCHEM_BEFLOW_DLS_ROOT" + BEAMLINE = "BEAMLINE" + VISIT_YEAR = "VISIT_YEAR" + VISIT = "VISIT" + + def __init__(self, name, **kwargs): + + environ = kwargs.get("environ") + + if environ is None: + environ = os.environ + + self.name = name + self.is_set = False + self.value = None + + if name in environ: + self.is_set = True + self.value = environ[name] + else: + if "default" in kwargs: + self.is_set = True + self.value = kwargs["default"] + else: + self.is_set = False + self.value = None diff --git a/src/chimpflow_lib/exceptions.py b/src/chimpflow_lib/exceptions.py new file mode 100644 index 0000000..0502b93 --- /dev/null +++ b/src/chimpflow_lib/exceptions.py @@ -0,0 +1,34 @@ +class NotFound(RuntimeError): + pass + + +# When something has no value set yet. +class NotSet(RuntimeError): + pass + + +class RemoteSubmitFailed(RuntimeError): + pass + + +class CapacityReached(RuntimeError): + pass + + +class DuplicateLabelException(RuntimeError): + pass + + +class DuplicateUuidException(RuntimeError): + pass + + +class ClientConnectorError(RuntimeError): + pass + + +class Factory: + def build(qualname): + if qualname == "echolocator_api.exceptions.CapacityReached": + return CapacityReached + return None diff --git a/src/chimpflow_lib/version.py b/src/chimpflow_lib/version.py new file mode 100644 index 0000000..af5821c --- /dev/null +++ b/src/chimpflow_lib/version.py @@ -0,0 +1,48 @@ +import logging + +import dls_mainiac_lib.version +import dls_normsql.version +import dls_servbase_lib.version +import dls_utilpack.version + +import chimpflow_lib + +logger = logging.getLogger(__name__) + + +# ---------------------------------------------------------- +def version(): + """ + Current version. 
    """

    return chimpflow_lib.__version__


# ----------------------------------------------------------
def meta(given_meta=None):
    """
    Returns version information as a dict.
    Adds version information to given meta, if any.
    """
    s = {}
    s["chimpflow_lib"] = version()

    s.update(dls_servbase_lib.version.meta())
    s.update(dls_utilpack.version.meta())
    s.update(dls_mainiac_lib.version.meta())
    s.update(dls_normsql.version.meta())

    try:
        import setproctitle

        # Touch the attribute so a missing/broken install raises here.
        setproctitle.__version__
        s["setproctitle"] = setproctitle.__version__
    except Exception:
        s["setproctitle"] = "unavailable"

    if given_meta is not None:
        given_meta.update(s)
    else:
        given_meta = s
    return given_meta
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/base.py b/tests/base.py
new file mode 100644
index 0000000..c59587a
--- /dev/null
+++ b/tests/base.py
@@ -0,0 +1,34 @@
import asyncio
import logging
import multiprocessing

import pytest

logger = logging.getLogger(__name__)


class Base:
    """Base class for tests: runs the test coroutine in a fresh asyncio loop."""

    # ----------------------------------------------------------------------------------------
    # NOTE(review): "infrastrcuture_context" is a misspelling of
    # "infrastructure_context"; kept as-is since renaming the parameter
    # could break keyword-argument callers.
    def main(self, constants, infrastrcuture_context, output_directory):
        """ """

        multiprocessing.current_process().name = "main"

        failure_message = None
        try:
            # Run main test in asyncio event loop.
            asyncio.run(
                self._main_coroutine(
                    constants, infrastrcuture_context, output_directory
                )
            )

        except Exception as exception:
            logger.exception(
                "unexpected exception in the test method", exc_info=exception
            )
            failure_message = str(exception)

        if failure_message is not None:
            pytest.fail(failure_message)
diff --git a/tests/base_context_tester.py b/tests/base_context_tester.py
new file mode 100644
index 0000000..27deca1
--- /dev/null
+++ b/tests/base_context_tester.py
@@ -0,0 +1,76 @@
import asyncio
import logging
import multiprocessing
import os

import pytest

# Configurator.
from dls_multiconf_lib.constants import ThingTypes as MulticonfThingTypes
from dls_multiconf_lib.multiconfs import Multiconfs, multiconfs_set_default

logger = logging.getLogger(__name__)


# ----------------------------------------------------------------------------------------
class BaseContextTester:
    """
    This is a base class for tests which use Context.
    """

    def __init__(self):
        self.tasks_execution_outputs = {}
        # Files expected to remain in the output directory after a run.
        self.residuals = ["stdout.txt", "stderr.txt", "main.log"]

    def main(self, constants, configuration_file, output_directory):
        """
        This is the main program which calls the test using asyncio.
        """

        # Save these for when the configuration is loaded.
        self.__configuration_file = configuration_file
        self.__output_directory = output_directory

        multiprocessing.current_process().name = "main"

        # self.__blocked_event = asyncio.Event()

        failure_message = None
        try:
            # Run main test in asyncio event loop.
            asyncio.run(self._main_coroutine(constants, output_directory))

        except Exception as exception:
            logger.exception(
                "unexpected exception in the test method", exc_info=exception
            )
            failure_message = str(exception)

        if failure_message is not None:
            pytest.fail(failure_message)

    # ----------------------------------------------------------------------------------------
    def get_multiconf(self):
        # Build a yaml-backed multiconf from the saved configuration file.

        chimpflow_multiconf = Multiconfs().build_object(
            {
                "type": MulticonfThingTypes.YAML,
                "type_specific_tbd": {"filename": self.__configuration_file},
            }
        )

        # For convenience, always do these replacement.
        chimpflow_multiconf.substitute({"output_directory": self.__output_directory})

        # Add various things from the environment into the multiconf.
        chimpflow_multiconf.substitute(
            {
                "CWD": os.getcwd(),
                "PYTHONPATH": os.environ.get("PYTHONPATH", "PYTHONPATH"),
            }
        )

        # Set the global value of our multiconf which might be used in other modules.
        multiconfs_set_default(chimpflow_multiconf)

        return chimpflow_multiconf
diff --git a/tests/base_specification_tester.py b/tests/base_specification_tester.py
new file mode 100644
index 0000000..929e975
--- /dev/null
+++ b/tests/base_specification_tester.py
@@ -0,0 +1,37 @@
import asyncio
import logging
import multiprocessing

import pytest

logger = logging.getLogger(__name__)


# ----------------------------------------------------------------------------------------
class BaseSpecificationTester:
    """
    This is a base class for tests which take a specification.
    """

    def main(self, constants, specification, output_directory):
        """
        This is the main program which calls the test using asyncio.
        """

        multiprocessing.current_process().name = "main"

        failure_message = None
        try:
            # Run main test in asyncio event loop.
            asyncio.run(
                self._main_coroutine(constants, specification, output_directory)
            )

        except Exception as exception:
            logger.exception(
                "unexpected exception in the test method", exc_info=exception
            )
            failure_message = str(exception)

        if failure_message is not None:
            pytest.fail(failure_message)
diff --git a/tests/base_tester.py b/tests/base_tester.py
new file mode 100644
index 0000000..8bbdc02
--- /dev/null
+++ b/tests/base_tester.py
@@ -0,0 +1,35 @@
import asyncio
import logging
import multiprocessing

import pytest

logger = logging.getLogger(__name__)


# ----------------------------------------------------------------------------------------
class BaseTester:
    """
    This is a base class for simplest tests.
    """

    def main(self, constants, output_directory):
        """
        This is the main program which calls the test using asyncio.
        """

        multiprocessing.current_process().name = "main"

        failure_message = None
        try:
            # Run main test in asyncio event loop.
            asyncio.run(self._main_coroutine(constants, output_directory))

        except Exception as exception:
            logger.exception(
                "unexpected exception in the test method", exc_info=exception
            )
            failure_message = str(exception)

        if failure_message is not None:
            pytest.fail(failure_message)
diff --git a/tests/configurations/multiconf.yaml b/tests/configurations/multiconf.yaml
new file mode 100644
index 0000000..695b336
--- /dev/null
+++ b/tests/configurations/multiconf.yaml
@@ -0,0 +1,67 @@
type: chimpflow_lib.chimpflow_contexts.classic

visit:
  beamline: b29
  year: 2022
  visit: cy29757-3
  directory: /dls/b29/data/2022/cy29757-3
  # Format to make actual data_filename using data_label as token.
  data_path_pattern: "/dls/b29/data/2022/cy29757-3/Merlin/{data_label}_data.mib"

logging_settings:
  console:
    enabled: True
    verbose: True
  logfile:
    enabled: True
    directory: ${output_directory}/logfile.log
  graypy:
    enabled: False
    host: 172.23.7.128
    port: 12201
    protocol: UDP

# The external access bits.
external_access_bits:
  dls_servbase_dataface_server: &DLS_BILLY_DATAFACE_SERVER http://*:27620
  dls_servbase_dataface_client: &DLS_BILLY_DATAFACE_CLIENT http://localhost:27620
  dataface_port: &DATAFACE_PORT 27621
  chimpflow_gui_server: &CHIMPFLOW_GUI_SERVER http://*:27622
  chimpflow_gui_client: &CHIMPFLOW_GUI_CLIENT http://127.0.0.1:27622

# The dls_servbase_dataface client/server composite.
dls_servbase_dataface_specification: &DLS_BILLY_DATAFACE_SPECIFICATION
  type: "dls_servbase_lib.datafaces.aiohttp"
  type_specific_tbd:
    # The remote dataface server access.
    aiohttp_specification:
      server: *DLS_BILLY_DATAFACE_SERVER
      client: *DLS_BILLY_DATAFACE_CLIENT
    # The local implementation of the dataface.
    actual_dataface_specification:
      type: "dls_servbase_lib.datafaces.aiosqlite"
      database:
        type: "dls_servbase_lib.databases.aiosqlite"
        filename: "${output_directory}/dls_servbase_dataface.sqlite"
        log_level: "WARNING"
  context:
    start_as: process

# The chimpflow_dataface client/server composite.
chimpflow_dataface_specification: &CHIMPFLOW_DATAFACE_SPECIFICATION
  type: "chimpflow_lib.chimpflow_datafaces.aiohttp"
  type_specific_tbd:
    # The remote chimpflow_dataface server access.
    aiohttp_specification:
      server_host: "*"
      client_host: "127.0.0.1"
      port: *DATAFACE_PORT
    # The local implementation of the chimpflow_dataface.
    actual_chimpflow_dataface_specification:
      type: "chimpflow_lib.chimpflow_datafaces.aiosqlite"
      database:
        type: "chimpflow_lib.chimpflow_databases.aiosqlite"
        filename: "${output_directory}/chimpflow_dataface.sqlite"
        log_level: "WARNING"
  context:
    start_as: process
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..5f5156a
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,139 @@
import logging
import os
import shutil

import pytest

# Formatting of testing log messages.
from dls_logformatter.dls_logformatter import DlsLogformatter

# Version of the package.
# from chimpflow_lib.version import meta as version_meta

logger = logging.getLogger(__name__)


# --------------------------------------------------------------------------------
@pytest.fixture(scope="session")
def constants(request):

    constants = {}

    yield constants


# --------------------------------------------------------------------------------
@pytest.fixture(scope="session")
def logging_setup():
    # print("")

    formatter = DlsLogformatter(type="long")
    # NOTE(review): this local is named "logger", shadowing the module-level
    # logger with a StreamHandler -- it works, but a rename would be clearer.
    logger = logging.StreamHandler()
    logger.setFormatter(formatter)
    logging.getLogger().addHandler(logger)

    # Log level for all modules.
    logging.getLogger().setLevel("DEBUG")

    # Turn off noisy debug.
+ logging.getLogger("asyncio").setLevel("WARNING") + logging.getLogger("pika").setLevel("WARNING") + logging.getLogger("stomp").setLevel("WARNING") + logging.getLogger("luigi-interface").setLevel("WARNING") + logging.getLogger("luigi.chimpflow_scheduler").setLevel("INFO") + logging.getLogger("urllib3.connectionpool").setLevel("INFO") + + logging.getLogger("chimpflow_lib.things").setLevel("INFO") + + # Messages about starting and stopping services. + logging.getLogger("chimpflow_lib.base_aiohttp").setLevel("INFO") + + # All chimpflow database sql commands. + # logging.getLogger("chimpflow_lib.chimpflow_databases.aiosqlite").setLevel("INFO") + + logging.getLogger("chimpflow_lib.chimpflow_contexts.classic").setLevel("INFO") + logging.getLogger("chimpflow_lib.chimpflow_datafaces.context").setLevel("INFO") + + # Registering signal handler. + logging.getLogger("dls_siggy_lib.signal").setLevel("INFO") + + # Set filter on the ispyb logger to ignore the annoying NOTICE. + logging.getLogger("ispyb").addFilter(_ispyb_logging_filter()) + + # Cover the version. + # logger.info("\n%s", (json.dumps(version_meta(), indent=4))) + + yield None + + +# -------------------------------------------------------------------------------- +class _traitlets_logging_filter: + """ + Python logging filter to remove annoying traitlets messages. + These are not super useful to see all the time at the DEBUG level. 
+ """ + + def filter(self, record): + + if record.levelno == 10: + if "jupyter_client/client.py" in record.pathname: + return 0 + if "jupyter_client/connect.py" in record.pathname: + return 0 + if "jupyter_client/manager.py" in record.pathname: + return 0 + if "jupyter_client/provisioning/factory.py" in record.pathname: + return 0 + if "nbclient/client.py" in record.pathname: + return 0 + if "nbconvert/exporters/templateexporter.py" in record.pathname: + return 0 + if "nbconvert/preprocessors/base.py" in record.pathname: + return 0 + if "/nbconvert/preprocessors/coalescestreams.py" in record.pathname: + return 0 + + # if "" in record.pathname: + # return 0 + + return 1 + + +# -------------------------------------------------------------------------------- +class _ispyb_logging_filter: + """ + Python logging filter to remove annoying traitlets messages. + These are not super useful to see all the time at the DEBUG level. + """ + + def filter(self, record): + + if record.msg.startswith( + "NOTICE: This code uses __future__ functionality in the ISPyB API." + ): + return 0 + + return 1 + + +# -------------------------------------------------------------------------------- +@pytest.fixture(scope="function") +def output_directory(request): + # TODO: Better way to get a newline in conftest after pytest emits the test class name. + print("") + + # Tmp directory which we can write into. + output_directory = "/tmp/%s/%s/%s" % ( + "/".join(__file__.split("/")[-3:-1]), + request.cls.__name__, + request.function.__name__, + ) + + # Tmp directory which we can write into. 
+ if os.path.exists(output_directory): + shutil.rmtree(output_directory, ignore_errors=False, onerror=None) + os.makedirs(output_directory) + + # logger.debug("output_directory is %s" % (output_directory)) + + yield output_directory diff --git a/tests/example_images/1.jpg b/tests/example_images/1.jpg new file mode 100644 index 0000000..0d57a4a Binary files /dev/null and b/tests/example_images/1.jpg differ diff --git a/tests/example_images/2.jpg b/tests/example_images/2.jpg new file mode 100644 index 0000000..fa46521 Binary files /dev/null and b/tests/example_images/2.jpg differ diff --git a/tests/example_images/3.jpg b/tests/example_images/3.jpg new file mode 100644 index 0000000..da76a16 Binary files /dev/null and b/tests/example_images/3.jpg differ diff --git a/tests/example_images/4.png b/tests/example_images/4.png new file mode 100644 index 0000000..ac40503 Binary files /dev/null and b/tests/example_images/4.png differ diff --git a/tests/images/1.jpg b/tests/images/1.jpg new file mode 100644 index 0000000..0d57a4a Binary files /dev/null and b/tests/images/1.jpg differ diff --git a/tests/images/2.jpg b/tests/images/2.jpg new file mode 100644 index 0000000..fa46521 Binary files /dev/null and b/tests/images/2.jpg differ diff --git a/tests/images/3.jpg b/tests/images/3.jpg new file mode 100644 index 0000000..da76a16 Binary files /dev/null and b/tests/images/3.jpg differ diff --git a/tests/test_tutorial.py b/tests/test_tutorial.py new file mode 100644 index 0000000..deaaf76 --- /dev/null +++ b/tests/test_tutorial.py @@ -0,0 +1,99 @@ +import json +import logging +import os +import subprocess + +from xchembku_api.databases.constants import CrystalWellFieldnames, Tablenames +from xchembku_api.databases.constants import Types as XchembkuDatabaseTypes +from xchembku_api.datafaces.constants import Types as XchembkuDatafaceTypes +from xchembku_api.datafaces.context import Context as XchembkuContext + +# Context creator. 
+from chimpflow_lib.contexts.contexts import Contexts + +# Base class for the tester. +from tests.base_context_tester import BaseContextTester + +logger = logging.getLogger(__name__) + + +# ---------------------------------------------------------------------------------------- +class TestTutorial: + def test_dataface_multiconf(self, constants, logging_setup, output_directory): + """ """ + + configuration_file = "tests/configurations/multiconf.yaml" + TutorialTester().main(constants, configuration_file, output_directory) + + +# ---------------------------------------------------------------------------------------- +class TutorialTester(BaseContextTester): + """ + Class to test the tutorial. + """ + + async def _main_coroutine(self, constants, output_directory): + """ """ + + # Specify the xchembku client type to be a local database. + client_specification = { + "type": XchembkuDatafaceTypes.AIOSQLITE, + "database": { + "type": XchembkuDatabaseTypes.AIOSQLITE, + "filename": f"{output_directory}/database/xchembku_dataface.sqlite", + }, + } + + # Establish a context to the xchembku implementation. + async with XchembkuContext(client_specification) as client_interface: + # Write two records which will be read by the tutorial. + await client_interface.insert( + Tablenames.CRYSTAL_WELLS, + [ + { + CrystalWellFieldnames.FILENAME: "1.jpg", + CrystalWellFieldnames.TARGET_POSITION_X: 1, + CrystalWellFieldnames.TARGET_POSITION_Y: 2, + }, + { + CrystalWellFieldnames.FILENAME: "2.jpg", + CrystalWellFieldnames.TARGET_POSITION_X: 3, + CrystalWellFieldnames.TARGET_POSITION_Y: 4, + }, + ], + ) + + # Get the testing configuration. + chimpflow_multiconf = self.get_multiconf() + context_configuration = await chimpflow_multiconf.load() + + # Establish a context in which the chimpflow service is running. + chimpflow_context = Contexts().build_object(context_configuration) + async with chimpflow_context: + + # Run the tutorial and capture the output. 
+ command = ["python", f"{os.getcwd()}/tests/tutorials/tutorial2.py"] + process = subprocess.run( + command, cwd=output_directory, capture_output=True + ) + if process.returncode != 0: + stderr = process.stderr.decode().replace("\\n", "\n") + logger.debug(f"stderr is:\n{stderr}") + assert process.returncode == 0 + + stdout = process.stdout.decode().replace("\\n", "\n") + logger.debug(f"stdout is:\n{stdout}") + try: + result = json.loads(stdout) + assert result["count"] == 1 + except Exception: + assert False, "stdout is not json" + + # Check the tutorial ran. + all_sql = f"SELECT * FROM {Tablenames.CRYSTAL_WELLS}" + records = await client_interface.query(all_sql) + + assert len(records) == 2 + assert records[0][CrystalWellFieldnames.FILENAME] == "1.jpg" + assert records[0][CrystalWellFieldnames.TARGET_POSITION_X] == 1 + assert records[0][CrystalWellFieldnames.TARGET_POSITION_Y] == 2 diff --git a/tests/tutorials/tutorial2.py b/tests/tutorials/tutorial2.py new file mode 100644 index 0000000..f9337a8 --- /dev/null +++ b/tests/tutorials/tutorial2.py @@ -0,0 +1,39 @@ +# This tutorial program shows you how to update an image record. + +import asyncio +import json + +from xchembku_api.databases.constants import CrystalWellFieldnames +from xchembku_api.databases.constants import Types as XchembkuDatabaseTypes +from xchembku_api.datafaces.constants import Types as XchembkuDatafaceTypes +from xchembku_api.datafaces.context import Context as XchembkuContext + +# Context creator. + +# Specify the xchembku client type to be a local database. +client_specification = { + "type": XchembkuDatafaceTypes.AIOSQLITE, + "database": { + "type": XchembkuDatabaseTypes.AIOSQLITE, + "filename": "database/xchembku_dataface.sqlite", + }, +} + + +async def tutorial(): + async with XchembkuContext(client_specification) as client_interface: + # This is the request which is sent to update the image. 
+ request = { + "filename": ".*1.jpg", + CrystalWellFieldnames.CRYSTAL_PROBABILITY: 0.9, + } + + # Send the request to the server and get the response. + response = await client_interface.update_crystal_well(request) + + # Show the response, which is None if success, otherwise a dict with errors in it. + print(json.dumps(response, indent=4)) + + +if __name__ == "__main__": + asyncio.run(tutorial())