diff --git a/.github/workflows/build-test-publish.yml b/.github/workflows/build-test-publish.yml
index d3d5a03551..b77ac0196e 100644
--- a/.github/workflows/build-test-publish.yml
+++ b/.github/workflows/build-test-publish.yml
@@ -49,18 +49,32 @@ jobs:
       AFNI_IMSAVE_WARNINGS: NO
       AFNI_TTATLAS_DATASET: /opt/afni/atlases
       AFNI_PLUGINPATH: /opt/afni/plugins
+      MARKS: ${{ matrix.marks }}
+      DEPENDS: ${{ matrix.dependencies }}
 
     strategy:
-      max-parallel: 6
+      fail-fast: false
       matrix:
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
-        marks: ["not slow"]
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+        dependencies: ["latest", "pre"]
+        marks: ["fast"]
         include:
           - python-version: "3.9"
-            marks: "slow and not veryslow"
+            dependencies: "min"
+            marks: "fast"
+          - python-version: "3.9"
+            dependencies: "latest"
+            marks: "slow"
           - python-version: "3.12"
+            dependencies: "latest"
             marks: "veryslow"
+        exclude:
+          - python-version: "3.9"
+            dependencies: "pre"
+          - python-version: "3.10"
+            dependencies: "pre"
     steps:
+      - uses: actions/checkout@v4
       - uses: actions/cache@v4
         with:
           path: /var/lib/apt
@@ -89,11 +103,15 @@ jobs:
             curl -O https://afni.nimh.nih.gov/pub/dist/bin/misc/@update.afni.binaries && \
               tcsh @update.afni.binaries -package linux_ubuntu_16_64 -bindir ${AFNI_HOME}
           fi
+          ls -l ${AFNI_HOME}
+          echo "PATH=${AFNI_HOME}:$PATH" | tee -a $GITHUB_ENV
 
       - name: Git settings (pacify DataLad)
         run: |
           git config --global user.name 'NiPreps Bot'
           git config --global user.email 'nipreps@gmail.com'
+      - name: Install the latest version of uv
+        uses: astral-sh/setup-uv@v4
       - name: Set up Python ${{ matrix.python-version }}
         uses: conda-incubator/setup-miniconda@v3
         with:
@@ -110,22 +128,14 @@ jobs:
             ~/conda_pkgs_dir
             /home/runner/.cache/pip
           key: python-${{ matrix.python-version }}-${{ env.CACHE_NUM }}
-          restore-keys: |
-            python-${{ matrix.python-version }}-${{ env.CACHE_NUM }}
       - name: Install DataLad
         run: |
-          conda install git-annex=*=alldep* pip
-          pip install datalad datalad-osf
+          conda install git-annex=*=alldep*
+          uv tool install datalad --with=datalad-next --with=datalad-osf
+          uv tool install datalad-osf --with=datalad-next
       - name: Install fsl and ANTs
         run: |
           conda install fsl-fugue fsl-topup ants
-      - uses: actions/checkout@v4
-      - name: Install dependencies
-        timeout-minutes: 5
-        run: |
-          pip install .[tests]
-
-
       - uses: actions/cache@v4
         with:
           path: ~/.cache/templateflow
@@ -134,7 +144,7 @@ jobs:
             tf-cache-
       - name: Get TemplateFlow's required objects
         run: |
-          python tools/cache_templateflow.py
+          uv run tools/cache_templateflow.py
 
       - uses: actions/cache@v4
         with:
@@ -198,18 +208,16 @@ jobs:
           mkdir -p $( dirname $FS_LICENSE )
           echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > $FS_LICENSE
 
-      - name: Run pytest with coverage
+      - name: Install tox
         run: |
-          export PATH=${AFNI_HOME}:$PATH
-          export FSLDIR=${CONDA_PREFIX}
-          pytest -v --cov sdcflows --cov-report xml:cov.xml --doctest-modules -n auto sdcflows \
-            --durations=20 --durations-min=10 -m "$MARKS"
-        env:
-          MARKS: ${{ matrix.marks }}
+          uv tool install tox --with=tox-uv --with=tox-gh-actions
+      - name: Show tox config
+        run: tox c
+      - name: Run tox
+        run: tox -v --exit-and-dump-after 1200
 
-      - uses: codecov/codecov-action@v4
+      - uses: codecov/codecov-action@v5
         with:
-          file: cov.xml
           token: ${{ secrets.CODECOV_TOKEN }}
         if: ${{ always() }}
 
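A note on the matrix plumbing above: the job exports each matrix cell as the MARKS and DEPENDS environment variables, and tox-gh-actions (configured in the tox.ini added below) combines those with the runner's interpreter version to select exactly one tox environment. A sketch of the resolution for a hypothetical Python 3.12 / latest / fast cell:

    # Given the [gh-actions] and [gh-actions:env] mappings in tox.ini,
    # the factors py312 + latest + fast resolve to a single environment:
    tox run -e py312-latest-fast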
"Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] license = "Apache-2.0" requires-python = ">=3.9" dependencies = [ + "acres >= 0.2.0", "attrs >= 20.1.0", - "nibabel >=3.1.0", - "nipype >=1.8.5,<2.0", - "traits <6.4", + "nibabel >= 3.0", + "nipype >= 1.8.5", "migas >= 0.4.0", "niworkflows >= 1.7.0", - "nitransforms >= 23.0.1", - "numpy >= 1.21.0", + "nitransforms >= 24.1.0", + "numpy >= 1.22", "pybids >= 0.16.4", "scikit-image >= 0.18", "scipy >= 1.8.1", - "templateflow", - "toml", + "templateflow >= 23.1", + "toml >= 0.10", ] dynamic = ["version"] @@ -49,22 +50,13 @@ doc = [ "importlib_resources", "ipykernel", "ipython", - "matplotlib >= 2.2.0", "nbsphinx", - "nibabel", - "nipype >= 1.5.1", - "niworkflows >= 1.10.0", - "numpy", - "packaging", "pandoc", "pydot >= 1.2.3", "pydotplus", - "scipy", "sphinx >= 7.2.2", "sphinx-argparse", "sphinxcontrib-apidoc", - "templateflow", - "traits < 6.4" ] mem = [ @@ -79,11 +71,11 @@ dev = [ ] test = [ - "coverage", - "pytest", - "pytest-cov", + "coverage[toml] >=5.2.1", + "pytest >= 6", + "pytest-cov >= 2.11", "pytest-env", - "pytest-xdist" + "pytest-xdist >= 2.5", ] # Aliases @@ -159,8 +151,22 @@ per-file-ignores = [ ] [tool.pytest.ini_options] +minversion = "6" +testpaths = ["sdcflows"] +log_cli_level = "INFO" +xfail_strict = true norecursedirs = [".git"] -addopts = "-svx --doctest-modules --strict-markers" +addopts = [ + "-svx", + "-ra", + "--strict-config", + "--strict-markers", + "--doctest-modules", + # Config pytest-cov + "--cov=sdcflows", + "--cov-report=xml", + "--cov-config=pyproject.toml", +] doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" env = "PYTHONHASHSEED=0" filterwarnings = ["ignore::DeprecationWarning"] @@ -173,7 +179,6 @@ markers = [ [tool.coverage.run] branch = true -concurrency = 'multiprocessing' omit = [ '*/tests/*', '*/__init__.py', diff --git a/sdcflows/data/__init__.py b/sdcflows/data/__init__.py index 99e82112f3..041c9366ab 100644 --- a/sdcflows/data/__init__.py +++ b/sdcflows/data/__init__.py @@ -10,173 +10,6 @@ .. autoclass:: Loader """ -from __future__ import annotations +from acres import Loader -import atexit -import os -from contextlib import AbstractContextManager, ExitStack -from functools import cached_property -from pathlib import Path -from types import ModuleType -from typing import Union - -try: - from functools import cache -except ImportError: # PY38 - from functools import lru_cache as cache - -try: # Prefer backport to leave consistency to dependency spec - from importlib_resources import as_file, files -except ImportError: - from importlib.resources import as_file, files # type: ignore - -try: # Prefer stdlib so Sphinx can link to authoritative documentation - from importlib.resources.abc import Traversable -except ImportError: - from importlib_resources.abc import Traversable - -__all__ = ["load"] - - -class Loader: - """A loader for package files relative to a module - - This class wraps :mod:`importlib.resources` to provide a getter - function with an interpreter-lifetime scope. For typical packages - it simply passes through filesystem paths as :class:`~pathlib.Path` - objects. For zipped distributions, it will unpack the files into - a temporary directory that is cleaned up on interpreter exit. - - This loader accepts a fully-qualified module name or a module - object. - - Expected usage:: - - '''Data package - - .. autofunction:: load_data - - .. automethod:: load_data.readable - - .. 
diff --git a/sdcflows/data/__init__.py b/sdcflows/data/__init__.py
index 99e82112f3..041c9366ab 100644
--- a/sdcflows/data/__init__.py
+++ b/sdcflows/data/__init__.py
@@ -10,173 +10,6 @@
 
 .. autoclass:: Loader
 """
-from __future__ import annotations
+from acres import Loader
 
-import atexit
-import os
-from contextlib import AbstractContextManager, ExitStack
-from functools import cached_property
-from pathlib import Path
-from types import ModuleType
-from typing import Union
-
-try:
-    from functools import cache
-except ImportError:  # PY38
-    from functools import lru_cache as cache
-
-try:  # Prefer backport to leave consistency to dependency spec
-    from importlib_resources import as_file, files
-except ImportError:
-    from importlib.resources import as_file, files  # type: ignore
-
-try:  # Prefer stdlib so Sphinx can link to authoritative documentation
-    from importlib.resources.abc import Traversable
-except ImportError:
-    from importlib_resources.abc import Traversable
-
-__all__ = ["load"]
-
-
-class Loader:
-    """A loader for package files relative to a module
-
-    This class wraps :mod:`importlib.resources` to provide a getter
-    function with an interpreter-lifetime scope. For typical packages
-    it simply passes through filesystem paths as :class:`~pathlib.Path`
-    objects. For zipped distributions, it will unpack the files into
-    a temporary directory that is cleaned up on interpreter exit.
-
-    This loader accepts a fully-qualified module name or a module
-    object.
-
-    Expected usage::
-
-        '''Data package
-
-        .. autofunction:: load_data
-
-        .. automethod:: load_data.readable
-
-        .. automethod:: load_data.as_path
-
-        .. automethod:: load_data.cached
-        '''
-
-        from sdcflows.data import Loader
-
-        load_data = Loader(__package__)
-
-    :class:`~Loader` objects implement the :func:`callable` interface
-    and generate a docstring, and are intended to be treated and documented
-    as functions.
-
-    For greater flexibility and improved readability over the ``importlib.resources``
-    interface, explicit methods are provided to access resources.
-
-    +---------------+----------------+------------------+
-    | On-filesystem | Lifetime       | Method           |
-    +---------------+----------------+------------------+
-    | `True`        | Interpreter    | :meth:`cached`   |
-    +---------------+----------------+------------------+
-    | `True`        | `with` context | :meth:`as_path`  |
-    +---------------+----------------+------------------+
-    | `False`       | n/a            | :meth:`readable` |
-    +---------------+----------------+------------------+
-
-    It is also possible to use ``Loader`` directly::
-
-        from sdcflows.data import Loader
-
-        Loader(other_package).readable('data/resource.ext').read_text()
-
-        with Loader(other_package).as_path('data') as pkgdata:
-            # Call function that requires full Path implementation
-            func(pkgdata)
-
-        # contrast to
-
-        from importlib_resources import files, as_file
-
-        files(other_package).joinpath('data/resource.ext').read_text()
-
-        with as_file(files(other_package) / 'data') as pkgdata:
-            func(pkgdata)
-
-    .. automethod:: readable
-
-    .. automethod:: as_path
-
-    .. automethod:: cached
-    """
-
-    def __init__(self, anchor: Union[str, ModuleType]):
-        self._anchor = anchor
-        self.files = files(anchor)
-        self.exit_stack = ExitStack()
-        atexit.register(self.exit_stack.close)
-        # Allow class to have a different docstring from instances
-        self.__doc__ = self._doc
-
-    @cached_property
-    def _doc(self):
-        """Construct docstring for instances
-
-        Lists the public top-level paths inside the location, where
-        non-public means has a `.` or `_` prefix or is a 'tests'
-        directory.
-        """
-        top_level = sorted(
-            os.path.relpath(p, self.files) + "/"[: p.is_dir()]
-            for p in self.files.iterdir()
-            if p.name[0] not in (".", "_") and p.name != "tests"
-        )
-        doclines = [
-            f"Load package files relative to ``{self._anchor}``.",
-            "",
-            "This package contains the following (top-level) files/directories:",
-            "",
-            *(f"* ``{path}``" for path in top_level),
-        ]
-
-        return "\n".join(doclines)
-
-    def readable(self, *segments) -> Traversable:
-        """Provide read access to a resource through a Path-like interface.
-
-        This file may or may not exist on the filesystem, and may be
-        efficiently used for read operations, including directory traversal.
-
-        This result is not cached or copied to the filesystem in cases where
-        that would be necessary.
-        """
-        return self.files.joinpath(*segments)
-
-    def as_path(self, *segments) -> AbstractContextManager[Path]:
-        """Ensure data is available as a :class:`~pathlib.Path`.
-
-        This method generates a context manager that yields a Path when
-        entered.
-
-        This result is not cached, and any temporary files that are created
-        are deleted when the context is exited.
-        """
-        return as_file(self.files.joinpath(*segments))
-
-    @cache
-    def cached(self, *segments) -> Path:
-        """Ensure data is available as a :class:`~pathlib.Path`.
-
-        Any temporary files that are created remain available throughout
-        the duration of the program, and are deleted when Python exits.
-
-        Results are cached so that multiple calls do not unpack the same
-        data multiple times, but the cache is sensitive to the specific
-        argument(s) passed.
-        """
-        return self.exit_stack.enter_context(as_file(self.files.joinpath(*segments)))
-
-    __call__ = cached
-
-
-load = Loader(__package__)
+load = Loader(__spec__.name)
diff --git a/sdcflows/interfaces/utils.py b/sdcflows/interfaces/utils.py
index 6696c786d7..a50873737e 100644
--- a/sdcflows/interfaces/utils.py
+++ b/sdcflows/interfaces/utils.py
@@ -50,7 +50,7 @@ class _FlattenInputSpec(BaseInterfaceInputSpec):
         desc="list of input data",
     )
     in_meta = InputMultiObject(
-        traits.DictStrAny,
+        traits.Dict(traits.Str),
         mandatory=True,
         desc="list of metadata",
     )
@@ -61,12 +61,12 @@ class _FlattenOutputSpec(TraitedSpec):
     out_list = OutputMultiObject(
         traits.Tuple(
             File(exists=True),
-            traits.DictStrAny,
+            traits.Dict(traits.Str),
         ),
         desc="list of output files",
     )
     out_data = OutputMultiObject(File(exists=True))
-    out_meta = OutputMultiObject(traits.DictStrAny)
+    out_meta = OutputMultiObject(traits.Dict(traits.Str))
 
 
 class Flatten(SimpleInterface):
diff --git a/tools/cache_templateflow.py b/tools/cache_templateflow.py
index 4397b2e3e9..814b56169d 100644
--- a/tools/cache_templateflow.py
+++ b/tools/cache_templateflow.py
@@ -1,4 +1,10 @@
 #!/usr/bin/env python3
+# /// script
+# requires-python = ">=3.9"
+# dependencies = [
+#     "templateflow",
+# ]
+# ///
 from templateflow import api as tfapi
 
 tfapi.get("MNI152NLin2009cAsym", resolution=2, desc="brain", suffix="mask")
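The `# /// script` block added above is inline script metadata (PEP 723): `uv run` reads it, resolves the declared dependencies into an ephemeral environment, and executes the script, which is why the workflow no longer needs the project installed before this step. A quick local check, assuming a PEP 723-aware uv:

    uv run tools/cache_templateflow.py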
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000000..77d4df3b13
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,146 @@
+[tox]
+requires =
+  tox>=4
+  tox-uv
+envlist =
+  py3{9,10,11,12,13}-latest-{fast,slow,veryslow}
+  py39-min-fast
+  py3{11,12,13}-pre-{fast,slow,veryslow}
+  style
+  spellcheck
+skip_missing_interpreters = true
+
+# Configuration that allows us to split tests across GitHub runners effectively
+[gh-actions]
+python =
+  3.9: py39
+  3.10: py310
+  3.11: py311
+  3.12: py312
+  3.13: py313
+
+[gh-actions:env]
+DEPENDS =
+  min: min
+  latest: latest
+  pre: pre
+
+MARKS =
+  fast: fast
+  slow: slow
+  veryslow: veryslow
+
+[testenv]
+description = Pytest with coverage
+labels = test
+pip_pre =
+  pre: true
+pass_env =
+  TEMPLATEFLOW_HOME
+  # Freesurfer variables searched for
+  FREESURFER_HOME
+  SUBJECTS_DIR
+  FS_LICENSE
+  # FSL
+  FSLOUTPUTTYPE
+  FSLMULTIFILEQUIT
+  # AFNI
+  AFNI_HOME
+  AFNI_MODELPATH
+  AFNI_IMSAVE_WARNINGS
+  AFNI_TTATLAS_DATASET
+  AFNI_PLUGINPATH
+  # CI variables
+  TEST_DATA_HOME
+  TEST_OUTPUT_DIR
+  TEST_WORK_DIR
+  FMRIPREP_REGRESSION_SOURCE
+  CACHED_WORK_DIRECTORY
+  # CircleCI-specific
+  CIRCLE_NPROCS
+  SAVE_CIRCLE_ARTIFACTS
+  # getpass.getuser() sources for Windows:
+  LOGNAME
+  USER
+  LNAME
+  USERNAME
+  # Pass user color preferences through
+  PY_COLORS
+  FORCE_COLOR
+  NO_COLOR
+  CLICOLOR
+  CLICOLOR_FORCE
+  PYTHON_GIL
+deps =
+  # Waiting on a release
+  py313: traits @ git+https://github.com/enthought/traits.git@10954eb
+extras = tests
+setenv =
+  pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
+  pre: UV_INDEX=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
+  pre: UV_INDEX_STRATEGY=unsafe-best-match
+uv_resolution =
+  min: lowest-direct
+
+commands =
+  pytest --durations=20 --durations-min=1.0 --cov-report term-missing \
+    fast: -m "not slow" \
+    slow: -m "slow and not veryslow" \
+    veryslow: -m "veryslow" \
+    {posargs:-n auto}
+
+[testenv:style]
+description = Check our style guide
+labels = check
+deps =
+  ruff
+skip_install = true
+commands =
+  ruff check --diff
+  ruff format --diff
+
+[testenv:style-fix]
+description = Auto-apply style guide to the extent possible
+labels = pre-release
+deps =
+  ruff
+skip_install = true
+commands =
+  ruff check --fix
+  ruff format
+  ruff check --select ISC001
+
+[testenv:spellcheck]
+description = Check spelling
+labels = check
+deps =
+  codespell[toml]
+skip_install = true
+commands =
+  codespell . {posargs}
+
+[testenv:build{,-strict}]
+labels =
+  check
+  pre-release
+deps =
+  build
+  twine
+skip_install = true
+set_env =
+  # Ignore specific known warnings:
+  # https://github.com/pypa/pip/issues/11684
+  # https://github.com/pypa/pip/issues/12243
+  strict: PYTHONWARNINGS=error,once:pkg_resources is deprecated as an API.:DeprecationWarning:pip._internal.metadata.importlib._envs,once:Unimplemented abstract methods {'locate_file'}:DeprecationWarning:pip._internal.metadata.importlib._dists
+commands =
+  python -m build
+  python -m twine check dist/*
+
+[testenv:publish]
+depends = build
+labels = release
+deps =
+  twine
+skip_install = true
+commands =
+  python -m twine upload dist/*
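With the configuration above, the same environments are addressable locally; a minimal sketch, assuming tox >= 4 is installed (the [tox] requires block self-provisions tox-uv), and where `-k topup` is only a hypothetical pytest keyword filter passed through {posargs} in place of the default `-n auto`:

    tox run -e style,spellcheck        # checks only; nothing is installed
    tox run -e py39-min-fast           # fast marks against the minimum pins
    tox run -e py312-latest-slow -- -k topup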