Skip to content

Commit

Permalink
Merge pull request #472 from effigies/tox-uv
Browse files Browse the repository at this point in the history
chore: Update dependencies, transition to tox-uv
  • Loading branch information
effigies authored Dec 18, 2024
2 parents 656a51d + 9eb9cd6 commit d81518e
Show file tree
Hide file tree
Showing 6 changed files with 217 additions and 219 deletions.
58 changes: 33 additions & 25 deletions .github/workflows/build-test-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,18 +49,32 @@ jobs:
AFNI_IMSAVE_WARNINGS: NO
AFNI_TTATLAS_DATASET: /opt/afni/atlases
AFNI_PLUGINPATH: /opt/afni/plugins
MARKS: ${{ matrix.marks }}
DEPENDS: ${{ matrix.dependencies }}
strategy:
max-parallel: 6
fail-fast: false
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
marks: ["not slow"]
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
dependencies: ["latest", "pre"]
marks: ["fast"]
include:
- python-version: "3.9"
marks: "slow and not veryslow"
dependencies: "min"
marks: "fast"
- python-version: "3.9"
dependencies: "latest"
marks: "slow"
- python-version: "3.12"
dependencies: "latest"
marks: "veryslow"
exclude:
- python-version: "3.9"
dependencies: "pre"
- python-version: "3.10"
dependencies: "pre"

steps:
- uses: actions/checkout@v4
- uses: actions/cache@v4
with:
path: /var/lib/apt
Expand Down Expand Up @@ -89,11 +103,15 @@ jobs:
curl -O https://afni.nimh.nih.gov/pub/dist/bin/misc/@update.afni.binaries && \
tcsh @update.afni.binaries -package linux_ubuntu_16_64 -bindir ${AFNI_HOME}
fi
ls -l ${AFNI_HOME}
echo "PATH=${AFNI_HOME}:$PATH" | tee -a $GITHUB_ENV
- name: Git settings (pacify DataLad)
run: |
git config --global user.name 'NiPreps Bot'
git config --global user.email '[email protected]'
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v4
- name: Set up Python ${{ matrix.python-version }}
uses: conda-incubator/setup-miniconda@v3
with:
Expand All @@ -110,22 +128,14 @@ jobs:
~/conda_pkgs_dir
/home/runner/.cache/pip
key: python-${{ matrix.python-version }}-${{ env.CACHE_NUM }}
restore-keys: |
python-${{ matrix.python-version }}-${{ env.CACHE_NUM }}
- name: Install DataLad
run: |
conda install git-annex=*=alldep* pip
pip install datalad datalad-osf
conda install git-annex=*=alldep*
uv tool install datalad --with=datalad-next --with=datalad-osf
uv tool install datalad-osf --with=datalad-next
- name: Install fsl and ANTs
run: |
conda install fsl-fugue fsl-topup ants
- uses: actions/checkout@v4
- name: Install dependencies
timeout-minutes: 5
run: |
pip install .[tests]
- uses: actions/cache@v4
with:
path: ~/.cache/templateflow
Expand All @@ -134,7 +144,7 @@ jobs:
tf-cache-
- name: Get TemplateFlow's required objects
run: |
python tools/cache_templateflow.py
uv run tools/cache_templateflow.py
- uses: actions/cache@v4
with:
Expand Down Expand Up @@ -198,18 +208,16 @@ jobs:
mkdir -p $( dirname $FS_LICENSE )
echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > $FS_LICENSE
- name: Run pytest with coverage
- name: Install tox
run: |
export PATH=${AFNI_HOME}:$PATH
export FSLDIR=${CONDA_PREFIX}
pytest -v --cov sdcflows --cov-report xml:cov.xml --doctest-modules -n auto sdcflows \
--durations=20 --durations-min=10 -m "$MARKS"
env:
MARKS: ${{ matrix.marks }}
uv tool install tox --with=tox-uv --with=tox-gh-actions
- name: Show tox config
run: tox c
- name: Run tox
run: tox -v --exit-and-dump-after 1200

- uses: codecov/codecov-action@v4
- uses: codecov/codecov-action@v5
with:
file: cov.xml
token: ${{ secrets.CODECOV_TOKEN }}
if: ${{ always() }}

Expand Down
49 changes: 27 additions & 22 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,23 +17,24 @@ classifiers = [
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
license = "Apache-2.0"
requires-python = ">=3.9"
dependencies = [
"acres >= 0.2.0",
"attrs >= 20.1.0",
"nibabel >=3.1.0",
"nipype >=1.8.5,<2.0",
"traits <6.4",
"nibabel >= 3.0",
"nipype >= 1.8.5",
"migas >= 0.4.0",
"niworkflows >= 1.7.0",
"nitransforms >= 23.0.1",
"numpy >= 1.21.0",
"nitransforms >= 24.1.0",
"numpy >= 1.22",
"pybids >= 0.16.4",
"scikit-image >= 0.18",
"scipy >= 1.8.1",
"templateflow",
"toml",
"templateflow >= 23.1",
"toml >= 0.10",
]
dynamic = ["version"]

Expand All @@ -49,22 +50,13 @@ doc = [
"importlib_resources",
"ipykernel",
"ipython",
"matplotlib >= 2.2.0",
"nbsphinx",
"nibabel",
"nipype >= 1.5.1",
"niworkflows >= 1.10.0",
"numpy",
"packaging",
"pandoc",
"pydot >= 1.2.3",
"pydotplus",
"scipy",
"sphinx >= 7.2.2",
"sphinx-argparse",
"sphinxcontrib-apidoc",
"templateflow",
"traits < 6.4"
]

mem = [
Expand All @@ -79,11 +71,11 @@ dev = [
]

test = [
"coverage",
"pytest",
"pytest-cov",
"coverage[toml] >=5.2.1",
"pytest >= 6",
"pytest-cov >= 2.11",
"pytest-env",
"pytest-xdist"
"pytest-xdist >= 2.5",
]

# Aliases
Expand Down Expand Up @@ -159,8 +151,22 @@ per-file-ignores = [
]

[tool.pytest.ini_options]
minversion = "6"
testpaths = ["sdcflows"]
log_cli_level = "INFO"
xfail_strict = true
norecursedirs = [".git"]
addopts = "-svx --doctest-modules --strict-markers"
addopts = [
"-svx",
"-ra",
"--strict-config",
"--strict-markers",
"--doctest-modules",
# Config pytest-cov
"--cov=sdcflows",
"--cov-report=xml",
"--cov-config=pyproject.toml",
]
doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS"
env = "PYTHONHASHSEED=0"
filterwarnings = ["ignore::DeprecationWarning"]
Expand All @@ -173,7 +179,6 @@ markers = [

[tool.coverage.run]
branch = true
concurrency = 'multiprocessing'
omit = [
'*/tests/*',
'*/__init__.py',
Expand Down
171 changes: 2 additions & 169 deletions sdcflows/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,173 +10,6 @@
.. autoclass:: Loader
"""
from __future__ import annotations
from acres import Loader

import atexit
import os
from contextlib import AbstractContextManager, ExitStack
from functools import cached_property
from pathlib import Path
from types import ModuleType
from typing import Union

try:
from functools import cache
except ImportError: # PY38
from functools import lru_cache as cache

try: # Prefer backport to leave consistency to dependency spec
from importlib_resources import as_file, files
except ImportError:
from importlib.resources import as_file, files # type: ignore

try: # Prefer stdlib so Sphinx can link to authoritative documentation
from importlib.resources.abc import Traversable
except ImportError:
from importlib_resources.abc import Traversable

__all__ = ["load"]


class Loader:
    """A loader for package files relative to a module.

    This class wraps :mod:`importlib.resources` to provide a getter
    function with an interpreter-lifetime scope. For typical packages
    it simply passes through filesystem paths as :class:`~pathlib.Path`
    objects. For zipped distributions, it will unpack the files into
    a temporary directory that is cleaned up on interpreter exit.

    This loader accepts a fully-qualified module name or a module
    object.

    Expected usage::

        '''Data package

        .. autofunction:: load_data

        .. automethod:: load_data.readable

        .. automethod:: load_data.as_path

        .. automethod:: load_data.cached
        '''

        from sdcflows.data import Loader

        load_data = Loader(__package__)

    :class:`~Loader` objects implement the :func:`callable` interface
    and generate a docstring, and are intended to be treated and documented
    as functions.

    For greater flexibility and improved readability over the ``importlib.resources``
    interface, explicit methods are provided to access resources.

    +---------------+----------------+------------------+
    | On-filesystem | Lifetime       | Method           |
    +---------------+----------------+------------------+
    | `True`        | Interpreter    | :meth:`cached`   |
    +---------------+----------------+------------------+
    | `True`        | `with` context | :meth:`as_path`  |
    +---------------+----------------+------------------+
    | `False`       | n/a            | :meth:`readable` |
    +---------------+----------------+------------------+

    It is also possible to use ``Loader`` directly::

        from sdcflows.data import Loader

        Loader(other_package).readable('data/resource.ext').read_text()

        with Loader(other_package).as_path('data') as pkgdata:
            # Call function that requires full Path implementation
            func(pkgdata)

        # contrast to

        from importlib_resources import files, as_file

        files(other_package).joinpath('data/resource.ext').read_text()

        with as_file(files(other_package) / 'data') as pkgdata:
            func(pkgdata)

    .. automethod:: readable

    .. automethod:: as_path

    .. automethod:: cached
    """

    def __init__(self, anchor: Union[str, ModuleType]):
        # ``anchor`` may be a dotted module name or an imported module object;
        # it is passed straight to :func:`importlib.resources.files`.
        self._anchor = anchor
        self.files = files(anchor)
        # Shared exit stack that owns any temporary extractions made by
        # :meth:`cached`; closed automatically at interpreter shutdown.
        self.exit_stack = ExitStack()
        atexit.register(self.exit_stack.close)
        # Allow class to have a different docstring from instances
        self.__doc__ = self._doc

    @cached_property
    def _doc(self):
        """Construct docstring for instances.

        Lists the public top-level paths inside the location, where
        non-public means has a `.` or `_` prefix or is a 'tests'
        directory.
        """
        # Append "/" to directory names only: the slice ``[: p.is_dir()]``
        # is "/"[:1] for directories and "/"[:0] (empty) for files.
        top_level = sorted(
            os.path.relpath(p, self.files) + "/"[: p.is_dir()]
            for p in self.files.iterdir()
            if p.name[0] not in (".", "_") and p.name != "tests"
        )
        doclines = [
            f"Load package files relative to ``{self._anchor}``.",
            "",
            "This package contains the following (top-level) files/directories:",
            "",
            *(f"* ``{path}``" for path in top_level),
        ]

        return "\n".join(doclines)

    def readable(self, *segments) -> Traversable:
        """Provide read access to a resource through a Path-like interface.

        This file may or may not exist on the filesystem, and may be
        efficiently used for read operations, including directory traversal.

        This result is not cached or copied to the filesystem in cases where
        that would be necessary.
        """
        return self.files.joinpath(*segments)

    def as_path(self, *segments) -> AbstractContextManager[Path]:
        """Ensure data is available as a :class:`~pathlib.Path`.

        This method generates a context manager that yields a Path when
        entered.

        This result is not cached, and any temporary files that are created
        are deleted when the context is exited.
        """
        return as_file(self.files.joinpath(*segments))

    @cache
    def cached(self, *segments) -> Path:
        """Ensure data is available as a :class:`~pathlib.Path`.

        Any temporary files that are created remain available throughout
        the duration of the program, and are deleted when Python exits.

        Results are cached so that multiple calls do not unpack the same
        data multiple times, but the cache is sensitive to the specific
        argument(s) passed.
        """
        # Temporary extractions are parked on the instance's exit stack so
        # they live until interpreter shutdown (see __init__).
        return self.exit_stack.enter_context(as_file(self.files.joinpath(*segments)))

    # Calling the loader is equivalent to the interpreter-lifetime getter.
    __call__ = cached


# Module-level loader for this data package. Anchor on ``__spec__.name``
# (the canonical import name of this module) rather than ``__package__``:
# the two are equivalent for a package ``__init__``, but ``__spec__.name``
# is always populated, whereas ``__package__`` can be unset or rewritten.
# NOTE(review): the original showed two consecutive assignments to ``load``
# (a diff artifact); only the final, effective one is kept.
load = Loader(__spec__.name)
Loading

0 comments on commit d81518e

Please sign in to comment.