Update dependencies #458

Open · wants to merge 4 commits into base: main
2 changes: 1 addition & 1 deletion .github/workflows/nightly.yml
@@ -70,7 +70,7 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
    steps:
      - uses: khanlab/actions/.github/actions/[email protected]
        id: setup
5 changes: 3 additions & 2 deletions .github/workflows/test.yml
@@ -15,6 +15,7 @@ on:
    paths:
      - snakebids/**
      - scripts/**
+      - poetry.lock

jobs:
  quality:
@@ -51,7 +52,7 @@ jobs:
    needs: [ 'quality' ]
    strategy:
      matrix:
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
+        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13.0-rc.3']
    steps:
      - uses: khanlab/actions/.github/actions/[email protected]
        id: setup
@@ -67,7 +68,7 @@ jobs:
    needs: [ 'build-cache-env' ]
    strategy:
      matrix:
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
+        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13.0-rc.3']
        split: ['1', '2', '3', '4', '5']
      fail-fast: false
    steps:
2 changes: 1 addition & 1 deletion .readthedocs.yml
@@ -20,7 +20,7 @@ formats:
build:
  os: 'ubuntu-22.04'
  tools:
-    python: '3.11'
+    python: '3.12'
  jobs:
    post_checkout:
      - (git fetch --tags) || exit 183
2,264 changes: 1,190 additions & 1,074 deletions poetry.lock

Large diffs are not rendered by default.

34 changes: 15 additions & 19 deletions pyproject.toml
@@ -34,7 +34,7 @@ patterns = [
python = ">=3.8,<4.0"
# core dep with high breakage potential given plans for redesign, so
# keep upper limit
-pybids = ">=0.16.0,<0.17"
+pybids = ">=0.16.0,<0.18"
snakemake = [
    { version = ">=5.28.0,<8", python = "<3.11" },
    { version = ">=7.18.2", python = ">=3.11" },
@@ -81,7 +81,7 @@ pluggy = ">=1.3"
[tool.poetry.group.dev.dependencies]
pytest = "^8"
pytest-mock = "^3.7.0"
-poethepoet = "^0.27"
+poethepoet = "^0.29"
pre-commit = "^3.0.0"
# a mkinit dep has the 'platform_system == "Windows"' as a marker on an incompatible dependency
# (pydantic<2.0 cf copier), so set the inverse as a marker here so mkinit can
@@ -92,8 +92,9 @@ pytest-benchmark = "^4.0.0"
pyfakefs = "^5.1.0"
pyparsing = "^3.0.9"
pathvalidate = "^3.0.0"
-pyright = "^1.1.370"
-ruff = "^0.5"
+# Avoid bug in 1.1.388 (see https://github.com/microsoft/pyright/issues/9136)
+pyright = "1.1.377"
+ruff = "^0.6"
pytest-xdist = "^3.3.1"
pytest-split = [
    { version = "^0.8.1", python = "3.8" },
@@ -106,21 +107,16 @@ docstring-parser = "^0.16"


[tool.poetry.group.docs.dependencies]
sphinx = "^7"
sphinx-argparse = "^0.4.0"
sphinxcontrib-asciinema = "^0.4"
myst-parser = "^3"
furo = "^2024.4.27"
sphinx-copybutton = "^0.5.2"
sphinx-reredirects = "^0.1.3"
sphinx-design = [
{ version = "^0.5.0", python = "3.8" },
{ version = "^0.6", python = ">=3.9" },
]
sphinx-autobuild = [
{ version = "^2021.03.14", python = "3.8" },
{ version = "^2024.02.04", python = ">=3.9" },
]
sphinx = { version = "^8", python = ">=3.10" }
sphinx-argparse = { version = "^0.4.0", python = ">=3.10" }
sphinxcontrib-asciinema = { version = "^0.4", python = ">=3.10" }
myst-parser = { version = "^4", python = ">=3.10" }
furo = { version = "^2024.4.27", python = ">=3.10" }
sphinx-copybutton = { version = "^0.5.2", python = ">=3.10" }
sphinx-reredirects = { version = "^0.1.3", python = ">=3.10" }
sphinx-design = { version = "^0.6", python = ">=3.10" }
sphinx-autobuild = { version = "^2024.02.04", python = ">=3.10" }


[tool.poetry.scripts]
snakebids = "snakebids.admin:main"
1 change: 1 addition & 0 deletions snakebids/__init__.py
@@ -3,6 +3,7 @@

__submodules__ = ["core", "paths"]


from snakebids import _warningformat # noqa: F401

# isort: split
3 changes: 1 addition & 2 deletions snakebids/core/_querying.py
@@ -7,8 +7,7 @@

import attrs
import more_itertools as itx
-from bids.layout import BIDSLayout, Query
-from bids.layout.models import BIDSFile
+from bids.layout import BIDSFile, BIDSLayout, Query
from typing_extensions import Self, TypeAlias, override

from snakebids.exceptions import ConfigError, PybidsError
2 changes: 1 addition & 1 deletion snakebids/core/datasets.py
@@ -11,7 +11,7 @@

import attr
import more_itertools as itx
-from bids import BIDSLayout
+from bids.layout import BIDSLayout
from pvandyken.deprecated import deprecated
from typing_extensions import Self, TypedDict

2 changes: 1 addition & 1 deletion snakebids/core/input_generation.py
@@ -17,7 +17,7 @@
)

import more_itertools as itx
-from bids import BIDSLayout, BIDSLayoutIndexer
+from bids.layout import BIDSLayout, BIDSLayoutIndexer

from snakebids.core._querying import (
FilterSpecError,
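The three import changes above all follow the same pattern: BIDSLayout and related names are now imported from the explicit bids.layout submodule rather than the top-level bids package. A minimal sketch of the pattern, with a hypothetical dataset path:

from bids.layout import BIDSLayout  # previously: from bids import BIDSLayout

# Hypothetical dataset root; validate=False skips BIDS validation for speed.
layout = BIDSLayout("/data/bids_dataset", validate=False)
print(layout.get_subjects())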
4 changes: 2 additions & 2 deletions snakebids/plugins/cli_config.py
@@ -3,7 +3,7 @@
import argparse
import re
from pathlib import Path
-from typing import Any
+from typing import Any, Callable

import attrs

@@ -38,7 +38,7 @@ def _make_underscore_dash_aliases(name: str) -> set[str]:
return {name}


-def _find_type(name: str, *, yamlsafe: bool = True) -> type[Any]:
+def _find_type(name: str, *, yamlsafe: bool = True) -> Callable[[Any], Any]:
import importlib

if name == "Path":
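The widened return annotation above (type[Any] to Callable[[Any], Any]) mirrors how argparse treats its type= argument: any callable that converts the raw string is accepted, not just a class. A small illustrative sketch, separate from the snakebids code:

import argparse
import json
from pathlib import Path

parser = argparse.ArgumentParser()
parser.add_argument("--path", type=Path)  # a class works as a converter...
parser.add_argument("--filters", type=json.loads)  # ...but so does any plain callable

args = parser.parse_args(["--path", "data/bids", "--filters", '{"suffix": "T1w"}'])
print(args.path, args.filters)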
6 changes: 6 additions & 0 deletions snakebids/resources/bids_tags.json
@@ -183,6 +183,12 @@
"before": "subset-",
"match": "[a-zA-Z0-9]+"
},
"nucleus": {
"tag": "nucleus",
"before": "nuc-",
"match": "[a-zA-Z0-9]+"

},
"desc": {
"tag": "desc",
"before": "desc-",
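The new entry registers a "nucleus" entity written to filenames with the nuc- prefix and matched by [a-zA-Z0-9]+. A quick sketch of what that pattern matches; the filename is a made-up example, not taken from the PR:

import re

pattern = re.compile(r"nuc-(?P<nucleus>[a-zA-Z0-9]+)")
name = "sub-001_hemi-L_nuc-STN_mask.nii.gz"

match = pattern.search(name)
assert match is not None
print(match.group("nucleus"))  # -> STN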
5 changes: 2 additions & 3 deletions snakebids/tests/conftest.py
@@ -8,6 +8,7 @@
import pytest
from hypothesis import database, settings
from pyfakefs.fake_filesystem import FakeFilesystem
+from upath import UPath

import snakebids.paths.resources as specs
from snakebids import resources, set_bids_spec
@@ -51,9 +52,7 @@ def fakefs(


@pytest.fixture
-def fakefs_tmpdir(
-    request: pytest.FixtureRequest, fakefs: FakeFilesystem | None
-) -> Path:
+def tmpdir(request: pytest.FixtureRequest, fakefs: FakeFilesystem | None) -> Path:
"""Version of tmpdir compatible with fakefs

If fakefs is disabled, a tmpdir is returned using the builtin tmpdir fixture.
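Renaming the fixture from fakefs_tmpdir to tmpdir means it now overrides pytest's builtin tmpdir fixture for the whole test package, so tests can simply request tmpdir. A minimal sketch of the override pattern, assuming a plain temporary directory rather than the project's fake-filesystem logic:

import pytest
from pathlib import Path


@pytest.fixture
def tmpdir(tmp_path: Path) -> Path:
    # Overrides the builtin tmpdir fixture for every test in this package.
    # The real fixture would return a fake-filesystem directory when pyfakefs
    # is active; here we just delegate to pytest's builtin tmp_path.
    return tmp_path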
93 changes: 58 additions & 35 deletions snakebids/tests/test_generate_inputs.py
@@ -13,12 +13,21 @@
import warnings
from collections import defaultdict
from pathlib import Path, PosixPath
-from typing import Any, Iterable, Literal, NamedTuple, TypedDict, TypeVar, cast
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Iterable,
+    Literal,
+    NamedTuple,
+    TypedDict,
+    TypeVar,
+    cast,
+)

import attrs
import more_itertools as itx
import pytest
-from bids import BIDSLayout
+from bids.layout import BIDSLayout
from hypothesis import HealthCheck, assume, example, given, settings
from hypothesis import strategies as st
from pyfakefs.fake_filesystem import FakeFilesystem
@@ -46,6 +55,7 @@
allow_function_scoped,
create_dataset,
create_snakebids_config,
+    debug,
example_if,
get_bids_path,
get_zip_list,
@@ -56,6 +66,9 @@
from snakebids.utils.containers import MultiSelectDict
from snakebids.utils.utils import DEPRECATION_FLAG, BidsEntity, BidsParseError

+if TYPE_CHECKING:
+    from _typeshed import StrPath, Unused

T = TypeVar("T")


@@ -196,13 +209,13 @@ def test_attribute_errors_from_pybids_qualified_and_raised():


class TestFilterBools:
-    @pytest.fixture(autouse=True)
-    def bids_fs(self, bids_fs: FakeFilesystem | None):
-        return bids_fs
+    # @pytest.fixture(autouse=True)
+    # def bids_fs(self, bids_fs: FakeFilesystem | None):
+    #     return bids_fs

-    @pytest.fixture
-    def tmpdir(self, fakefs_tmpdir: Path):
-        return fakefs_tmpdir
+    # @pytest.fixture
+    # def tmpdir(self, tmpdir: Path):
+    #     return tmpdir

def disambiguate_components(self, dataset: BidsDataset):
assert len(dataset) == 2
@@ -230,7 +243,7 @@ def set_difference(set_: set[str], comp: BidsComponent) -> set[str]:
)
@given(dataset=sb_st.datasets())
def test_ambiguous_paths_with_extra_entities_leads_to_error(
-        self, tmpdir: Path, dataset: BidsDataset
+        self, tmpdir: Path, dataset: BidsDataset, mocker: MockerFixture
):
root = tempfile.mkdtemp(dir=tmpdir)
create_dataset(root, dataset)
@@ -256,7 +269,7 @@ def test_ambiguous_paths_with_extra_entities_leads_to_error(
)
@given(dataset=sb_st.datasets())
def test_ambiguous_paths_with_missing_entity_leads_to_error(
-        self, tmpdir: Path, dataset: BidsDataset
+        self, tmpdir: Path, dataset: BidsDataset, mocker: MockerFixture
):
root = tempfile.mkdtemp(dir=tmpdir)
create_dataset(root, dataset)
@@ -507,15 +520,27 @@ def add_entity(component: BidsComponent, entity: str, value: str):
assert result == BidsDataset({"template": dataset["template"]})


+@pytest.mark.disable_fakefs(True)
class TestFilterMethods:
-    @pytest.fixture(autouse=True)
-    def bids_fs(self, bids_fs: FakeFilesystem | None):
-        return bids_fs
+    # @pytest.fixture(autouse=True)
+    # def bids_fs(self, bids_fs: FakeFilesystem | None):
+    #     return bids_fs

-    @pytest.fixture
-    def tmpdir(self, fakefs_tmpdir: Path):
-        return fakefs_tmpdir
+    # @pytest.fixture
+    # def tmpdir(self, fakefs_tmpdir: Path):
+    #     return fakefs_tmpdir

+    @debug(
+        component=BidsComponent(
+            name="template",
+            path="sub-{subject}/sub-{subject}_nuc-{nucleus}",
+            zip_lists={
+                "subject": ["0"],
+                "nucleus": ["0"],
+            },
+        ),
+        data=mock_data("0"),
+    )
@example(
component=BidsComponent(
name="template",
@@ -849,10 +874,8 @@ def test_filter_with_invalid_method_raises_error(self, tmpdir: Path, method: str
max_examples=1,
)
@given(dataset=sb_st.datasets_one_comp(unique=True))
-def test_duplicate_wildcards_does_not_create_error(
-    dataset: BidsDataset, bids_fs: Path, fakefs_tmpdir: Path
-):
-    root = tempfile.mkdtemp(dir=fakefs_tmpdir)
+def test_duplicate_wildcards_does_not_create_error(dataset: BidsDataset, tmpdir: Path):
+    root = tempfile.mkdtemp(dir=tmpdir)
rooted = BidsDataset.from_iterable(
attrs.evolve(comp, path=os.path.join(root, comp.path))
for comp in dataset.values()
@@ -1059,8 +1082,8 @@ def get_subset(of: Iterable[T]) -> list[T]:

class TestCustomPaths:
@pytest.fixture
-    def temp_dir(self, fakefs_tmpdir: Path, bids_fs: Path):
-        return fakefs_tmpdir
+    def temp_dir(self, tmpdir: Path, bids_fs: Path):
+        return tmpdir

def generate_test_directory(
self, entities: dict[str, list[str]], template: Path, tmpdir: Path
@@ -1584,13 +1607,13 @@ def test_get_lists_from_bids():


class TestGenBidsLayout:
-    @pytest.fixture
-    def tmpdir(self, fakefs_tmpdir: Path):
-        return fakefs_tmpdir
+    # @pytest.fixture
+    # def tmpdir(self, tmpdir: Path):
+    #     return tmpdir

-    @pytest.fixture(autouse=True)
-    def bids_fs(self, bids_fs: FakeFilesystem | None):
-        return bids_fs
+    # @pytest.fixture(autouse=True)
+    # def bids_fs(self, bids_fs: FakeFilesystem | None):
+    #     return bids_fs

@settings(
max_examples=1, suppress_health_check=[HealthCheck.function_scoped_fixture]
@@ -1666,7 +1689,7 @@ def test_path_subclassing(self, path: str, path_type: str):
# Google cloud is not posix, for mocking purpose however we just
# need a class that is a subclass of Path
class MockGCSPath(PosixPath):
-            def __init__(self, *pathsegments: str):
+            def __init__(self, *pathsegments: StrPath, **kwargs: Unused):
super().__init__(*pathsegments)

def __str__(self): # __fspath__ calls __str__ by default
@@ -1707,8 +1730,8 @@ def __str__(self): # __fspath__ calls __str__ by default
],
)
@given(dataset=sb_st.datasets(unique=True))
-def test_generate_inputs(dataset: BidsDataset, bids_fs: Path, fakefs_tmpdir: Path):
-    root = tempfile.mkdtemp(dir=fakefs_tmpdir)
+def test_generate_inputs(dataset: BidsDataset, tmpdir: Path):
+    root = tempfile.mkdtemp(dir=tmpdir)
rooted = BidsDataset.from_iterable(
attrs.evolve(comp, path=os.path.join(root, comp.path))
for comp in dataset.values()
@@ -1741,8 +1764,8 @@ class TestParticipantFiltering:
MODE = Literal["include", "exclude"]

@pytest.fixture
-    def tmpdir(self, bids_fs: Path, fakefs_tmpdir: Path):
-        return fakefs_tmpdir
+    def tmpdir(self, bids_fs: Path, tmpdir: Path):
+        return tmpdir

def get_filter_params(self, mode: MODE, filters: list[str] | str):
class FiltParams(TypedDict, total=False):
@@ -1920,12 +1943,12 @@ def test_exclude_participant_does_not_make_all_other_filters_regex(
@settings(max_examples=1, suppress_health_check=[HealthCheck.function_scoped_fixture])
@given(dataset=sb_st.datasets_one_comp(blacklist_entities=["extension"], unique=True))
def test_when_all_custom_paths_no_layout_indexed(
-    dataset: BidsDataset, bids_fs: Path, fakefs_tmpdir: Path, mocker: MockerFixture
+    dataset: BidsDataset, bids_fs: Path, tmpdir: Path, mocker: MockerFixture
):
# Need to reset mocker at beginning because hypothesis may call this function
# multiple times
mocker.stopall()
-    root = tempfile.mkdtemp(dir=fakefs_tmpdir)
+    root = tempfile.mkdtemp(dir=tmpdir)
rooted = BidsDataset.from_iterable(
attrs.evolve(comp, path=os.path.join(root, comp.path))
for comp in dataset.values()
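The new @pytest.mark.disable_fakefs(True) marker on TestFilterMethods presumably lets the fake-filesystem fixture opt individual test classes out. A hedged sketch of how such a marker is commonly consumed by a fixture, not the project's actual conftest code:

import pytest


@pytest.fixture
def fakefs(request: pytest.FixtureRequest):
    # Look for the closest disable_fakefs marker on the requesting test or class.
    marker = request.node.get_closest_marker("disable_fakefs")
    if marker and marker.args and marker.args[0]:
        yield None  # marker present: skip fake-filesystem setup entirely
        return
    # ... the real fixture would set up pyfakefs here ...
    yield "fake-filesystem placeholder"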