diff --git a/.github/workflows/read-nwbfile-tests.yml b/.github/workflows/read-nwbfile-tests.yml index adc073c02..d17d1522d 100644 --- a/.github/workflows/read-nwbfile-tests.yml +++ b/.github/workflows/read-nwbfile-tests.yml @@ -13,7 +13,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest", "windows-latest"] # TODO: update mac and streaming methods - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: s-weigand/setup-conda@v1 with: diff --git a/.github/workflows/streaming-tests.yml b/.github/workflows/streaming-tests.yml index e67bcf289..e6d555b0c 100644 --- a/.github/workflows/streaming-tests.yml +++ b/.github/workflows/streaming-tests.yml @@ -13,7 +13,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest", "windows-latest"] # TODO: update mac and streaming methods - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: s-weigand/setup-conda@v1 with: diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index b0ea4d259..a6b26df7a 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -13,7 +13,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest", "macos-13", "windows-latest"] - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: conda-incubator/setup-miniconda@v3 with: diff --git a/setup.py b/setup.py index cd1e31161..75c9ddd60 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,6 @@ classifiers=[ "Development Status :: 4 - Beta", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", diff --git a/src/nwbinspector/_configuration.py b/src/nwbinspector/_configuration.py index 47c6faf03..b8692fe64 100644 --- a/src/nwbinspector/_configuration.py +++ 
b/src/nwbinspector/_configuration.py @@ -3,7 +3,7 @@ import json from pathlib import Path from types import FunctionType -from typing import List, Optional +from typing import Optional import jsonschema import yaml @@ -65,8 +65,8 @@ def load_config(filepath_or_keyword: PathType) -> dict: def configure_checks( checks: list = available_checks, config: Optional[dict] = None, - ignore: Optional[List[str]] = None, - select: Optional[List[str]] = None, + ignore: Optional[list[str]] = None, + select: Optional[list[str]] = None, importance_threshold: Importance = Importance.BEST_PRACTICE_SUGGESTION, ) -> list: """ diff --git a/src/nwbinspector/_dandi_inspection.py b/src/nwbinspector/_dandi_inspection.py index bdda537a3..6aa4c0f78 100644 --- a/src/nwbinspector/_dandi_inspection.py +++ b/src/nwbinspector/_dandi_inspection.py @@ -1,5 +1,5 @@ import pathlib -from typing import Iterable, List, Literal, Union +from typing import Iterable, Literal, Union from warnings import filterwarnings import h5py @@ -16,8 +16,8 @@ def inspect_dandiset( dandiset_version: Union[str, Literal["draft"], None] = None, config: Union[str, pathlib.Path, dict, Literal["dandi"], None] = None, checks: Union[list, None] = None, - ignore: Union[List[str], None] = None, - select: Union[List[str], None] = None, + ignore: Union[list[str], None] = None, + select: Union[list[str], None] = None, importance_threshold: Union[str, Importance] = Importance.BEST_PRACTICE_SUGGESTION, skip_validate: bool = False, show_progress_bar: bool = True, @@ -112,8 +112,8 @@ def inspect_dandi_file_path( dandiset_version: Union[str, Literal["draft"], None] = None, config: Union[str, pathlib.Path, dict, Literal["dandi"]] = "dandi", checks: Union[list, None] = None, - ignore: Union[List[str], None] = None, - select: Union[List[str], None] = None, + ignore: Union[list[str], None] = None, + select: Union[list[str], None] = None, importance_threshold: Union[str, Importance] = Importance.BEST_PRACTICE_SUGGESTION, skip_validate: bool = 
False, client: Union["dandi.dandiapi.DandiAPIClient", None] = None, diff --git a/src/nwbinspector/_formatting.py b/src/nwbinspector/_formatting.py index 712077ddd..6427f6a75 100644 --- a/src/nwbinspector/_formatting.py +++ b/src/nwbinspector/_formatting.py @@ -8,7 +8,7 @@ from enum import Enum from pathlib import Path from platform import platform -from typing import Dict, List, Optional, Union +from typing import Optional, Union import numpy as np @@ -75,9 +75,9 @@ class MessageFormatter: def __init__( self, - messages: List[InspectorMessage], - levels: List[str], - reverse: Optional[List[bool]] = None, + messages: list[InspectorMessage], + levels: list[str], + reverse: Optional[list[bool]] = None, detailed: bool = False, formatter_options: Optional[FormatterOptions] = None, ): @@ -107,7 +107,7 @@ def __init__( self.formatted_messages = [] @staticmethod - def _count_messages_by_importance(messages: List[InspectorMessage]) -> Dict[str, int]: + def _count_messages_by_importance(messages: list[InspectorMessage]) -> dict[str, int]: message_count_by_importance = {importance_level.name: 0 for importance_level in Importance} for message in messages: message_count_by_importance[message.importance.name] += 1 @@ -138,16 +138,16 @@ def _get_message_header(self, message: InspectorMessage): message_header += f"with name '{message.object_name}'" return message_header - def _get_message_increment(self, level_counter: List[int]): + def _get_message_increment(self, level_counter: list[int]): return ( f"{'.'.join(np.array(level_counter, dtype=str))}.{self.message_counter}" f"{self.formatter_options.indent}" ) def _add_subsection( self, - organized_messages: Dict[str, Union[dict, List[InspectorMessage]]], - levels: List[str], - level_counter: List[int], + organized_messages: dict[str, Union[dict, list[InspectorMessage]]], + levels: list[str], + level_counter: list[int], ): """Recursive helper for display_messages.""" this_level_counter = list(level_counter) # local copy passed from
previous recursion level @@ -193,7 +193,7 @@ def _add_subsection( self.formatted_messages.extend([f"{' ' * len(increment)} Message: {message.message}", ""]) self.message_counter += 1 - def format_messages(self) -> List[str]: + def format_messages(self) -> list[str]: """Deploy recursive addition of sections, terminating with message display.""" report_header = _get_report_header() self.formatted_messages.extend( @@ -217,9 +217,9 @@ def format_messages( - messages: List[InspectorMessage], - levels: List[str] = None, - reverse: Optional[List[bool]] = None, + messages: list[InspectorMessage], + levels: list[str] = None, + reverse: Optional[list[bool]] = None, detailed: bool = False, -) -> List[str]: +) -> list[str]: """Print InspectorMessages in order specified by the organization structure.""" diff --git a/src/nwbinspector/_nwb_inspection.py b/src/nwbinspector/_nwb_inspection.py index a68a4ce36..8e1d733a2 100644 --- a/src/nwbinspector/_nwb_inspection.py +++ b/src/nwbinspector/_nwb_inspection.py @@ -5,7 +5,7 @@ from collections import defaultdict from concurrent.futures import ProcessPoolExecutor, as_completed from pathlib import Path -from typing import Iterable, List, Optional, Type, Union +from typing import Iterable, Optional, Type, Union from warnings import filterwarnings, warn import pynwb @@ -375,8 +375,8 @@ def inspect_nwbfile_object( nwbfile_object: pynwb.NWBFile, checks: Optional[list] = None, config: Optional[dict] = None, - ignore: Optional[List[str]] = None, - select: Optional[List[str]] = None, + ignore: Optional[list[str]] = None, + select: Optional[list[str]] = None, importance_threshold: Union[str, Importance] = Importance.BEST_PRACTICE_SUGGESTION, ) -> Iterable[InspectorMessage]: """ diff --git a/src/nwbinspector/_organization.py b/src/nwbinspector/_organization.py index 884979043..9caaee5e3 100644 --- a/src/nwbinspector/_organization.py +++ b/src/nwbinspector/_organization.py @@ -1,7 +1,7 @@ """Internally used tools
specifically for rendering more human-readable output from collected check results.""" from enum import Enum -from typing import List, Optional +from typing import Optional from natsort import natsorted @@ -16,7 +16,7 @@ def _sort_unique_values(unique_values: list, reverse: bool = False): return natsorted(unique_values, reverse=reverse) -def organize_messages(messages: List[InspectorMessage], levels: List[str], reverse: Optional[List[bool]] = None): +def organize_messages(messages: list[InspectorMessage], levels: list[str], reverse: Optional[list[bool]] = None): """ General function for organizing list of InspectorMessages. diff --git a/src/nwbinspector/testing/_testing.py b/src/nwbinspector/testing/_testing.py index eb2b36024..6fc7fb207 100644 --- a/src/nwbinspector/testing/_testing.py +++ b/src/nwbinspector/testing/_testing.py @@ -4,7 +4,7 @@ import os from datetime import datetime from pathlib import Path -from typing import Optional, Tuple +from typing import Optional from urllib import request from uuid import uuid4 @@ -20,7 +20,7 @@ TESTING_CONFIG_FILE_PATH = Path.cwd() / "tests" / "testing_config.json" -def check_streaming_tests_enabled() -> Tuple[bool, Optional[str]]: +def check_streaming_tests_enabled() -> tuple[bool, Optional[str]]: """ General purpose helper for determining if the testing environment can support S3 DANDI streaming. 
diff --git a/src/nwbinspector/tools/_dandi.py b/src/nwbinspector/tools/_dandi.py index 1afb502af..e1d338c5c 100644 --- a/src/nwbinspector/tools/_dandi.py +++ b/src/nwbinspector/tools/_dandi.py @@ -2,12 +2,12 @@ import re from concurrent.futures import ProcessPoolExecutor, as_completed -from typing import Dict, Optional +from typing import Optional from ..utils import calculate_number_of_cpu, is_module_installed -def get_s3_urls_and_dandi_paths(dandiset_id: str, version_id: Optional[str] = None, n_jobs: int = 1) -> Dict[str, str]: +def get_s3_urls_and_dandi_paths(dandiset_id: str, version_id: Optional[str] = None, n_jobs: int = 1) -> dict[str, str]: """ Collect S3 URLS from a DANDISet ID. @@ -46,7 +46,7 @@ def get_s3_urls_and_dandi_paths(dandiset_id: str, version_id: Optional[str] = No return s3_urls_to_dandi_paths -def _get_content_url_and_path(asset, follow_redirects: int = 1, strip_query: bool = True) -> Dict[str, str]: +def _get_content_url_and_path(asset, follow_redirects: int = 1, strip_query: bool = True) -> dict[str, str]: """ Private helper function for parallelization in 'get_s3_urls_and_dandi_paths'. 
diff --git a/src/nwbinspector/utils/_utils.py b/src/nwbinspector/utils/_utils.py index 87f148f59..038bd7412 100644 --- a/src/nwbinspector/utils/_utils.py +++ b/src/nwbinspector/utils/_utils.py @@ -7,7 +7,7 @@ from importlib import import_module from pathlib import Path from time import sleep -from typing import Callable, Dict, List, Optional, Tuple, TypeVar, Union +from typing import Callable, Optional, TypeVar, Union import h5py import numpy as np @@ -15,10 +15,10 @@ from numpy.typing import ArrayLike from packaging import version -# TODO: deprecated these in favor of explicit typing +# TODO: deprecate these in favor of explicit typing PathType = TypeVar("PathType", str, Path) # For types that can be either files or folders FilePathType = TypeVar("FilePathType", str, Path) -OptionalListOfStrings = Optional[List[str]] +OptionalListOfStrings = Optional[list[str]] dict_regex = r"({.+:.+})" # TODO: remove this from global scope MAX_CACHE_ITEMS = 1000 # lru_cache default is 128 calls of matching input/output, but might need more to get use here @@ -26,14 +26,14 @@ @lru_cache(maxsize=MAX_CACHE_ITEMS) def _cache_data_retrieval_command( - data: h5py.Dataset, reduced_selection: Tuple[Tuple[Optional[int], Optional[int], Optional[int]]] + data: h5py.Dataset, reduced_selection: tuple[tuple[Optional[int], Optional[int], Optional[int]]] ) -> np.ndarray: """LRU caching for _cache_data_selection cannot be applied to list inputs; this expects the tuple or Dataset.""" selection = tuple([slice(*reduced_slice) for reduced_slice in reduced_selection]) # reconstitute the slices return data[selection] -def cache_data_selection(data: Union[h5py.Dataset, ArrayLike], selection: Union[slice, Tuple[slice]]) -> np.ndarray: +def cache_data_selection(data: Union[h5py.Dataset, ArrayLike], selection: Union[slice, tuple[slice]]) -> np.ndarray: """Extract the selection lazily from the data object for efficient caching (most beneficial during streaming).""" if isinstance(data, np.memmap): # np.memmap
objects are not hashable - simply return the selection lazily return data[selection] @@ -166,7 +166,7 @@ def get_package_version(name: str) -> version.Version: def robust_s3_read( - command: Callable, max_retries: int = 10, command_args: Optional[list] = None, command_kwargs: Optional[Dict] = None + command: Callable, max_retries: int = 10, command_args: Optional[list] = None, command_kwargs: Optional[dict] = None ): """Attempt the command (usually acting on an S3 IO) up to the number of max_retries using exponential backoff.""" command_args = command_args or []