Upgrade to Pydantic 2 (#767)
CodyCBakerPhD authored Mar 6, 2024
1 parent: 84ed29f · commit: 2a2d805
Showing 20 changed files with 306 additions and 414 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,14 @@
# Upcoming

### Improvements

* Upgraded Pydantic support to `>=2.0.0`. [PR #767](https://github.com/catalystneuro/neuroconv/pull/767)
* Absorbed the `DatasetInfo` model into the `DatasetIOConfiguration` model. [PR #767](https://github.com/catalystneuro/neuroconv/pull/767)
* Keyword argument `field_name` of the `DatasetIOConfiguration.from_neurodata_object` method has been renamed to `dataset_name` to be more consistent with its usage. This only affects direct initialization of the model; usage via the `BackendConfiguration` constructor and its associated helper functions in `neuroconv.tools.nwb_helpers` is unaffected. [PR #767](https://github.com/catalystneuro/neuroconv/pull/767)
* Manual construction of a `DatasetIOConfiguration` now requires the field `dataset_name`, which is validated to match the final segment of `location_in_file` (see the sketch after this list). Usage via the automated constructors is unchanged. [PR #767](https://github.com/catalystneuro/neuroconv/pull/767)
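
A minimal, hedged sketch of the renamed keyword argument described above. The mock `TimeSeries` setup and the `neurodata_object` parameter name are assumptions; only the `dataset_name` keyword is confirmed by this changelog.

```python
# Sketch: direct construction from a neurodata object using the renamed keyword.
import numpy as np
from pynwb import TimeSeries
from pynwb.testing.mock.file import mock_NWBFile

from neuroconv.tools.nwb_helpers import HDF5DatasetIOConfiguration

nwbfile = mock_NWBFile()
time_series = TimeSeries(name="TestTimeSeries", data=np.zeros(shape=(100, 4)), unit="volts", rate=30_000.0)
nwbfile.add_acquisition(time_series)

# Previously `field_name="data"`; after this PR the keyword is `dataset_name`.
dataset_configuration = HDF5DatasetIOConfiguration.from_neurodata_object(
    neurodata_object=time_series, dataset_name="data"
)
```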



# v0.4.7 (February 21, 2024)

### Deprecation
5 changes: 3 additions & 2 deletions requirements-minimal.txt
@@ -4,10 +4,11 @@ jsonschema>=3.2.0
PyYAML>=5.4
scipy>=1.4.1
h5py>=3.9.0
hdmf>=3.12.1
hdmf>=3.12.2
hdmf_zarr>=0.4.0
pynwb>=2.3.2;python_version>='3.8'
pydantic>=1.10.13,<2.0.0
pydantic>=2.0.0
typing_extensions>=4.1.0
psutil>=5.8.0
tqdm>=4.60.0
pandas
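
A quick sanity check that an environment satisfies the new Pydantic pin above, assuming the third-party `packaging` distribution is available:

```python
# Verify the installed Pydantic meets the `pydantic>=2.0.0` requirement (assumes `packaging` is installed).
from importlib.metadata import version

from packaging.version import Version

assert Version(version("pydantic")) >= Version("2.0.0"), "Pydantic 2+ is required by this change."
```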
15 changes: 7 additions & 8 deletions src/neuroconv/tools/nwb_helpers/__init__.py
@@ -6,7 +6,7 @@

from ._backend_configuration import get_default_backend_configuration
from ._configuration_models._base_backend import BackendConfiguration
from ._configuration_models._base_dataset_io import DatasetInfo, DatasetIOConfiguration
from ._configuration_models._base_dataset_io import DatasetIOConfiguration
from ._configuration_models._hdf5_backend import HDF5BackendConfiguration
from ._configuration_models._hdf5_dataset_io import (
AVAILABLE_HDF5_COMPRESSION_METHODS,
@@ -37,21 +37,20 @@
"BACKEND_CONFIGURATIONS",
"DATASET_IO_CONFIGURATIONS",
"BACKEND_NWB_IO",
"BackendConfiguration",
"HDF5BackendConfiguration",
"ZarrBackendConfiguration",
"DatasetIOConfiguration",
"HDF5DatasetIOConfiguration",
"ZarrDatasetIOConfiguration",
"get_default_backend_configuration",
"get_default_dataset_io_configurations",
"configure_backend",
"BackendConfiguration",
"DatasetIOConfiguration",
"get_default_dataset_io_configurations",
"get_default_backend_configuration",
"add_device_from_metadata",
"get_default_nwbfile_metadata",
"get_module",
"make_nwbfile_from_metadata",
"make_or_load_nwbfile",
"DatasetInfo",
"HDF5BackendConfiguration",
"HDF5DatasetIOConfiguration",
"ZarrBackendConfiguration",
"ZarrDatasetIOConfiguration",
]
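
For context, a brief usage sketch of the helpers re-exported here; the mock objects and keyword names follow the documented public API but are assumptions, not part of this diff.

```python
# Sketch: build the default HDF5 backend configuration for an in-memory NWBFile and apply it.
import numpy as np
from pynwb import TimeSeries
from pynwb.testing.mock.file import mock_NWBFile

from neuroconv.tools.nwb_helpers import configure_backend, get_default_backend_configuration

nwbfile = mock_NWBFile()
nwbfile.add_acquisition(
    TimeSeries(name="TestTimeSeries", data=np.zeros(shape=(1_000, 8)), unit="volts", rate=30_000.0)
)

backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend="hdf5")
configure_backend(nwbfile=nwbfile, backend_configuration=backend_configuration)
```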
src/neuroconv/tools/nwb_helpers/_configuration_models/_base_backend.py
@@ -3,8 +3,9 @@
from typing import ClassVar, Dict, Literal, Type

from hdmf.container import DataIO
from pydantic import BaseModel, Field
from pydantic import BaseModel, ConfigDict, Field
from pynwb import NWBFile
from typing_extensions import Self

from ._base_dataset_io import DatasetIOConfiguration
from .._dataset_configuration import get_default_dataset_io_configurations
@@ -13,10 +14,11 @@
class BackendConfiguration(BaseModel):
"""A model for matching collections of DatasetConfigurations to a specific backend."""

backend: ClassVar[Literal["hdf5", "zarr"]] = Field(
description="The name of the backend used to configure the NWBFile."
)
data_io_class: Type[DataIO] = Field(description="The DataIO class that is specific to this backend.")
backend: ClassVar[Literal["hdf5", "zarr"]]
data_io_class: ClassVar[Type[DataIO]]

model_config = ConfigDict(validate_assignment=True) # Re-validate model on mutation

dataset_configurations: Dict[str, DatasetIOConfiguration] = Field(
description=(
"A mapping from object locations (e.g. `acquisition/TestElectricalSeriesAP/data`) "
@@ -38,10 +40,10 @@ def __str__(self) -> str:
return string

@classmethod
def from_nwbfile(cls, nwbfile: NWBFile) -> "BackendConfiguration":
def from_nwbfile(cls, nwbfile: NWBFile) -> Self:
default_dataset_configurations = get_default_dataset_io_configurations(nwbfile=nwbfile, backend=cls.backend)
dataset_configurations = {
default_dataset_configuration.dataset_info.location_in_file: default_dataset_configuration
default_dataset_configuration.location_in_file: default_dataset_configuration
for default_dataset_configuration in default_dataset_configurations
}

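A self-contained toy model (not neuroconv code; all names below are illustrative) showing the Pydantic 2 idioms adopted in this file: `ClassVar` attributes instead of `Field`-declared class constants, `ConfigDict(validate_assignment=True)` for re-validation on mutation, and `Self` as the return type of the alternate constructor.

```python
from typing import ClassVar, Dict

from pydantic import BaseModel, ConfigDict, Field
from typing_extensions import Self


class ExampleBackendConfiguration(BaseModel):
    """Toy stand-in for BackendConfiguration illustrating the Pydantic 2 patterns above."""

    backend: ClassVar[str] = "hdf5"  # class-level metadata; not a validated model field

    model_config = ConfigDict(validate_assignment=True)  # re-validate whenever a field is reassigned

    dataset_configurations: Dict[str, int] = Field(
        description="Stand-in for the mapping of dataset locations to their configurations."
    )

    @classmethod
    def from_mapping(cls, mapping: Dict[str, int]) -> Self:
        return cls(dataset_configurations=mapping)


configuration = ExampleBackendConfiguration.from_mapping(
    mapping={"acquisition/TestElectricalSeriesAP/data": 1}
)
configuration.dataset_configurations = {"acquisition/TestElectricalSeriesAP/data": 2}  # triggers validation
```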
