
Commit

Expose AWS Region to NWBHDF5IO (#1903)
rly authored May 22, 2024
1 parent 7a0d8b4 commit c7ff5ef
Showing 7 changed files with 17 additions and 16 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -4,6 +4,7 @@
 
 ### Enhancements and minor changes
 - Set rate default value inside `mock_ElectricalSeries` to avoid having to set `rate=None` explicitly when passing timestamps. @h-mayorquin [#1894](https://github.com/NeurodataWithoutBorders/pynwb/pull/1894)
+- Exposed `aws_region` to `NWBHDF5IO`. @rly [#1903](https://github.com/NeurodataWithoutBorders/pynwb/pull/1903)
 
 ## PyNWB 2.7.0 (May 2, 2024)
 
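For context, a minimal usage sketch of the new parameter (the bucket URL and region below are hypothetical placeholders; streaming requires h5py built with the ros3 driver):

    from pynwb import NWBHDF5IO

    # Hypothetical HTTPS URL of an NWB file stored on S3.
    s3_url = "https://example-bucket.s3.us-east-2.amazonaws.com/example.nwb"

    # aws_region is forwarded to the underlying HDMF HDF5IO so the ros3 driver
    # can locate a bucket outside the default region.
    with NWBHDF5IO(s3_url, mode="r", driver="ros3", aws_region="us-east-2") as io:
        nwbfile = io.read()
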
2 changes: 1 addition & 1 deletion environment-ros3.yml
@@ -6,7 +6,7 @@ channels:
 dependencies:
 - python==3.12
 - h5py==3.11.0
-- hdmf==3.13.0
+- hdmf==3.14.0
 - matplotlib==3.8.0
 - numpy==1.26.4
 - pandas==2.2.1
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -33,7 +33,7 @@ classifiers = [
 ]
 dependencies = [
 "h5py>=2.10",
-"hdmf>=3.12.2",
+"hdmf>=3.14.0",
 "numpy>=1.18, <2.0", # pin below 2.0 until HDMF supports numpy 2.0
 "pandas>=1.1.5",
 "python-dateutil>=2.7.3",
2 changes: 1 addition & 1 deletion requirements-min.txt
@@ -1,6 +1,6 @@
 # minimum versions of package dependencies for installing PyNWB
 h5py==2.10 # support for selection of datasets with list of indices added in 2.10
-hdmf==3.12.2
+hdmf==3.14.0
 numpy==1.18
 pandas==1.1.5
 python-dateutil==2.7.3
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,6 +1,6 @@
 # pinned dependencies to reproduce an entire development environment to use PyNWB
 h5py==3.11.0
-hdmf==3.13.0
+hdmf==3.14.0
 numpy==1.26.4
 pandas==2.2.1
 python-dateutil==2.9.0.post0
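
The minimum and pinned hdmf versions move to 3.14.0 in the three dependency files above, presumably because the `aws_region` keyword that `NWBHDF5IO` now forwards is only available starting with that hdmf release. A quick sanity-check sketch for an existing environment, using the packaging library (this check is an illustration, not part of the commit):

    import hdmf
    from packaging.version import Version

    # The aws_region pass-through requires the hdmf release pinned above.
    assert Version(hdmf.__version__) >= Version("3.14.0"), "upgrade hdmf to >= 3.14.0"
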
15 changes: 5 additions & 10 deletions src/pynwb/__init__.py
@@ -254,24 +254,19 @@ def can_read(path: str):
 {'name': 'extensions', 'type': (str, TypeMap, list),
 'doc': 'a path to a namespace, a TypeMap, or a list consisting paths to namespaces and TypeMaps',
 'default': None},
-{'name': 'file', 'type': [h5py.File, 'S3File'], 'doc': 'a pre-existing h5py.File object', 'default': None},
-{'name': 'comm', 'type': 'Intracomm', 'doc': 'the MPI communicator to use for parallel I/O',
-'default': None},
-{'name': 'driver', 'type': str, 'doc': 'driver for h5py to use when opening HDF5 file', 'default': None},
-{'name': 'herd_path', 'type': str, 'doc': 'The path to the HERD',
-'default': None},)
+*get_docval(_HDF5IO.__init__, "file", "comm", "driver", "aws_region", "herd_path"),)
 def __init__(self, **kwargs):
-path, mode, manager, extensions, load_namespaces, file_obj, comm, driver, herd_path =\
+path, mode, manager, extensions, load_namespaces, file_obj, comm, driver, aws_region, herd_path =\
 popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces',
-'file', 'comm', 'driver', 'herd_path', kwargs)
+'file', 'comm', 'driver', 'aws_region', 'herd_path', kwargs)
 # Define the BuildManager to use
 io_modes_that_create_file = ['w', 'w-', 'x']
 if mode in io_modes_that_create_file or manager is not None or extensions is not None:
 load_namespaces = False
 
 if load_namespaces:
 tm = get_type_map()
-super().load_namespaces(tm, path, file=file_obj, driver=driver)
+super().load_namespaces(tm, path, file=file_obj, driver=driver, aws_region=aws_region)
 manager = BuildManager(tm)
 
 # XXX: Leaving this here in case we want to revert to this strategy for
@@ -289,7 +284,7 @@ def __init__(self, **kwargs):
 manager = get_manager()
 # Open the file
 super().__init__(path, manager=manager, mode=mode, file=file_obj, comm=comm,
-driver=driver, herd_path=herd_path)
+driver=driver, aws_region=aws_region, herd_path=herd_path)
 
 @property
 def nwb_version(self):
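A note on the pattern above: the docval entries for `file`, `comm`, `driver`, `aws_region`, and `herd_path` are now pulled from the parent class with `get_docval` instead of being restated. A minimal, self-contained sketch of that pattern (the `Base` and `Child` classes are hypothetical, not pynwb code):

    from hdmf.utils import docval, get_docval, popargs

    class Base:
        @docval({'name': 'driver', 'type': str, 'doc': 'driver for h5py to use', 'default': None},
                {'name': 'aws_region', 'type': str, 'doc': 'AWS region of the S3 bucket', 'default': None})
        def __init__(self, **kwargs):
            self.driver, self.aws_region = popargs('driver', 'aws_region', kwargs)

    class Child(Base):
        # Reuse the parent's parameter specs instead of duplicating them,
        # mirroring the get_docval(...) call in this commit.
        @docval(*get_docval(Base.__init__, 'driver', 'aws_region'))
        def __init__(self, **kwargs):
            driver, aws_region = popargs('driver', 'aws_region', kwargs)
            super().__init__(driver=driver, aws_region=aws_region)

Called as `Child(driver="ros3", aws_region="us-east-2")`, the subclass accepts and forwards both parameters without redefining their docs, types, or defaults.
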
9 changes: 7 additions & 2 deletions src/pynwb/validate.py
@@ -30,7 +30,7 @@ def _validate_helper(io: HDMFIO, namespace: str = CORE_NAMESPACE) -> list:
 
 
 def _get_cached_namespaces_to_validate(
-path: str, driver: Optional[str] = None
+path: str, driver: Optional[str] = None, aws_region: Optional[str] = None,
 ) -> Tuple[List[str], BuildManager, Dict[str, str]]:
 """
 Determine the most specific namespace(s) that are cached in the given NWBFile that can be used for validation.
@@ -58,7 +58,12 @@ def _get_cached_namespaces_to_validate(
 catalog = NamespaceCatalog(
 group_spec_cls=NWBGroupSpec, dataset_spec_cls=NWBDatasetSpec, spec_namespace_cls=NWBNamespace
 )
-namespace_dependencies = NWBHDF5IO.load_namespaces(namespace_catalog=catalog, path=path, driver=driver)
+namespace_dependencies = NWBHDF5IO.load_namespaces(
+namespace_catalog=catalog,
+path=path,
+driver=driver,
+aws_region=aws_region
+)
 
 # Determine which namespaces are the most specific (i.e. extensions) and validate against those
 candidate_namespaces = set(namespace_dependencies.keys())
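For illustration, a sketch of how the updated helper might be called when validating a file streamed from S3 (`_get_cached_namespaces_to_validate` is a private helper, and the URL and region are hypothetical placeholders):

    from pynwb.validate import _get_cached_namespaces_to_validate

    # aws_region is now forwarded to NWBHDF5IO.load_namespaces, so cached
    # namespaces can be read from a bucket outside the default region.
    namespaces, manager, cached_namespaces = _get_cached_namespaces_to_validate(
        path="https://example-bucket.s3.us-west-2.amazonaws.com/example.nwb",
        driver="ros3",
        aws_region="us-west-2",
    )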
