From 86b274dc28f83c33f695bca8e01e94ff8955baf2 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 2 May 2024 17:22:35 -0700 Subject: [PATCH 01/16] Release 2.7.0 (#1887) * Update CHANGELOG.md * reqs * reqs * ros3 * Update environment-ros3.yml Co-authored-by: Steph Prince <40640337+stephprince@users.noreply.github.com> * Update CHANGELOG.md Co-authored-by: Steph Prince <40640337+stephprince@users.noreply.github.com> --------- Co-authored-by: Steph Prince <40640337+stephprince@users.noreply.github.com> --- CHANGELOG.md | 2 +- environment-ros3.yml | 8 ++++---- requirements.txt | 10 +++++----- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36bd0a35d..327541002 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # PyNWB Changelog -## PyNWB 2.7.0 (Upcoming) +## PyNWB 2.7.0 (May 2, 2024) ### Enhancements and minor changes - Added `bounds` field to `SpatialSeries` to set optional boundary range (min, max) for each dimension of data. @mavaylon1 [#1869](https://github.com/NeurodataWithoutBorders/pynwb/pull/1869/files) diff --git a/environment-ros3.yml b/environment-ros3.yml index 155d2a938..8c93623bc 100644 --- a/environment-ros3.yml +++ b/environment-ros3.yml @@ -5,11 +5,11 @@ channels: - defaults dependencies: - python==3.12 - - h5py==3.10.0 - - hdmf==3.12.2 + - h5py==3.11.0 + - hdmf==3.13.0 - matplotlib==3.8.0 - - numpy==1.26 - - pandas==2.1.2 + - numpy==1.26.4 + - pandas==2.2.1 - python-dateutil==2.8.2 - setuptools - pytest==7.4.3 # This is for the upcoming pytest update diff --git a/requirements.txt b/requirements.txt index 836052317..dd152ad16 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ # pinned dependencies to reproduce an entire development environment to use PyNWB -h5py==3.10.0 -hdmf==3.12.2 -numpy==1.26.1 -pandas==2.1.2 -python-dateutil==2.8.2 +h5py==3.11.0 +hdmf==3.13.0 +numpy==1.26.4 +pandas==2.2.1 +python-dateutil==2.9.0.post0 From 7a0d8b47eba2f67fcd40c53571de77e969b97724 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Mon, 13 May 2024 20:10:35 -0600 Subject: [PATCH 02/16] Avoid having to set `rate=None` explicitly when passing timestamps in `mock_ElectricalSeries` (#1894) * set rate as optional value * changelog * Update CHANGELOG.md Co-authored-by: Steph Prince <40640337+stephprince@users.noreply.github.com> * update CHANGELOG.md * Update src/pynwb/testing/mock/ecephys.py --------- Co-authored-by: Steph Prince <40640337+stephprince@users.noreply.github.com> --- CHANGELOG.md | 5 +++++ src/pynwb/testing/mock/ecephys.py | 6 +++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 327541002..4f61a0587 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # PyNWB Changelog +## PyNWB 2.8.0 (Upcoming) + +### Enhancements and minor changes +- Set rate default value inside `mock_ElectricalSeries` to avoid having to set `rate=None` explicitly when passing timestamps.
@h-mayorquin [#1894](https://github.com/NeurodataWithoutBorders/pynwb/pull/1894) + ## PyNWB 2.7.0 (May 2, 2024) ### Enhancements and minor changes diff --git a/src/pynwb/testing/mock/ecephys.py b/src/pynwb/testing/mock/ecephys.py index 36796c267..0669e7493 100644 --- a/src/pynwb/testing/mock/ecephys.py +++ b/src/pynwb/testing/mock/ecephys.py @@ -70,7 +70,7 @@ def mock_ElectricalSeries( name: Optional[str] = None, description: str = "description", data=None, - rate: float = 30000.0, + rate: Optional[float] = None, timestamps=None, starting_time: Optional[float] = None, electrodes: Optional[DynamicTableRegion] = None, @@ -80,6 +80,10 @@ def mock_ElectricalSeries( conversion: float = 1.0, offset: float = 0., ) -> ElectricalSeries: + + # Set a default rate if timestamps are not provided + rate = 30_000.0 if (timestamps is None and rate is None) else rate + electrical_series = ElectricalSeries( name=name or name_generator("ElectricalSeries"), description=description, From c7ff5ef07df96843c8016e9f48ae232bf93d0631 Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Wed, 22 May 2024 15:36:58 -0700 Subject: [PATCH 03/16] Expose AWS Region to NWBHDF5IO (#1903) --- CHANGELOG.md | 1 + environment-ros3.yml | 2 +- pyproject.toml | 2 +- requirements-min.txt | 2 +- requirements.txt | 2 +- src/pynwb/__init__.py | 15 +++++---------- src/pynwb/validate.py | 9 +++++++-- 7 files changed, 17 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4f61a0587..d357c8168 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ ### Enhancements and minor changes - Set rate default value inside `mock_ElectricalSeries` to avoid having to set `rate=None` explicitly when passing timestamps. @h-mayorquin [#1894](https://github.com/NeurodataWithoutBorders/pynwb/pull/1894) +- Exposed `aws_region` to `NWBHDF5IO`. 
@rly [#1903](https://github.com/NeurodataWithoutBorders/pynwb/pull/1903) ## PyNWB 2.7.0 (May 2, 2024) diff --git a/environment-ros3.yml b/environment-ros3.yml index 8c93623bc..92e17b5dd 100644 --- a/environment-ros3.yml +++ b/environment-ros3.yml @@ -6,7 +6,7 @@ channels: dependencies: - python==3.12 - h5py==3.11.0 - - hdmf==3.13.0 + - hdmf==3.14.0 - matplotlib==3.8.0 - numpy==1.26.4 - pandas==2.2.1 diff --git a/pyproject.toml b/pyproject.toml index ab2fceb33..4701ca7d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ classifiers = [ ] dependencies = [ "h5py>=2.10", - "hdmf>=3.12.2", + "hdmf>=3.14.0", "numpy>=1.18, <2.0", # pin below 2.0 until HDMF supports numpy 2.0 "pandas>=1.1.5", "python-dateutil>=2.7.3", diff --git a/requirements-min.txt b/requirements-min.txt index f6b765b0b..a047d81c7 100644 --- a/requirements-min.txt +++ b/requirements-min.txt @@ -1,6 +1,6 @@ # minimum versions of package dependencies for installing PyNWB h5py==2.10 # support for selection of datasets with list of indices added in 2.10 -hdmf==3.12.2 +hdmf==3.14.0 numpy==1.18 pandas==1.1.5 python-dateutil==2.7.3 diff --git a/requirements.txt b/requirements.txt index dd152ad16..c1fd347ff 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ # pinned dependencies to reproduce an entire development environment to use PyNWB h5py==3.11.0 -hdmf==3.13.0 +hdmf==3.14.0 numpy==1.26.4 pandas==2.2.1 python-dateutil==2.9.0.post0 diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 9ea18efb8..ada39764e 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -254,16 +254,11 @@ def can_read(path: str): {'name': 'extensions', 'type': (str, TypeMap, list), 'doc': 'a path to a namespace, a TypeMap, or a list consisting paths to namespaces and TypeMaps', 'default': None}, - {'name': 'file', 'type': [h5py.File, 'S3File'], 'doc': 'a pre-existing h5py.File object', 'default': None}, - {'name': 'comm', 'type': 'Intracomm', 'doc': 'the MPI communicator to use for parallel I/O', - 'default': None}, - {'name': 'driver', 'type': str, 'doc': 'driver for h5py to use when opening HDF5 file', 'default': None}, - {'name': 'herd_path', 'type': str, 'doc': 'The path to the HERD', - 'default': None},) + *get_docval(_HDF5IO.__init__, "file", "comm", "driver", "aws_region", "herd_path"),) def __init__(self, **kwargs): - path, mode, manager, extensions, load_namespaces, file_obj, comm, driver, herd_path =\ + path, mode, manager, extensions, load_namespaces, file_obj, comm, driver, aws_region, herd_path =\ popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces', - 'file', 'comm', 'driver', 'herd_path', kwargs) + 'file', 'comm', 'driver', 'aws_region', 'herd_path', kwargs) # Define the BuildManager to use io_modes_that_create_file = ['w', 'w-', 'x'] if mode in io_modes_that_create_file or manager is not None or extensions is not None: @@ -271,7 +266,7 @@ def __init__(self, **kwargs): if load_namespaces: tm = get_type_map() - super().load_namespaces(tm, path, file=file_obj, driver=driver) + super().load_namespaces(tm, path, file=file_obj, driver=driver, aws_region=aws_region) manager = BuildManager(tm) # XXX: Leaving this here in case we want to revert to this strategy for @@ -289,7 +284,7 @@ def __init__(self, **kwargs): manager = get_manager() # Open the file super().__init__(path, manager=manager, mode=mode, file=file_obj, comm=comm, - driver=driver, herd_path=herd_path) + driver=driver, aws_region=aws_region, herd_path=herd_path) @property def nwb_version(self): diff --git 
a/src/pynwb/validate.py b/src/pynwb/validate.py index 827249cbb..aecfb2556 100644 --- a/src/pynwb/validate.py +++ b/src/pynwb/validate.py @@ -30,7 +30,7 @@ def _validate_helper(io: HDMFIO, namespace: str = CORE_NAMESPACE) -> list: def _get_cached_namespaces_to_validate( - path: str, driver: Optional[str] = None + path: str, driver: Optional[str] = None, aws_region: Optional[str] = None, ) -> Tuple[List[str], BuildManager, Dict[str, str]]: """ Determine the most specific namespace(s) that are cached in the given NWBFile that can be used for validation. @@ -58,7 +58,12 @@ def _get_cached_namespaces_to_validate( catalog = NamespaceCatalog( group_spec_cls=NWBGroupSpec, dataset_spec_cls=NWBDatasetSpec, spec_namespace_cls=NWBNamespace ) - namespace_dependencies = NWBHDF5IO.load_namespaces(namespace_catalog=catalog, path=path, driver=driver) + namespace_dependencies = NWBHDF5IO.load_namespaces( + namespace_catalog=catalog, + path=path, + driver=driver, + aws_region=aws_region + ) # Determine which namespaces are the most specific (i.e. extensions) and validate against those candidate_namespaces = set(namespace_dependencies.keys()) From 014b143f31695046d4f3f91ce785350b65a126fb Mon Sep 17 00:00:00 2001 From: Sandro Date: Thu, 23 May 2024 01:04:39 +0200 Subject: [PATCH 04/16] Exclude artifacts from sdist and wheel (#1902) Co-authored-by: Ryan Ly --- MANIFEST.in | 5 ----- pyproject.toml | 12 ++++++++++++ 2 files changed, 12 insertions(+), 5 deletions(-) delete mode 100644 MANIFEST.in diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index dd0fdadda..000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include license.txt Legal.txt src/pynwb/_due.py -include requirements.txt requirements-dev.txt requirements-doc.txt requirements-min.txt environment-ros3.yml -include test.py tox.ini - -graft tests diff --git a/pyproject.toml b/pyproject.toml index 4701ca7d5..92e921999 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,8 +55,20 @@ source = "vcs" # src/pynwb/__init__.py to set `__version__` (from _version.py). 
version-file = "src/pynwb/_version.py" +[tool.hatch.build.targets.sdist] +exclude = [ + ".git*", + ".codecov.yml", + ".readthedocs.yaml", +] + [tool.hatch.build.targets.wheel] packages = ["src/pynwb"] +exclude = [ + ".git*", + ".codecov.yml", + ".readthedocs.yaml", +] [tool.codespell] skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,nwb-schema,./docs/_build/*,*.ipynb" From 2f851df5bbb3661b2226c38df738a7a9826870a8 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 22 May 2024 16:36:01 -0700 Subject: [PATCH 05/16] Integrate TypeConfig (#1829) * integration * path * clean up * update * Delete out.txt * update * need to clean * partial clean up * checkpoint * checkpoint * check * Delete docs/source/sg_execution_times.rst * Update ecephys.py * checkpoint * test * test * update * checkpoint passing * clean up * tutorial draft * docs * checkpoint * subject * gallery * gallery * Update requirements-min.txt * Update requirements.txt * rebase * test * test * coverage * Create requirements-opt.txt * Update check_sphinx_links.yml * link * Update tox.ini * Update run_coverage.yml * Update tox.ini * Update run_coverage.yml * Update docs/gallery/general/plot_configurator.py Co-authored-by: Ryan Ly * Update docs/gallery/general/plot_configurator.py Co-authored-by: Ryan Ly * feedback * Update CHANGELOG.md * Update plot_configurator.py * Update plot_configurator.py * Update docs/gallery/general/nwb_gallery_config.yaml Co-authored-by: Ryan Ly --------- Co-authored-by: Ryan Ly --- .github/workflows/check_sphinx_links.yml | 2 +- .github/workflows/run_all_tests.yml | 7 +- .github/workflows/run_coverage.yml | 14 ++- CHANGELOG.md | 1 + .../gallery/general/experimenter_termset.yaml | 17 +++ docs/gallery/general/nwb_gallery_config.yaml | 10 ++ docs/gallery/general/nwb_subject_termset.yaml | 27 +++++ docs/gallery/general/plot_configurator.py | 113 ++++++++++++++++++ requirements-opt.txt | 3 + src/pynwb/__init__.py | 31 +++++ src/pynwb/config/nwb_config.yaml | 7 ++ src/pynwb/config/nwb_subject_termset.yaml | 27 +++++ src/pynwb/core.py | 13 ++ test.py | 5 + tests/unit/test_config.py | 60 ++++++++++ .../unit/test_config/nwb_subject_termset.yaml | 27 +++++ tests/unit/test_config/test_nwb_config.yaml | 7 ++ tests/unit/test_core.py | 5 + tox.ini | 8 ++ 19 files changed, 376 insertions(+), 8 deletions(-) create mode 100644 docs/gallery/general/experimenter_termset.yaml create mode 100644 docs/gallery/general/nwb_gallery_config.yaml create mode 100644 docs/gallery/general/nwb_subject_termset.yaml create mode 100644 docs/gallery/general/plot_configurator.py create mode 100644 requirements-opt.txt create mode 100644 src/pynwb/config/nwb_config.yaml create mode 100644 src/pynwb/config/nwb_subject_termset.yaml create mode 100644 tests/unit/test_config.py create mode 100644 tests/unit/test_config/nwb_subject_termset.yaml create mode 100644 tests/unit/test_config/test_nwb_config.yaml diff --git a/.github/workflows/check_sphinx_links.yml b/.github/workflows/check_sphinx_links.yml index 49da87755..11003a19a 100644 --- a/.github/workflows/check_sphinx_links.yml +++ b/.github/workflows/check_sphinx_links.yml @@ -28,7 +28,7 @@ jobs: - name: Install Sphinx dependencies and package run: | python -m pip install --upgrade pip - python -m pip install -r requirements-doc.txt + python -m pip install -r requirements-doc.txt -r requirements-opt.txt python -m pip install . 
- name: Check Sphinx internal and external links diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml index dab896025..4e8ea2418 100644 --- a/.github/workflows/run_all_tests.yml +++ b/.github/workflows/run_all_tests.yml @@ -26,6 +26,7 @@ jobs: - { name: linux-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: ubuntu-latest } - { name: linux-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: ubuntu-latest } - { name: linux-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } + - { name: linux-python3.11-opt , test-tox-env: py311-optional , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } - { name: linux-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: ubuntu-latest } - { name: linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } - { name: linux-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } @@ -33,6 +34,7 @@ jobs: - { name: windows-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: windows-latest } - { name: windows-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: windows-latest } - { name: windows-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: windows-latest } + - { name: windows-python3.11-opt , test-tox-env: py311-optional , build-tox-env: build-py311 , python-ver: "3.11", os: windows-latest } - { name: windows-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: windows-latest } - { name: windows-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: windows-latest } - { name: windows-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.11", os: windows-latest } @@ -40,6 +42,7 @@ jobs: - { name: macos-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: macos-13 } - { name: macos-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: macos-latest } - { name: macos-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: macos-latest } + - { name: macos-python3.11-opt , test-tox-env: py311-optional , build-tox-env: build-py311 , python-ver: "3.11", os: macos-latest } - { name: macos-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: macos-latest } - { name: macos-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: macos-latest } - { name: macos-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: macos-latest } @@ -198,7 +201,7 @@ jobs: include: - { name: conda-linux-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest } - { name: conda-windows-python3.12-ros3, python-ver: "3.12", os: windows-latest } - - { name: conda-macos-python3.12-ros3 , python-ver: "3.12", os: macos-13 } # This is due to DANDI not supporting osx-arm64. Will support macos-latest when this changes. 
+ - { name: conda-macos-python3.12-ros3 , python-ver: "3.12", os: macos-13 } # This is due to DANDI not supporting osx-arm64. Will support macos-latest when this changes. steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -245,7 +248,7 @@ jobs: include: - { name: conda-linux-gallery-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest } - { name: conda-windows-gallery-python3.12-ros3, python-ver: "3.12", os: windows-latest } - - { name: conda-macos-gallery-python3.12-ros3 , python-ver: "3.12", os: macos-13 } # This is due to DANDI not supporting osx-arm64. Will support macos-latest when this changes. + - { name: conda-macos-gallery-python3.12-ros3 , python-ver: "3.12", os: macos-13 } # This is due to DANDI not supporting osx-arm64. Will support macos-latest when this changes. steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml index 5f060abbf..ec42694d8 100644 --- a/.github/workflows/run_coverage.yml +++ b/.github/workflows/run_coverage.yml @@ -11,7 +11,7 @@ on: jobs: run-coverage: - name: ${{ matrix.os }} + name: ${{ matrix.os }}, opt reqs ${{ matrix.opt_req }} runs-on: ${{ matrix.os }} # TODO handle forks # run pipeline on either a push event or a PR event on a fork @@ -21,7 +21,11 @@ jobs: shell: bash strategy: matrix: - os: [ubuntu-latest, macos-latest, windows-latest] + include: + - { os: ubuntu-latest , opt_req: true } + - { os: ubuntu-latest , opt_req: false } + - { os: windows-latest, opt_req: false } + - { os: macos-latest , opt_req: false } env: OS: ${{ matrix.os }} PYTHON: '3.12' @@ -47,9 +51,9 @@ jobs: python -m pip install --upgrade pip python -m pip install -r requirements-dev.txt -r requirements.txt - # - name: Install optional dependencies - # if: ${{ matrix.opt_req }} - # run: python -m pip install -r requirements-opt.txt + - name: Install optional dependencies + if: ${{ matrix.opt_req }} + run: python -m pip install -r requirements-opt.txt - name: Install package run: | diff --git a/CHANGELOG.md b/CHANGELOG.md index d357c8168..c461c6d7f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ ### Enhancements and minor changes - Set rate default value inside `mock_ElectricalSeries` to avoid having to set `rate=None` explicitly when passing timestamps. @h-mayorquin [#1894](https://github.com/NeurodataWithoutBorders/pynwb/pull/1894) +- Integrate validation through the `TypeConfigurator`. @mavaylon1 [#1829](https://github.com/NeurodataWithoutBorders/pynwb/pull/1829) - Exposed `aws_region` to `NWBHDF5IO`. @rly [#1903](https://github.com/NeurodataWithoutBorders/pynwb/pull/1903) ## PyNWB 2.7.0 (May 2, 2024) diff --git a/docs/gallery/general/experimenter_termset.yaml b/docs/gallery/general/experimenter_termset.yaml new file mode 100644 index 000000000..78ed35e67 --- /dev/null +++ b/docs/gallery/general/experimenter_termset.yaml @@ -0,0 +1,17 @@ +id: termset/experimenter_example +name: Experimenter +version: 0.0.1 +prefixes: + ORC: https://orcid.org/ +imports: + - linkml:types +default_range: string + +enums: + Experimenters: + permissible_values: + Bilbo Baggins: + description: He who must not be named. 
+ meaning: ORC:111 + + diff --git a/docs/gallery/general/nwb_gallery_config.yaml b/docs/gallery/general/nwb_gallery_config.yaml new file mode 100644 index 000000000..bf5cf1463 --- /dev/null +++ b/docs/gallery/general/nwb_gallery_config.yaml @@ -0,0 +1,10 @@ +namespaces: + core: + version: 2.7.0 + data_types: + Subject: + species: + termset: nwb_subject_termset.yaml + NWBFile: + experimenter: + termset: experimenter_termset.yaml diff --git a/docs/gallery/general/nwb_subject_termset.yaml b/docs/gallery/general/nwb_subject_termset.yaml new file mode 100644 index 000000000..e952c6776 --- /dev/null +++ b/docs/gallery/general/nwb_subject_termset.yaml @@ -0,0 +1,27 @@ +id: termset/species_example +name: Species +version: 0.0.1 +prefixes: + NCBI_TAXON: https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id= +imports: + - linkml:types +default_range: string + +enums: + Species: + permissible_values: + Homo sapiens: + description: the species is human + meaning: NCBI_TAXON:9606 + Mus musculus: + description: the species is a house mouse + meaning: NCBI_TAXON:10090 + Ursus arctos horribilis: + description: the species is a grizzly bear + meaning: NCBI_TAXON:116960 + Myrmecophaga tridactyla: + description: the species is an anteater + meaning: NCBI_TAXON:71006 + Ailuropoda melanoleuca: + description: the species is a panda + meaning: NCBI_TAXON:9646 diff --git a/docs/gallery/general/plot_configurator.py b/docs/gallery/general/plot_configurator.py new file mode 100644 index 000000000..52a2a6326 --- /dev/null +++ b/docs/gallery/general/plot_configurator.py @@ -0,0 +1,113 @@ +""" +How to Configure Term Validations +================================= + +This is a user guide for how to curate and take advantage of configuration files in +order to more easily validate terms within datasets or attributes. + +Introduction +------------- +Users will create a configuration YAML file that outlines the fields (within a neurodata type) +they want to be validated against a set of allowed terms. +After creating the configuration file, users will need to load the +configuration file with the :py:func:`~pynwb.load_type_config` method. +With the configuration loaded, every instance of the neurodata +types defined in the configuration file will have the respective fields wrapped with a +:py:class:`~hdmf.term_set.TermSetWrapper`. +This automatic wrapping is what provides the term validation for the field value. +For greater control on which datasets and attributes are validated +against which sets of allowed terms, use the +:py:class:`~hdmf.term_set.TermSetWrapper` on individual datasets and attributes instead. +You can follow the +`TermSet tutorial in the HDMF documentation +`_ +for more information. + +To unload a configuration, simply call :py:func:`~pynwb.unload_type_config`. +We also provide a helper method to see the configuration that has been loaded: +:py:func:`~pynwb.get_loaded_type_config` + + +How to make a Configuration File +-------------------------------- +To see an example of a configuration file, please refer to +``_. +The configuration file uses YAML syntax. The +user will construct a series of nested dictionaries to encompass all the necessary information. + +1. The user needs to define all the relevant namespaces. Recall that each neurodata type exists within + a namespace, whether that is the core namespace in PyNWB or a namespace in an extension. As namespaces grow, + we also require a version to be recorded in the configuration file to ensure proper functionality. +2. 
Within a namespace dictionary, the user will have a list of data types they want to configure. +3. Each data type will have a list of fields associated with a :py:class:`~hdmf.term_set.TermSet`. + The user can use the same or unique TermSet instances for each field. +""" +try: + import linkml_runtime # noqa: F401 +except ImportError as e: + raise ImportError("Please install linkml-runtime to run this example: pip install linkml-runtime") from e + +from dateutil import tz +from datetime import datetime +from uuid import uuid4 +import os + +from pynwb import NWBFile, get_loaded_type_config, load_type_config, unload_type_config +from pynwb.file import Subject + +# How to use a Configuration file +# ------------------------------- +# As mentioned prior, the first step after creating a configuration file is +# to load the file. In this configuration file, we have defined two fields +# we want to always be validated: ``experimenter`` and ``species``. Each of these +# are from a different neurodata type, :py:class:`~pynwb.file.NWBFile` and +# :py:class:`~pynwb.file.Subject` respectively, and each +# have a unique associated :py:class:`~hdmf.term_set.TermSet`. +# It is important to remember that with the configuration loaded, the fields +# are wrapped automatically, meaning the user should proceed with creating +# the instances normally, i.e., without wrapping directly. Once instantiated, +# the value of the fields are wrapped and then validated to see if it is a +# permissible value in their respective :py:class:`~hdmf.term_set.TermSet`. + +dir_path = os.path.dirname(os.path.abspath("__file__")) +yaml_file = os.path.join(dir_path, 'nwb_gallery_config.yaml') +load_type_config(config_path=yaml_file) + +session_start_time = datetime(2018, 4, 25, hour=2, minute=30, second=3, tzinfo=tz.gettz("US/Pacific")) + +nwbfile = NWBFile( + session_description="Mouse exploring an open field", # required + identifier=str(uuid4()), # required + session_start_time=session_start_time, # required + session_id="session_1234", # optional + experimenter=[ + "Bilbo Baggins", + ], # optional + lab="Bag End Laboratory", # optional + institution="University of My Institution", # optional + experiment_description="I went on an adventure to reclaim vast treasures.", # optional + related_publications="DOI:10.1016/j.neuron.2016.12.011", # optional +) + +subject = Subject( + subject_id="001", + age="P90D", + description="mouse 5", + species="Mus musculus", + sex="M", +) + +nwbfile.subject = subject + +#################################### +# How to see the Configuration file +# --------------------------------- +# Call :py:func:`~pynwb.get_loaded_type_config` to get a dictionary containing the +# current configuration. +config = get_loaded_type_config() + +###################################### +# How to unload the Configuration file +# ------------------------------------ +# Call :py:func:`~pynwb.unload_type_config` to toggle off the automatic validation.
+unload_type_config() diff --git a/requirements-opt.txt b/requirements-opt.txt new file mode 100644 index 000000000..3badc79c7 --- /dev/null +++ b/requirements-opt.txt @@ -0,0 +1,3 @@ +linkml-runtime==1.7.4; python_version >= "3.9" +schemasheets==0.2.1; python_version >= "3.9" +oaklib==0.5.32; python_version >= "3.9" diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index ada39764e..50db92dcc 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -12,6 +12,10 @@ from hdmf.backends.hdf5 import HDF5IO as _HDF5IO from hdmf.build import BuildManager, TypeMap import hdmf.common +from hdmf.common import load_type_config as hdmf_load_type_config +from hdmf.common import get_loaded_type_config as hdmf_get_loaded_type_config +from hdmf.common import unload_type_config as hdmf_unload_type_config + CORE_NAMESPACE = 'core' @@ -19,6 +23,33 @@ from .validate import validate # noqa: F401, E402 +@docval({'name': 'config_path', 'type': str, 'doc': 'Path to the configuration file.'}, + {'name': 'type_map', 'type': TypeMap, 'doc': 'The TypeMap.', 'default': None}, + is_method=False) +def load_type_config(**kwargs): + """ + This method will either load the default config or the config provided by the path. + """ + config_path = kwargs['config_path'] + type_map = kwargs['type_map'] or get_type_map() + + hdmf_load_type_config(config_path=config_path, type_map=type_map) + +@docval({'name': 'type_map', 'type': TypeMap, 'doc': 'The TypeMap.', 'default': None}, + is_method=False) +def get_loaded_type_config(**kwargs): + type_map = kwargs['type_map'] or get_type_map() + return hdmf_get_loaded_type_config(type_map=type_map) + +@docval({'name': 'type_map', 'type': TypeMap, 'doc': 'The TypeMap.', 'default': None}, + is_method=False) +def unload_type_config(**kwargs): + """ + Remove validation. + """ + type_map = kwargs['type_map'] or get_type_map() + hdmf_unload_type_config(type_map=type_map) + def __get_resources(): try: from importlib.resources import files diff --git a/src/pynwb/config/nwb_config.yaml b/src/pynwb/config/nwb_config.yaml new file mode 100644 index 000000000..8b1a31a02 --- /dev/null +++ b/src/pynwb/config/nwb_config.yaml @@ -0,0 +1,7 @@ +namespaces: + core: + version: 2.7.0 + data_types: + Subject: + species: + termset: nwb_subject_termset.yaml diff --git a/src/pynwb/config/nwb_subject_termset.yaml b/src/pynwb/config/nwb_subject_termset.yaml new file mode 100644 index 000000000..e952c6776 --- /dev/null +++ b/src/pynwb/config/nwb_subject_termset.yaml @@ -0,0 +1,27 @@ +id: termset/species_example +name: Species +version: 0.0.1 +prefixes: + NCBI_TAXON: https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id= +imports: + - linkml:types +default_range: string + +enums: + Species: + permissible_values: + Homo sapiens: + description: the species is human + meaning: NCBI_TAXON:9606 + Mus musculus: + description: the species is a house mouse + meaning: NCBI_TAXON:10090 + Ursus arctos horribilis: + description: the species is a grizzly bear + meaning: NCBI_TAXON:116960 + Myrmecophaga tridactyla: + description: the species is an anteater + meaning: NCBI_TAXON:71006 + Ailuropoda melanoleuca: + description: the species is a panda + meaning: NCBI_TAXON:9646 diff --git a/src/pynwb/core.py b/src/pynwb/core.py index b54f3e147..f9ae2bd2f 100644 --- a/src/pynwb/core.py +++ b/src/pynwb/core.py @@ -10,6 +10,7 @@ from hdmf.utils import LabelledDict # noqa: F401 from . 
import CORE_NAMESPACE, register_class +from pynwb import get_type_map def _not_parent(arg): @@ -46,6 +47,18 @@ def _error_on_new_warn_on_construct(self, error_msg: str): raise ValueError(error_msg) warn(error_msg) + def _get_type_map(self): + return get_type_map() + + @property + def data_type(self): + """ + Return the spec data type associated with this container, i.e., the neurodata_type. + """ + # we need this function here to use the correct _data_type_attr. + _type = getattr(self, self._data_type_attr) + return _type + @register_class('NWBContainer', CORE_NAMESPACE) class NWBContainer(NWBMixin, Container): diff --git a/test.py b/test.py index dec0966d8..5bddb7c7d 100644 --- a/test.py +++ b/test.py @@ -136,6 +136,11 @@ def __run_example_tests_helper(examples_scripts): ws.append(w) for w in ws: warnings.showwarning(w.message, w.category, w.filename, w.lineno, w.line) + except (ImportError, ValueError, ModuleNotFoundError) as e: + if "linkml" in str(e): + pass # this is OK because linkml is not always installed + else: + raise e except Exception: print(traceback.format_exc()) FAILURES += 1 diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 000000000..925d57879 --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,60 @@ +from dateutil import tz +from datetime import datetime +from uuid import uuid4 +import os + +from hdmf.term_set import TermSetWrapper + +from pynwb import NWBFile, get_loaded_type_config, load_type_config, unload_type_config +from pynwb.file import Subject +from pynwb.testing import TestCase + +try: + from linkml_runtime.utils.schemaview import SchemaView # noqa: F401 + REQUIREMENTS_INSTALLED = True +except ImportError: + REQUIREMENTS_INSTALLED = False + + +class TestPyNWBTypeConfig(TestCase): + def setUp(self): + if not REQUIREMENTS_INSTALLED: + self.skipTest("optional LinkML module is not installed") + CUR_DIR = os.path.dirname(os.path.realpath(__file__)) + path_to_config = os.path.join(CUR_DIR, 'test_config/test_nwb_config.yaml') + load_type_config(config_path=path_to_config) + + def tearDown(self): + unload_type_config() + + def test_get_loaded_type_config(self): + config = get_loaded_type_config() + self.assertEqual(config, + {'namespaces': {'core': {'version': '2.7.0-alpha', + 'data_types': {'Subject': {'species': {'termset': 'nwb_subject_termset.yaml'}}}}}}) + + def test_default_config(self): + session_start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific")) + nwbfile = NWBFile( + session_description="Mouse exploring an open field", # required + identifier=str(uuid4()), # required + session_start_time=session_start_time, # required + session_id="session_1234", # optional + experimenter=[ + "Ryan Ly", + ], # optional + lab="Bag End Laboratory", # optional + institution="University of My Institution", # optional + experiment_description="I went on an adventure to reclaim vast treasures.", # optional + related_publications="DOI:10.1016/j.neuron.2016.12.011", # optional + ) + subject = Subject( + subject_id="01", + age="One shouldn't ask", + description="A human.", + species="Homo sapiens", + sex="M", + ) + nwbfile.subject = subject + + self.assertIsInstance(nwbfile.subject.species, TermSetWrapper) diff --git a/tests/unit/test_config/nwb_subject_termset.yaml b/tests/unit/test_config/nwb_subject_termset.yaml new file mode 100644 index 000000000..e952c6776 --- /dev/null +++ b/tests/unit/test_config/nwb_subject_termset.yaml @@ -0,0 +1,27 @@ +id: termset/species_example +name: Species +version: 0.0.1 
+prefixes: + NCBI_TAXON: https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id= +imports: + - linkml:types +default_range: string + +enums: + Species: + permissible_values: + Homo sapiens: + description: the species is human + meaning: NCBI_TAXON:9606 + Mus musculus: + description: the species is a house mouse + meaning: NCBI_TAXON:10090 + Ursus arctos horribilis: + description: the species is a grizzly bear + meaning: NCBI_TAXON:116960 + Myrmecophaga tridactyla: + description: the species is an anteater + meaning: NCBI_TAXON:71006 + Ailuropoda melanoleuca: + description: the species is a panda + meaning: NCBI_TAXON:9646 diff --git a/tests/unit/test_config/test_nwb_config.yaml b/tests/unit/test_config/test_nwb_config.yaml new file mode 100644 index 000000000..f7b5b8368 --- /dev/null +++ b/tests/unit/test_config/test_nwb_config.yaml @@ -0,0 +1,7 @@ +namespaces: + core: + version: 2.7.0-alpha + data_types: + Subject: + species: + termset: nwb_subject_termset.yaml diff --git a/tests/unit/test_core.py b/tests/unit/test_core.py index b278bc5ad..e2a060d20 100644 --- a/tests/unit/test_core.py +++ b/tests/unit/test_core.py @@ -35,6 +35,11 @@ def test_nwbfields(self): self.assertEqual(obj.prop1, "test1") self.assertEqual(obj.prop2, "test2") + def test_get_data_type(self): + obj = NWBContainer("obj1") + dt = obj.data_type + self.assertEqual(dt, 'NWBContainer') + class MyNWBData(NWBData): diff --git a/tox.ini b/tox.ini index 8aecbb544..7920b6cd7 100644 --- a/tox.ini +++ b/tox.ini @@ -32,6 +32,14 @@ deps = -rrequirements-dev.txt commands = {[testenv]commands} +# Test with python 3.11; optional and dev reqs (some features are not compatible with 3.12 yet) +[testenv:py311-optional] +basepython = python3.11 +deps = + -rrequirements-dev.txt + -rrequirements-opt.txt +commands = {[testenv]commands} + # Test with python 3.12; pinned dev reqs; upgraded, pre-release run reqs [testenv:py312-prerelease] basepython = python3.12 From 05756f937041452242fa0b60db3daaa34c4e25d5 Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Tue, 28 May 2024 16:28:08 -0700 Subject: [PATCH 06/16] Tell codespell to ignore "assertIn" (#1909) --- CHANGELOG.md | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c461c6d7f..e59ced055 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -327,7 +327,7 @@ ### Tutorial enhancements: - Added new tutorial for intracellular electrophysiology to describe the use of the new metadata tables - and declared the previous tutoral using ``SweepTable`` as deprecated. @oruebel (#1349) + and declared the previous tutorial using ``SweepTable`` as deprecated. @oruebel (#1349) - Added new tutorial for querying intracellular electrophysiology metadata (``docs/gallery/domain/plot_icephys_pandas.py``). @oruebel (#1349, #1383) - Added thumbnails for tutorials to improve presentation of online docs. 
@oruebel (#1349) diff --git a/pyproject.toml b/pyproject.toml index 92e921999..77d33e352 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,7 +72,7 @@ exclude = [ [tool.codespell] skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,nwb-schema,./docs/_build/*,*.ipynb" -ignore-words-list = "optin,potatos" +ignore-words-list = "optin,potatos,assertin" [tool.coverage.run] branch = true From a7d585e0a8cfa5fa6c2724f137beb5fc10aad7f9 Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Tue, 28 May 2024 17:55:03 -0700 Subject: [PATCH 07/16] Revert "Do not auto-set timezone, allow date (#1886)" (#1908) Co-authored-by: Steph Prince <40640337+stephprince@users.noreply.github.com> --- CHANGELOG.md | 5 +- docs/gallery/advanced_io/h5dataio.py | 6 +- docs/gallery/advanced_io/linking_data.py | 7 +- docs/gallery/advanced_io/parallelio.py | 2 +- .../advanced_io/plot_iterative_write.py | 8 +- docs/gallery/domain/images.py | 21 ++- docs/gallery/general/add_remove_containers.py | 4 +- docs/gallery/general/extensions.py | 19 +-- docs/gallery/general/object_id.py | 8 +- docs/gallery/general/plot_file.py | 2 +- docs/gallery/general/plot_timeintervals.py | 7 +- docs/gallery/general/scratch.py | 12 +- src/pynwb/file.py | 41 +++-- src/pynwb/io/file.py | 32 +--- src/pynwb/testing/mock/file.py | 3 +- src/pynwb/testing/testh5io.py | 7 +- tests/integration/hdf5/test_base.py | 5 +- tests/integration/hdf5/test_io.py | 4 +- .../integration/hdf5/test_modular_storage.py | 3 +- tests/integration/hdf5/test_nwbfile.py | 141 +++--------------- tests/unit/test_epoch.py | 3 +- tests/unit/test_extension.py | 5 +- tests/unit/test_file.py | 75 +++------- tests/unit/test_icephys.py | 13 +- tests/unit/test_scratch.py | 3 +- 25 files changed, 174 insertions(+), 262 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e59ced055..34665c932 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,15 @@ # PyNWB Changelog -## PyNWB 2.8.0 (Upcoming) +## PyNWB 2.8.0 (May 28, 2024) ### Enhancements and minor changes - Set rate default value inside `mock_ElectricalSeries` to avoid having to set `rate=None` explicitly when passing timestamps. @h-mayorquin [#1894](https://github.com/NeurodataWithoutBorders/pynwb/pull/1894) - Integrate validation through the `TypeConfigurator`. @mavaylon1 [#1829](https://github.com/NeurodataWithoutBorders/pynwb/pull/1829) - Exposed `aws_region` to `NWBHDF5IO`. @rly [#1903](https://github.com/NeurodataWithoutBorders/pynwb/pull/1903) +### Bug fixes +- Revert changes in PyNWB 2.7.0 that allow datetimes without a timezone and without a time while issues with DANDI upload are resolved. 
@rly [#1908](https://github.com/NeurodataWithoutBorders/pynwb/pull/1908) + ## PyNWB 2.7.0 (May 2, 2024) ### Enhancements and minor changes diff --git a/docs/gallery/advanced_io/h5dataio.py b/docs/gallery/advanced_io/h5dataio.py index 5b5f73bc2..3b4391655 100644 --- a/docs/gallery/advanced_io/h5dataio.py +++ b/docs/gallery/advanced_io/h5dataio.py @@ -19,9 +19,12 @@ # from datetime import datetime + +from dateutil.tz import tzlocal + from pynwb import NWBFile -start_time = datetime(2017, 4, 3, hour=11, minute=0) +start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal()) nwbfile = NWBFile( session_description="demonstrate advanced HDF5 I/O features", @@ -29,6 +32,7 @@ session_start_time=start_time, ) + #################### # Normally if we create a :py:class:`~pynwb.base.TimeSeries` we would do diff --git a/docs/gallery/advanced_io/linking_data.py b/docs/gallery/advanced_io/linking_data.py index 93f93c825..2f79d1488 100644 --- a/docs/gallery/advanced_io/linking_data.py +++ b/docs/gallery/advanced_io/linking_data.py @@ -51,12 +51,15 @@ # sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_linking_data.png' from datetime import datetime +from uuid import uuid4 + import numpy as np +from dateutil.tz import tzlocal + from pynwb import NWBHDF5IO, NWBFile, TimeSeries -from uuid import uuid4 # Create the base data -start_time = datetime(2017, 4, 3, hour=11, minute=0) +start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal()) data = np.arange(1000).reshape((100, 10)) timestamps = np.arange(100) filename1 = "external1_example.nwb" diff --git a/docs/gallery/advanced_io/parallelio.py b/docs/gallery/advanced_io/parallelio.py index f04ef4a87..53abdf239 100644 --- a/docs/gallery/advanced_io/parallelio.py +++ b/docs/gallery/advanced_io/parallelio.py @@ -32,7 +32,7 @@ # from datetime import datetime # from hdmf.backends.hdf5.h5_utils import H5DataIO # -# start_time = datetime(2018, 4, 25, hour=2, minute=30, second=3) +# start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific")) # fname = "test_parallel_pynwb.nwb" # rank = MPI.COMM_WORLD.rank # The process ID (integer 0-3 for 4-process run) # diff --git a/docs/gallery/advanced_io/plot_iterative_write.py b/docs/gallery/advanced_io/plot_iterative_write.py index 36b8bc0be..958981a0b 100644 --- a/docs/gallery/advanced_io/plot_iterative_write.py +++ b/docs/gallery/advanced_io/plot_iterative_write.py @@ -110,9 +110,13 @@ # sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_iterative_write.png' from datetime import datetime -from pynwb import NWBHDF5IO, NWBFile, TimeSeries from uuid import uuid4 +from dateutil.tz import tzlocal + +from pynwb import NWBHDF5IO, NWBFile, TimeSeries + + def write_test_file(filename, data, close_io=True): """ @@ -125,7 +129,7 @@ def write_test_file(filename, data, close_io=True): """ # Create a test NWBfile - start_time = datetime(2017, 4, 3, hour=11, minute=30) + start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal()) nwbfile = NWBFile( session_description="demonstrate iterative write", identifier=str(uuid4()), diff --git a/docs/gallery/domain/images.py b/docs/gallery/domain/images.py index b4511e3c5..d6eef24b3 100644 --- a/docs/gallery/domain/images.py +++ b/docs/gallery/domain/images.py @@ -19,18 +19,23 @@ The following examples will reference variables that may not be defined within the block they are used in. 
For clarity, we define them here: """ +# Define file paths used in the tutorial + +import os # sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_image_data.png' from datetime import datetime +from uuid import uuid4 + import numpy as np -import os +from dateutil import tz +from dateutil.tz import tzlocal from PIL import Image + from pynwb import NWBHDF5IO, NWBFile from pynwb.base import Images from pynwb.image import GrayscaleImage, ImageSeries, OpticalSeries, RGBAImage, RGBImage -from uuid import uuid4 -# Define file paths used in the tutorial nwbfile_path = os.path.abspath("images_tutorial.nwb") moviefiles_path = [ os.path.abspath("image/file_1.tiff"), @@ -45,12 +50,12 @@ # Create an :py:class:`~pynwb.file.NWBFile` object with the required fields # (``session_description``, ``identifier``, ``session_start_time``) and additional metadata. -session_start_time = datetime(2018, 4, 25, hour=2, minute=30) +session_start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific")) nwbfile = NWBFile( session_description="my first synthetic recording", identifier=str(uuid4()), - session_start_time=session_start_time, + session_start_time=datetime.now(tzlocal()), experimenter=[ "Baggins, Bilbo", ], @@ -133,13 +138,13 @@ # ^^^^^^^^^^^^^^ # # External files (e.g. video files of the behaving animal) can be added to the :py:class:`~pynwb.file.NWBFile` -# by creating an :py:class:`~pynwb.image.ImageSeries` object using the +# by creating an :py:class:`~pynwb.image.ImageSeries` object using the # :py:attr:`~pynwb.image.ImageSeries.external_file` attribute that specifies # the path to the external file(s) on disk. # The file(s) path must be relative to the path of the NWB file. # Either ``external_file`` or ``data`` must be specified, but not both. # -# If the sampling rate is constant, use :py:attr:`~pynwb.base.TimeSeries.rate` and +# If the sampling rate is constant, use :py:attr:`~pynwb.base.TimeSeries.rate` and # :py:attr:`~pynwb.base.TimeSeries.starting_time` to specify time. # For irregularly sampled recordings, use :py:attr:`~pynwb.base.TimeSeries.timestamps` to specify time for each sample # image. @@ -147,7 +152,7 @@ # Each external image may contain one or more consecutive frames of the full :py:class:`~pynwb.image.ImageSeries`. # The :py:attr:`~pynwb.image.ImageSeries.starting_frame` attribute serves as an index to indicate which frame # each file contains. -# For example, if the ``external_file`` dataset has three paths to files and the first and the second file have 2 +# For example, if the ``external_file`` dataset has three paths to files and the first and the second file have 2 # frames, and the third file has 3 frames, then this attribute will have values `[0, 2, 4]`. 
external_file = [ diff --git a/docs/gallery/general/add_remove_containers.py b/docs/gallery/general/add_remove_containers.py index fcb74e72a..86aa373b2 100644 --- a/docs/gallery/general/add_remove_containers.py +++ b/docs/gallery/general/add_remove_containers.py @@ -33,7 +33,7 @@ nwbfile = NWBFile( session_description="demonstrate adding to an NWB file", identifier="NWB123", - session_start_time=datetime.datetime.now(), + session_start_time=datetime.datetime.now(datetime.timezone.utc), ) filename = "nwbfile.nwb" @@ -91,7 +91,7 @@ nwbfile = NWBFile( session_description="demonstrate export of an NWB file", identifier="NWB123", - session_start_time=datetime.datetime.now(), + session_start_time=datetime.datetime.now(datetime.timezone.utc), ) data1 = list(range(100, 200, 10)) timestamps1 = np.arange(10, dtype=float) diff --git a/docs/gallery/general/extensions.py b/docs/gallery/general/extensions.py index 7e232f168..ddf9159c7 100644 --- a/docs/gallery/general/extensions.py +++ b/docs/gallery/general/extensions.py @@ -164,15 +164,16 @@ def __init__(self, **kwargs): # To demonstrate this, first we will make some simulated data using our extensions. from datetime import datetime + +from dateutil.tz import tzlocal + from pynwb import NWBFile -from uuid import uuid4 -session_start_time = datetime(2017, 4, 3, hour=11, minute=0) +start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal()) +create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal()) nwbfile = NWBFile( - session_description="demonstrate caching", - identifier=str(uuid4()), - session_start_time=session_start_time, + "demonstrate caching", "NWB456", start_time, file_create_date=create_date ) device = nwbfile.create_device(name="trodes_rig123") @@ -332,6 +333,9 @@ class PotatoSack(MultiContainerInterface): # Then use the objects (again, this would often be done in a different file). from datetime import datetime + +from dateutil.tz import tzlocal + from pynwb import NWBHDF5IO, NWBFile # You can add potatoes to a potato sack in different ways @@ -339,11 +343,8 @@ class PotatoSack(MultiContainerInterface): potato_sack.add_potato(Potato("potato2", 3.0, 4.0)) potato_sack.create_potato("big_potato", 10.0, 20.0) -session_start_time = datetime(2017, 4, 3, hour=12, minute=0) nwbfile = NWBFile( - session_description="a file with metadata", - identifier=str(uuid4()), - session_start_time = session_start_time, + "a file with metadata", "NB123A", datetime(2018, 6, 1, tzinfo=tzlocal()) ) pmod = nwbfile.create_processing_module("module_name", "desc") diff --git a/docs/gallery/general/object_id.py b/docs/gallery/general/object_id.py index 25f125805..a4de45625 100644 --- a/docs/gallery/general/object_id.py +++ b/docs/gallery/general/object_id.py @@ -16,14 +16,16 @@ """ -# sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_objectid.png' - from datetime import datetime + import numpy as np +from dateutil.tz import tzlocal + +# sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_objectid.png' from pynwb import NWBFile, TimeSeries # set up the NWBFile -start_time = datetime(2019, 4, 3, hour=11, minute=0) +start_time = datetime(2019, 4, 3, 11, tzinfo=tzlocal()) nwbfile = NWBFile( session_description="demonstrate NWB object IDs", identifier="NWB456", diff --git a/docs/gallery/general/plot_file.py b/docs/gallery/general/plot_file.py index 5bfa837ee..5c59abf8d 100644 --- a/docs/gallery/general/plot_file.py +++ b/docs/gallery/general/plot_file.py @@ -165,7 +165,7 @@ # Use keyword arguments when constructing :py:class:`~pynwb.file.NWBFile` objects. 
# -session_start_time = datetime(2018, 4, 25, hour=2, minute=30, second=3, tzinfo=tz.gettz("US/Pacific")) +session_start_time = datetime(2018, 4, 25, 2, 30, 3, tzinfo=tz.gettz("US/Pacific")) nwbfile = NWBFile( session_description="Mouse exploring an open field", # required diff --git a/docs/gallery/general/plot_timeintervals.py b/docs/gallery/general/plot_timeintervals.py index 3a4ad306a..4069fd4a4 100644 --- a/docs/gallery/general/plot_timeintervals.py +++ b/docs/gallery/general/plot_timeintervals.py @@ -36,15 +36,18 @@ # sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_timeintervals.png' from datetime import datetime +from uuid import uuid4 + import numpy as np +from dateutil.tz import tzlocal + from pynwb import NWBFile, TimeSeries -from uuid import uuid4 # create the NWBFile nwbfile = NWBFile( session_description="my first synthetic recording", # required identifier=str(uuid4()), # required - session_start_time=datetime(2017, 4, 3, hour=11), # required + session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), # required experimenter="Baggins, Bilbo", # optional lab="Bag End Laboratory", # optional institution="University of Middle Earth at the Shire", # optional diff --git a/docs/gallery/general/scratch.py b/docs/gallery/general/scratch.py index 9083e4081..0e00c5e96 100644 --- a/docs/gallery/general/scratch.py +++ b/docs/gallery/general/scratch.py @@ -27,20 +27,24 @@ # To demonstrate linking and scratch space, lets assume we are starting with some acquired data. # -# sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_scratch.png' - from datetime import datetime + import numpy as np +from dateutil.tz import tzlocal + +# sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_scratch.png' from pynwb import NWBHDF5IO, NWBFile, TimeSeries # set up the NWBFile -start_time = datetime(2019, 4, 3, hour=11, minute=0) +start_time = datetime(2019, 4, 3, 11, tzinfo=tzlocal()) +create_date = datetime(2019, 4, 15, 12, tzinfo=tzlocal()) nwb = NWBFile( session_description="demonstrate NWBFile scratch", # required identifier="NWB456", # required session_start_time=start_time, # required -) + file_create_date=create_date, +) # optional # make some fake data timestamps = np.linspace(0, 100, 1024) diff --git a/src/pynwb/file.py b/src/pynwb/file.py index 06b7fbe07..0b294e873 100644 --- a/src/pynwb/file.py +++ b/src/pynwb/file.py @@ -1,4 +1,4 @@ -from datetime import datetime, date, timedelta +from datetime import datetime, timedelta from dateutil.tz import tzlocal from collections.abc import Iterable from warnings import warn @@ -104,8 +104,8 @@ class Subject(NWBContainer): 'doc': ('The weight of the subject, including units. Using kilograms is recommended. e.g., "0.02 kg". ' 'If a float is provided, then the weight will be stored as "[value] kg".'), 'default': None}, - {'name': 'date_of_birth', 'type': (datetime, date), 'default': None, - 'doc': 'The date of birth, which may include time and timezone. May be supplied instead of age.'}, + {'name': 'date_of_birth', 'type': datetime, 'default': None, + 'doc': 'The datetime of the date of birth. 
May be supplied instead of age.'}, {'name': 'strain', 'type': str, 'doc': 'The strain of the subject, e.g., "C57BL/6J"', 'default': None}, ) def __init__(self, **kwargs): @@ -141,6 +141,10 @@ def __init__(self, **kwargs): if isinstance(args_to_set["age"], timedelta): args_to_set["age"] = pd.Timedelta(args_to_set["age"]).isoformat() + date_of_birth = args_to_set['date_of_birth'] + if date_of_birth and date_of_birth.tzinfo is None: + args_to_set['date_of_birth'] = _add_missing_timezone(date_of_birth) + for key, val in args_to_set.items(): setattr(self, key, val) @@ -304,11 +308,10 @@ class NWBFile(MultiContainerInterface, HERDManager): @docval({'name': 'session_description', 'type': str, 'doc': 'a description of the session where this data was generated'}, {'name': 'identifier', 'type': str, 'doc': 'a unique text identifier for the file'}, - {'name': 'session_start_time', 'type': (datetime, date), - 'doc': 'the start date and time of the recording session'}, - {'name': 'file_create_date', 'type': ('array_data', datetime, date), + {'name': 'session_start_time', 'type': datetime, 'doc': 'the start date and time of the recording session'}, + {'name': 'file_create_date', 'type': ('array_data', datetime), 'doc': 'the date and time the file was created and subsequent modifications made', 'default': None}, - {'name': 'timestamps_reference_time', 'type': (datetime, date), + {'name': 'timestamps_reference_time', 'type': datetime, 'doc': 'date and time corresponding to time zero of all timestamps; defaults to value ' 'of session_start_time', 'default': None}, {'name': 'experimenter', 'type': (tuple, list, str), @@ -463,18 +466,26 @@ def __init__(self, **kwargs): kwargs['name'] = 'root' super().__init__(**kwargs) + # add timezone to session_start_time if missing + session_start_time = args_to_set['session_start_time'] + if session_start_time.tzinfo is None: + args_to_set['session_start_time'] = _add_missing_timezone(session_start_time) + # set timestamps_reference_time to session_start_time if not provided + # if provided, ensure that it has a timezone timestamps_reference_time = args_to_set['timestamps_reference_time'] if timestamps_reference_time is None: args_to_set['timestamps_reference_time'] = args_to_set['session_start_time'] + elif timestamps_reference_time.tzinfo is None: + raise ValueError("'timestamps_reference_time' must be a timezone-aware datetime object.") # convert file_create_date to list and add timezone if missing file_create_date = args_to_set['file_create_date'] if file_create_date is None: file_create_date = datetime.now(tzlocal()) - if isinstance(file_create_date, (datetime, date)): + if isinstance(file_create_date, datetime): file_create_date = [file_create_date] - args_to_set['file_create_date'] = file_create_date + args_to_set['file_create_date'] = list(map(_add_missing_timezone, file_create_date)) # backwards-compatibility code for ic_electrodes / icephys_electrodes icephys_electrodes = args_to_set['icephys_electrodes'] @@ -1144,6 +1155,18 @@ def copy(self): return NWBFile(**kwargs) +def _add_missing_timezone(date): + """ + Add local timezone information on a datetime object if it is missing. + """ + if not isinstance(date, datetime): + raise ValueError("require datetime object") + if date.tzinfo is None: + warn("Date is missing timezone information. 
Updating to local timezone.", stacklevel=2) + return date.replace(tzinfo=tzlocal()) + return date + + def _tablefunc(table_name, description, columns): t = DynamicTable(name=table_name, description=description) for c in columns: diff --git a/src/pynwb/io/file.py b/src/pynwb/io/file.py index 15c2c1c06..1908c6b31 100644 --- a/src/pynwb/io/file.py +++ b/src/pynwb/io/file.py @@ -1,4 +1,4 @@ -import datetime +from dateutil.parser import parse as dateutil_parse from hdmf.build import ObjectMapper @@ -8,22 +8,6 @@ from .utils import get_nwb_version -def parse_datetime(datestr): - """Parse an ISO 8601 date string into a datetime object or a date object. - - If the date string does not contain a time component, then parse into a date object. - - :param datestr: str - :return: datetime.datetime or datetime.date - """ - if isinstance(datestr, bytes): - datestr = datestr.decode("utf-8") - dt = datetime.datetime.fromisoformat(datestr) - if "T" not in datestr: - dt = dt.date() - return dt - - @register_map(NWBFile) class NWBFileMap(ObjectMapper): @@ -173,19 +157,19 @@ def scratch(self, builder, manager): @ObjectMapper.constructor_arg('session_start_time') def dateconversion(self, builder, manager): datestr = builder.get('session_start_time').data - dt = parse_datetime(datestr) - return dt + date = dateutil_parse(datestr) + return date @ObjectMapper.constructor_arg('timestamps_reference_time') def dateconversion_trt(self, builder, manager): datestr = builder.get('timestamps_reference_time').data - dt = parse_datetime(datestr) - return dt + date = dateutil_parse(datestr) + return date @ObjectMapper.constructor_arg('file_create_date') def dateconversion_list(self, builder, manager): datestr = builder.get('file_create_date').data - dates = list(map(parse_datetime, datestr)) + dates = list(map(dateutil_parse, datestr)) return dates @ObjectMapper.constructor_arg('file_name') @@ -239,8 +223,8 @@ def dateconversion(self, builder, manager): return else: datestr = dob_builder.data - dt = parse_datetime(datestr) - return dt + date = dateutil_parse(datestr) + return date @ObjectMapper.constructor_arg("age__reference") def age_reference_none(self, builder, manager): diff --git a/src/pynwb/testing/mock/file.py b/src/pynwb/testing/mock/file.py index 50369e7e1..943f86dcb 100644 --- a/src/pynwb/testing/mock/file.py +++ b/src/pynwb/testing/mock/file.py @@ -1,6 +1,7 @@ from typing import Optional from uuid import uuid4 from datetime import datetime +from dateutil.tz import tzlocal from ...file import NWBFile, Subject from .utils import name_generator @@ -9,7 +10,7 @@ def mock_NWBFile( session_description: str = 'session_description', identifier: Optional[str] = None, - session_start_time: datetime = datetime(1970, 1, 1), + session_start_time: datetime = datetime(1970, 1, 1, tzinfo=tzlocal()), **kwargs ): return NWBFile( diff --git a/src/pynwb/testing/testh5io.py b/src/pynwb/testing/testh5io.py index 45ae8cebe..7234e79f5 100644 --- a/src/pynwb/testing/testh5io.py +++ b/src/pynwb/testing/testh5io.py @@ -1,4 +1,5 @@ from datetime import datetime +from dateutil.tz import tzlocal, tzutc import os from abc import ABCMeta, abstractmethod import warnings @@ -32,8 +33,8 @@ def getContainer(self, nwbfile): def setUp(self): self.container = self.setUpContainer() - self.start_time = datetime(1971, 1, 1, 12) - self.create_date = datetime(2018, 4, 15, 12) + self.start_time = datetime(1971, 1, 1, 12, tzinfo=tzutc()) + self.create_date = datetime(2018, 4, 15, 12, tzinfo=tzlocal()) self.container_type = self.container.__class__.__name__ 
self.filename = 'test_%s.nwb' % self.container_type self.export_filename = 'test_export_%s.nwb' % self.container_type @@ -225,7 +226,7 @@ def setUp(self): container_type = self.getContainerType().replace(" ", "_") session_description = 'A file to test writing and reading a %s' % container_type identifier = 'TEST_%s' % container_type - session_start_time = datetime(1971, 1, 1, 12) + session_start_time = datetime(1971, 1, 1, 12, tzinfo=tzutc()) self.nwbfile = NWBFile( session_description=session_description, identifier=identifier, diff --git a/tests/integration/hdf5/test_base.py b/tests/integration/hdf5/test_base.py index 75c346012..60f8510ff 100644 --- a/tests/integration/hdf5/test_base.py +++ b/tests/integration/hdf5/test_base.py @@ -1,5 +1,6 @@ import numpy as np from datetime import datetime +from dateutil.tz import tzlocal from pynwb import TimeSeries, NWBFile, NWBHDF5IO from pynwb.base import Images, Image, ImageReferences @@ -33,7 +34,7 @@ def test_timestamps_linking(self): tsa = TimeSeries(name='a', data=np.linspace(0, 1, 1000), timestamps=np.arange(1000.), unit='m') tsb = TimeSeries(name='b', data=np.linspace(0, 1, 1000), timestamps=tsa, unit='m') nwbfile = NWBFile(identifier='foo', - session_start_time=datetime(2017, 5, 1, 12, 0, 0), + session_start_time=datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()), session_description='bar') nwbfile.add_acquisition(tsa) nwbfile.add_acquisition(tsb) @@ -51,7 +52,7 @@ def test_data_linking(self): tsb = TimeSeries(name='b', data=tsa, timestamps=np.arange(1000.), unit='m') tsc = TimeSeries(name='c', data=tsb, timestamps=np.arange(1000.), unit='m') nwbfile = NWBFile(identifier='foo', - session_start_time=datetime(2017, 5, 1, 12, 0, 0), + session_start_time=datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()), session_description='bar') nwbfile.add_acquisition(tsa) nwbfile.add_acquisition(tsb) diff --git a/tests/integration/hdf5/test_io.py b/tests/integration/hdf5/test_io.py index da22a0651..d68334c89 100644 --- a/tests/integration/hdf5/test_io.py +++ b/tests/integration/hdf5/test_io.py @@ -245,7 +245,7 @@ class TestAppend(TestCase): def setUp(self): self.nwbfile = NWBFile(session_description='hi', identifier='hi', - session_start_time=datetime(1970, 1, 1, 12)) + session_start_time=datetime(1970, 1, 1, 12, tzinfo=tzutc())) self.path = "test_append.nwb" def tearDown(self): @@ -312,7 +312,7 @@ class TestH5DataIO(TestCase): def setUp(self): self.nwbfile = NWBFile(session_description='a', identifier='b', - session_start_time=datetime(1970, 1, 1, 12)) + session_start_time=datetime(1970, 1, 1, 12, tzinfo=tzutc())) self.path = "test_pynwb_io_hdf5_h5dataIO.h5" def tearDown(self): diff --git a/tests/integration/hdf5/test_modular_storage.py b/tests/integration/hdf5/test_modular_storage.py index 09c916bfb..fba5d02db 100644 --- a/tests/integration/hdf5/test_modular_storage.py +++ b/tests/integration/hdf5/test_modular_storage.py @@ -1,6 +1,7 @@ import os import gc from datetime import datetime +from dateutil.tz import tzutc import numpy as np from hdmf.backends.hdf5 import HDF5IO @@ -18,7 +19,7 @@ def setUp(self): self.link_filename = os.path.join(os.getcwd(), 'test_time_series_modular_link.nwb') # Make the data container file write - self.start_time = datetime(1971, 1, 1, 12) + self.start_time = datetime(1971, 1, 1, 12, tzinfo=tzutc()) self.data = np.arange(2000).reshape((1000, 2)) self.timestamps = np.linspace(0, 1, 1000) # The container before roundtrip diff --git a/tests/integration/hdf5/test_nwbfile.py b/tests/integration/hdf5/test_nwbfile.py index 
641928598..e164ec649 100644 --- a/tests/integration/hdf5/test_nwbfile.py +++ b/tests/integration/hdf5/test_nwbfile.py @@ -1,4 +1,4 @@ -from datetime import datetime, date +from datetime import datetime from dateutil.tz import tzlocal, tzutc import pandas as pd import numpy as np @@ -21,12 +21,7 @@ def setUp(self): """ Set up an NWBFile object with an acquisition TimeSeries, analysis TimeSeries, and a processing module """ self.start_time = datetime(1970, 1, 1, 12, tzinfo=tzutc()) self.ref_time = datetime(1979, 1, 1, 0, tzinfo=tzutc()) - # try some dates with/without timezone and time - self.create_date = [ - datetime(2017, 5, 1, 12, tzinfo=tzlocal()), - datetime(2017, 5, 2, 13), - datetime(2017, 5, 2), - ] + self.create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal()) self.manager = get_manager() self.filename = 'test_nwbfileio.h5' self.nwbfile = NWBFile(session_description='a test NWB File', @@ -157,61 +152,16 @@ def getContainer(self, nwbfile): return nwbfile -class TestNWBFileNoTimezoneRoundTrip(TestNWBFileIO): - """ Test that an NWBFile with no timezone information can be written to and read from file """ - - def build_nwbfile(self): - description = 'test nwbfile no time' - identifier = 'TEST_no_time' - start_time = datetime(2024, 4, 10, 0, 21) - - self.container = NWBFile( - session_description=description, - identifier=identifier, - session_start_time=start_time - ) - - def roundtripContainer(self, cache_spec=False): - self.read_nwbfile = super().roundtripContainer(cache_spec=cache_spec) - self.assertEqual(self.read_nwbfile.session_start_time, self.container.session_start_time) - self.assertIsNone(self.read_nwbfile.session_start_time.tzinfo) - return self.read_nwbfile - - -class TestNWBFileNoTimeRoundTrip(TestNWBFileIO): - """ Test that an NWBFile with no time information can be written to and read from file """ - - def build_nwbfile(self): - description = 'test nwbfile no time' - identifier = 'TEST_no_time' - start_time = date(2024, 4, 9) - - self.container = NWBFile( - session_description=description, - identifier=identifier, - session_start_time=start_time - ) - - def roundtripContainer(self, cache_spec=False): - self.read_nwbfile = super().roundtripContainer(cache_spec=cache_spec) - self.assertEqual(self.read_nwbfile.session_start_time, self.container.session_start_time) - self.assertIsInstance(self.read_nwbfile.session_start_time, date) - self.assertNotIsInstance(self.read_nwbfile.session_start_time, datetime) - return self.read_nwbfile - - class TestExperimentersConstructorRoundtrip(TestNWBFileIO): """ Test that a list of multiple experimenters in a constructor is written to and read from file """ def build_nwbfile(self): description = 'test nwbfile experimenter' identifier = 'TEST_experimenter' - self.container = NWBFile( - session_description=description, - identifier=identifier, - session_start_time=self.start_time, - experimenter=('experimenter1', 'experimenter2') - ) + self.nwbfile = NWBFile(session_description=description, + identifier=identifier, + session_start_time=self.start_time, + experimenter=('experimenter1', 'experimenter2')) class TestExperimentersSetterRoundtrip(TestNWBFileIO): @@ -220,12 +170,10 @@ class TestExperimentersSetterRoundtrip(TestNWBFileIO): def build_nwbfile(self): description = 'test nwbfile experimenter' identifier = 'TEST_experimenter' - self.container = NWBFile( - session_description=description, - identifier=identifier, - session_start_time=self.start_time - ) - self.container.experimenter = ('experimenter1', 'experimenter2') + self.nwbfile 
= NWBFile(session_description=description, + identifier=identifier, + session_start_time=self.start_time) + self.nwbfile.experimenter = ('experimenter1', 'experimenter2') class TestPublicationsConstructorRoundtrip(TestNWBFileIO): @@ -234,12 +182,10 @@ class TestPublicationsConstructorRoundtrip(TestNWBFileIO): def build_nwbfile(self): description = 'test nwbfile publications' identifier = 'TEST_publications' - self.container = NWBFile( - session_description=description, - identifier=identifier, - session_start_time=self.start_time, - related_publications=('pub1', 'pub2') - ) + self.nwbfile = NWBFile(session_description=description, + identifier=identifier, + session_start_time=self.start_time, + related_publications=('pub1', 'pub2')) class TestPublicationsSetterRoundtrip(TestNWBFileIO): @@ -248,12 +194,10 @@ class TestPublicationsSetterRoundtrip(TestNWBFileIO): def build_nwbfile(self): description = 'test nwbfile publications' identifier = 'TEST_publications' - self.container = NWBFile( - session_description=description, - identifier=identifier, - session_start_time=self.start_time - ) - self.container.related_publications = ('pub1', 'pub2') + self.nwbfile = NWBFile(session_description=description, + identifier=identifier, + session_start_time=self.start_time) + self.nwbfile.related_publications = ('pub1', 'pub2') class TestSubjectIO(NWBH5IOMixin, TestCase): @@ -307,55 +251,6 @@ def getContainer(self, nwbfile): return nwbfile.subject -class TestSubjectDOBNoDateSetIO(NWBH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Subject """ - return Subject( - age="P90D", - description="An unfortunate rat", - genotype="WT", - sex="M", - species="Rattus norvegicus", - subject_id="RAT123", - weight="2 kg", - date_of_birth=date(2024, 4, 9), - strain="my_strain", - ) - - def addContainer(self, nwbfile): - """ Add the test Subject to the given NWBFile """ - nwbfile.subject = self.container - - def getContainer(self, nwbfile): - """ Return the test Subject from the given NWBFile """ - return nwbfile.subject - - -class TestSubjectMinimalSetIO(NWBH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Subject """ - return Subject( - age="P90D", - description="An unfortunate rat", - genotype="WT", - sex="M", - species="Rattus norvegicus", - subject_id="RAT123", - weight="2 kg", - strain="my_strain", - ) - - def addContainer(self, nwbfile): - """ Add the test Subject to the given NWBFile """ - nwbfile.subject = self.container - - def getContainer(self, nwbfile): - """ Return the test Subject from the given NWBFile """ - return nwbfile.subject - - class TestEmptySubjectIO(TestSubjectIO): def setUpContainer(self): diff --git a/tests/unit/test_epoch.py b/tests/unit/test_epoch.py index 4bf2637df..318ad3943 100644 --- a/tests/unit/test_epoch.py +++ b/tests/unit/test_epoch.py @@ -1,6 +1,7 @@ import numpy as np import pandas as pd from datetime import datetime +from dateutil import tz from pynwb.epoch import TimeIntervals from pynwb import TimeSeries, NWBFile @@ -66,7 +67,7 @@ def test_dataframe_roundtrip_drop_ts(self): self.assertEqual(obtained.loc[2, 'foo'], df.loc[2, 'foo']) def test_no_tags(self): - nwbfile = NWBFile("a file with header data", "NB123A", datetime(1970, 1, 1)) + nwbfile = NWBFile("a file with header data", "NB123A", datetime(1970, 1, 1, tzinfo=tz.tzutc())) df = self.get_dataframe() for i, row in df.iterrows(): nwbfile.add_epoch(start_time=row['start_time'], stop_time=row['stop_time']) diff --git a/tests/unit/test_extension.py b/tests/unit/test_extension.py 
index abbb6511a..7664bbf22 100644 --- a/tests/unit/test_extension.py +++ b/tests/unit/test_extension.py @@ -2,6 +2,7 @@ import random import string from datetime import datetime +from dateutil.tz import tzlocal from tempfile import gettempdir from hdmf.spec import RefSpec @@ -107,7 +108,7 @@ def __init__(self, **kwargs): super().__init__(**kwargs) self.test_attr = test_attr - nwbfile = NWBFile("a file with header data", "NB123A", datetime(2017, 5, 1, 12, 0, 0)) + nwbfile = NWBFile("a file with header data", "NB123A", datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal())) nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.)) @@ -127,7 +128,7 @@ def test_lab_meta_auto(self): MyTestMetaData = get_class('MyTestMetaData', self.prefix) - nwbfile = NWBFile("a file with header data", "NB123A", datetime(2017, 5, 1, 12, 0, 0)) + nwbfile = NWBFile("a file with header data", "NB123A", datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal())) nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.)) diff --git a/tests/unit/test_file.py b/tests/unit/test_file.py index fc44dfdd8..98446fa46 100644 --- a/tests/unit/test_file.py +++ b/tests/unit/test_file.py @@ -1,7 +1,7 @@ import numpy as np import pandas as pd -from datetime import datetime, date, timedelta +from datetime import datetime, timedelta from dateutil.tz import tzlocal, tzutc from hdmf.common import DynamicTable @@ -9,7 +9,7 @@ from hdmf.utils import docval, get_docval, popargs from pynwb import NWBFile, TimeSeries, NWBHDF5IO from pynwb.base import Image, Images -from pynwb.file import Subject, ElectrodeTable +from pynwb.file import Subject, ElectrodeTable, _add_missing_timezone from pynwb.epoch import TimeIntervals from pynwb.ecephys import ElectricalSeries from pynwb.testing import TestCase, remove_test_file @@ -19,10 +19,9 @@ class NWBFileTest(TestCase): def setUp(self): self.start = datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()) self.ref_time = datetime(1979, 1, 1, 0, tzinfo=tzutc()) - # try some dates with/without timezone and time self.create = [datetime(2017, 5, 1, 12, tzinfo=tzlocal()), - datetime(2017, 5, 2, 13), - datetime(2017, 5, 2)] + datetime(2017, 5, 2, 13, 0, 0, 1, tzinfo=tzutc()), + datetime(2017, 5, 2, 14, tzinfo=tzutc())] self.path = 'nwbfile_test.h5' self.nwbfile = NWBFile(session_description='a test session description for a test NWBFile', identifier='FILE123', @@ -503,22 +502,6 @@ def test_multi_publications(self): related_publications=('pub1', 'pub2')) self.assertTupleEqual(self.nwbfile.related_publications, ('pub1', 'pub2')) - def test_session_start_time_no_timezone(self): - self.nwbfile = NWBFile( - session_description='a test session description for a test NWBFile', - identifier='FILE123', - session_start_time=datetime(2024, 4, 10, 0, 21), - ) - self.assertIsNone(self.nwbfile.session_start_time.tzinfo) - - def test_session_start_time_no_time(self): - self.nwbfile = NWBFile( - session_description='a test session description for a test NWBFile', - identifier='FILE123', - session_start_time=date(2024, 4, 10), - ) - self.assertEqual(self.nwbfile.session_start_time, date(2024, 4, 10)) - class SubjectTest(TestCase): def setUp(self): @@ -534,7 +517,7 @@ def setUp(self): date_of_birth=datetime(2017, 5, 1, 12, tzinfo=tzlocal()), strain='my_strain', ) - self.start = datetime(2017, 5, 1, 12) + self.start = datetime(2017, 5, 1, 12, tzinfo=tzlocal()) self.path = 'nwbfile_test.h5' self.nwbfile = NWBFile( 'a test session description for a test NWBFile', @@ -603,35 +586,6 @@ def 
test_subject_age_duration(self): self.assertEqual(subject.age, "P1DT3H46M39S") - def test_dob_no_timezone(self): - self.subject = Subject( - age='P90D', - age__reference="birth", - description='An unfortunate rat', - genotype='WT', - sex='M', - species='Rattus norvegicus', - subject_id='RAT123', - weight='2 kg', - date_of_birth=datetime(2024, 4, 10, 0, 21), - strain='my_strain', - ) - - def test_dob_no_time(self): - self.subject = Subject( - age='P90D', - age__reference="birth", - description='An unfortunate rat', - genotype='WT', - sex='M', - species='Rattus norvegicus', - subject_id='RAT123', - weight='2 kg', - date_of_birth=date(2024, 4, 10), - strain='my_strain', - ) - - class TestCacheSpec(TestCase): """Test whether the file can be written and read when caching the spec.""" @@ -689,3 +643,22 @@ def test_reftime_default(self): # 'timestamps_reference_time' should default to 'session_start_time' self.assertEqual(self.nwbfile.timestamps_reference_time, self.start_time) + +class TestTimestampsRefAware(TestCase): + def setUp(self): + self.start_time = datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()) + self.ref_time_notz = datetime(1979, 1, 1, 0, 0, 0) + + def test_reftime_tzaware(self): + with self.assertRaises(ValueError): + # 'timestamps_reference_time' must be a timezone-aware datetime + NWBFile('test session description', + 'TEST124', + self.start_time, + timestamps_reference_time=self.ref_time_notz) + + +class TestTimezone(TestCase): + def test_raise_warning__add_missing_timezone(self): + with self.assertWarnsWith(UserWarning, "Date is missing timezone information. Updating to local timezone."): + _add_missing_timezone(datetime(2017, 5, 1, 12)) diff --git a/tests/unit/test_icephys.py b/tests/unit/test_icephys.py index 8ff1a67ba..e0e8332f9 100644 --- a/tests/unit/test_icephys.py +++ b/tests/unit/test_icephys.py @@ -14,6 +14,7 @@ from pynwb.testing import TestCase from pynwb.file import NWBFile # Needed to test icephys functionality defined on NWBFile from datetime import datetime +from dateutil.tz import tzlocal def GetElectrode(): @@ -45,7 +46,7 @@ def test_sweep_table_depractation_warn(self): _ = NWBFile( session_description='NWBFile icephys test', identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11), + session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), ic_electrodes=[self.icephys_electrode, ], sweep_table=SweepTable()) @@ -56,14 +57,14 @@ def test_ic_electrodes_parameter_deprecation(self): _ = NWBFile( session_description='NWBFile icephys test', identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11), + session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), ic_electrodes=[self.icephys_electrode, ]) def test_icephys_electrodes_parameter(self): nwbfile = NWBFile( session_description='NWBFile icephys test', identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11), + session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), icephys_electrodes=[self.icephys_electrode, ]) self.assertEqual(nwbfile.get_icephys_electrode('test_iS'), self.icephys_electrode) @@ -72,7 +73,7 @@ def test_add_ic_electrode_deprecation(self): nwbfile = NWBFile( session_description='NWBFile icephys test', identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11)) + session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal())) msg = "NWBFile.add_ic_electrode has been replaced by NWBFile.add_icephys_electrode." 
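
For orientation, the timezone rules these tests pin down can be summarized in a short sketch (editor's illustration, not part of the patch; the session descriptions and identifiers below are placeholders):

    from datetime import datetime
    from dateutil.tz import tzutc
    from pynwb import NWBFile

    # A naive session_start_time triggers the UserWarning tested above and has
    # the local timezone attached via _add_missing_timezone().
    nwbfile = NWBFile("demo session", "DEMO123", datetime(2017, 5, 1, 12))
    assert nwbfile.session_start_time.tzinfo is not None

    # A naive timestamps_reference_time is rejected outright.
    try:
        NWBFile("demo session", "DEMO124",
                datetime(2017, 5, 1, 12, tzinfo=tzutc()),
                timestamps_reference_time=datetime(1979, 1, 1))
    except ValueError as err:
        print(err)  # 'timestamps_reference_time' must be a timezone-aware datetime object.
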
with self.assertWarnsWith(DeprecationWarning, msg): @@ -82,7 +83,7 @@ def test_ic_electrodes_attribute_deprecation(self): nwbfile = NWBFile( session_description='NWBFile icephys test', identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11), + session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), icephys_electrodes=[self.icephys_electrode, ]) # make sure NWBFile.ic_electrodes property warns @@ -99,7 +100,7 @@ def test_create_ic_electrode_deprecation(self): nwbfile = NWBFile( session_description='NWBFile icephys test', identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11)) + session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal())) device = Device(name='device_name') msg = "NWBFile.create_ic_electrode has been replaced by NWBFile.create_icephys_electrode." with self.assertWarnsWith(DeprecationWarning, msg): diff --git a/tests/unit/test_scratch.py b/tests/unit/test_scratch.py index c04a00e99..398ae2f78 100644 --- a/tests/unit/test_scratch.py +++ b/tests/unit/test_scratch.py @@ -1,4 +1,5 @@ from datetime import datetime +from dateutil.tz import tzlocal import numpy as np from numpy.testing import assert_array_equal import pandas as pd @@ -14,7 +15,7 @@ def setUp(self): self.nwbfile = NWBFile( session_description='a file to test writing and reading scratch data', identifier='TEST_scratch', - session_start_time=datetime(2017, 5, 1, 12, 0, 0) + session_start_time=datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()) ) def test_constructor_list(self): From 04a6506a86aa829d20bf1ee32ad8e5221058ff9e Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Wed, 29 May 2024 07:43:28 -0700 Subject: [PATCH 08/16] Prepare for release of PyNWB 2.8.0 (#1910) --- .readthedocs.yaml | 1 + CHANGELOG.md | 2 +- docs/gallery/domain/ecephys.py | 2 +- docs/gallery/domain/ophys.py | 2 +- environment-ros3.yml | 6 +++--- requirements-dev.txt | 16 ++++++++-------- requirements-doc.txt | 1 + requirements.txt | 2 +- 8 files changed, 17 insertions(+), 15 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index a06d0280a..0bd5eba50 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -25,6 +25,7 @@ formats: all python: install: - requirements: requirements-doc.txt + - requirements: requirements-opt.txt - requirements: requirements.txt # Optionally include all submodules diff --git a/CHANGELOG.md b/CHANGELOG.md index 34665c932..60865ad30 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,7 @@ - Exposed `aws_region` to `NWBHDF5IO`. @rly [#1903](https://github.com/NeurodataWithoutBorders/pynwb/pull/1903) ### Bug fixes -- Revert changes in PyNWB 2.7.0 that allow datetimes without a timezone and without a time while issues with DANDI upload are resolved. @rly [#1908](https://github.com/NeurodataWithoutBorders/pynwb/pull/1908) +- Revert changes in PyNWB 2.7.0 that allow datetimes without a timezone and without a time while issues with DANDI upload of NWB files missing timezone are resolved. @rly [#1908](https://github.com/NeurodataWithoutBorders/pynwb/pull/1908) ## PyNWB 2.7.0 (May 2, 2024) diff --git a/docs/gallery/domain/ecephys.py b/docs/gallery/domain/ecephys.py index 406f2b789..208c11d24 100644 --- a/docs/gallery/domain/ecephys.py +++ b/docs/gallery/domain/ecephys.py @@ -16,7 +16,7 @@ It is recommended to cover :ref:`basics` before this tutorial. .. note:: It is recommended to check if your source data is supported by - `NeuroConv Extracellular Electrophysiology Gallery `_. + `NeuroConv Extracellular Electrophysiology Gallery `_. 
If it is supported, it is recommended to use NeuroConv to convert your data. The following examples will reference variables that may not be defined within the block they are used in. For diff --git a/docs/gallery/domain/ophys.py b/docs/gallery/domain/ophys.py index 277e408db..8057a7314 100644 --- a/docs/gallery/domain/ophys.py +++ b/docs/gallery/domain/ophys.py @@ -15,7 +15,7 @@ It is recommended to cover :ref:`basics` before this tutorial. .. note:: It is recommended to check if your source data is supported by - `NeuroConv Optical Physiology Gallery `_. + `NeuroConv Optical Physiology Gallery `_. If it is supported, it is recommended to use NeuroConv to convert your data. The following examples will reference variables that may not be defined within the block they are used in. For diff --git a/environment-ros3.yml b/environment-ros3.yml index 92e17b5dd..21dcc5a9c 100644 --- a/environment-ros3.yml +++ b/environment-ros3.yml @@ -9,8 +9,8 @@ dependencies: - hdmf==3.14.0 - matplotlib==3.8.0 - numpy==1.26.4 - - pandas==2.2.1 - - python-dateutil==2.8.2 + - pandas==2.2.2 + - python-dateutil==2.9.0 - setuptools - pytest==7.4.3 # This is for the upcoming pytest update - dandi==0.60.0 # NOTE: dandi does not support osx-arm64 @@ -19,4 +19,4 @@ dependencies: - aiohttp==3.9.3 - pip - pip: - - remfile==0.1.9 + - remfile==0.1.11 diff --git a/requirements-dev.txt b/requirements-dev.txt index 173090a57..37853354d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,11 +2,11 @@ # compute coverage, and create test environments. note that depending on the version of python installed, different # versions of requirements may be installed due to package incompatibilities. # -black==24.3.0 -codespell==2.2.6 -coverage==7.3.2 -pytest==7.4.3 -isort==5.12.0 -pytest-cov==4.1.0 -tox==4.11.3 -ruff==0.1.3 +black==24.4.2 +codespell==2.3.0 +coverage==7.5.3 +pytest==8.2.1 +isort==5.13.2 +pytest-cov==5.0.0 +tox==4.15.0 +ruff==0.4.6 diff --git a/requirements-doc.txt b/requirements-doc.txt index c37aee646..90633ce88 100644 --- a/requirements-doc.txt +++ b/requirements-doc.txt @@ -13,3 +13,4 @@ lxml # used by dataframe_image when using the matplotlib backend hdf5plugin dandi>=0.46.6 hdmf-zarr +zarr<2.18.0 # limited until hdmf-zarr 0.8.0 is released to resolve issues with zarr>=2.18.0 diff --git a/requirements.txt b/requirements.txt index c1fd347ff..5b3c49ded 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,5 +2,5 @@ h5py==3.11.0 hdmf==3.14.0 numpy==1.26.4 -pandas==2.2.1 +pandas==2.2.2 python-dateutil==2.9.0.post0 From 194ce9c16068d6bd8e63067494eaa013a8a4e4ee Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Wed, 12 Jun 2024 22:30:23 -0700 Subject: [PATCH 09/16] Restructure/simplify plot_file gallery (#1914) --- CHANGELOG.md | 6 + docs/gallery/general/plot_file.py | 260 ++++++++++-------------------- 2 files changed, 95 insertions(+), 171 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 60865ad30..8dc6cd988 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # PyNWB Changelog +## PyNWB 2.8.1 (Upcoming) + +### Documentation and tutorial enhancements +- Simplified the introduction to NWB tutorial. 
@rly [#1914](https://github.com/NeurodataWithoutBorders/pynwb/pull/1914) + + ## PyNWB 2.8.0 (May 28, 2024) ### Enhancements and minor changes diff --git a/docs/gallery/general/plot_file.py b/docs/gallery/general/plot_file.py index 5c59abf8d..ec8cf75a8 100644 --- a/docs/gallery/general/plot_file.py +++ b/docs/gallery/general/plot_file.py @@ -7,9 +7,7 @@ This example will focus on the basics of working with an :py:class:`~pynwb.file.NWBFile` object, including writing and reading of an NWB file, and giving you an introduction to the basic data types. Before we dive into code showing how to use an :py:class:`~pynwb.file.NWBFile`, we first provide -a brief overview of the basic concepts of NWB. If you are already familiar with the concepts of -:ref:`timeseries_overview` and :ref:`modules_overview`, then feel free to skip the :ref:`basics_background` -part and go directly to :ref:`basics_nwbfile`. +a brief overview of the basic concepts of NWB. .. _basics_background: @@ -20,8 +18,8 @@ represented by a separate NWB file. NWB files are represented in PyNWB by :py:class:`~pynwb.file.NWBFile` objects which provide functionality for creating and retrieving: - * :ref:`timeseries_overview` datasets, i.e., objects for storing time series data - * :ref:`modules_overview`, i.e., objects for storing and grouping analyses, and + * :ref:`timeseries_overview` datasets -- objects for storing time series data + * :ref:`modules_overview` -- objects for storing and grouping analyses, and * experiment metadata and other metadata related to data provenance. The following sections describe the :py:class:`~pynwb.base.TimeSeries` and :py:class:`~pynwb.base.ProcessingModule` @@ -58,17 +56,18 @@ * **Optical physiology and imaging:** :py:class:`~pynwb.image.ImageSeries` is the base type for image recordings and is further refined by the :py:class:`~pynwb.image.ImageMaskSeries`, - :py:class:`~pynwb.image.OpticalSeries`, and + :py:class:`~pynwb.image.OpticalSeries`, + :py:class:`~pynwb.ophys.OnePhotonSeries`, and :py:class:`~pynwb.ophys.TwoPhotonSeries` types. Other related time series types are: - :py:class:`~pynwb.image.IndexSeries` and + :py:class:`~pynwb.image.IndexSeries`, :py:class:`~pynwb.ophys.RoiResponseSeries`. - * **Others** :py:class:`~pynwb.ogen.OptogeneticSeries`, + * **Others:** :py:class:`~pynwb.ogen.OptogeneticSeries`, :py:class:`~pynwb.behavior.SpatialSeries`, :py:class:`~pynwb.misc.DecompositionSeries`, :py:class:`~pynwb.misc.AnnotationSeries`, - :py:class:`~pynwb.misc.AbstractFeatureSeries`, and + :py:class:`~pynwb.misc.AbstractFeatureSeries`, :py:class:`~pynwb.misc.IntervalSeries`. @@ -78,7 +77,7 @@ ^^^^^^^^^^^^^^^^^^ Processing modules are objects that group together common analyses done during processing of data. -Processing module objects are unique collections of analysis results. To standardize the storage of + To standardize the storage of common analyses, NWB provides the concept of an :py:class:`~pynwb.core.NWBDataInterface`, where the output of common analyses are represented as objects that extend the :py:class:`~pynwb.core.NWBDataInterface` class. In most cases, you will not need to interact with the :py:class:`~pynwb.core.NWBDataInterface` class directly. @@ -87,7 +86,7 @@ .. 
seealso:: - For your reference, NWB defines the following main analysis :py:class:`~pynwb.core.NWBDataInterface` subtypes: + For your reference, NWB defines the following main processing/analysis data types: * **Behavior:** :py:class:`~pynwb.behavior.BehavioralEpochs`, :py:class:`~pynwb.behavior.BehavioralEvents`, @@ -110,13 +109,7 @@ * **Others:** :py:class:`~pynwb.base.Images`. - * **TimeSeries:** Any :ref:`timeseries_overview` is also a subclass of :py:class:`~pynwb.core.NWBDataInterface` - and can be used anywhere :py:class:`~pynwb.core.NWBDataInterface` is allowed. - -.. note:: - - In addition to :py:class:`~pynwb.core.NWBContainer`, which functions as a common base type for Group objects, - :py:class:`~pynwb.core.NWBData` provides a common base for the specification of datasets in the NWB format. + * **TimeSeries:** Any :py:class:`~pynwb.base.TimeSeries` can be used to store processing/analysis data. NWB organizes data into different groups depending on the type of data. Groups can be thought of as folders within the file. Here are some of the groups within an :py:class:`~pynwb.file.NWBFile` and the types of @@ -142,7 +135,6 @@ from pynwb import NWBHDF5IO, NWBFile, TimeSeries from pynwb.behavior import Position, SpatialSeries -from pynwb.epoch import TimeIntervals from pynwb.file import Subject #################### @@ -178,7 +170,8 @@ lab="Bag End Laboratory", # optional institution="University of My Institution", # optional experiment_description="I went on an adventure to reclaim vast treasures.", # optional - related_publications="DOI:10.1016/j.neuron.2016.12.011", # optional + keywords=["behavior", "exploration", "wanderlust"], # optional + related_publications="doi:10.1016/j.neuron.2016.12.011", # optional ) nwbfile @@ -221,7 +214,7 @@ # * **sex**: Single letter abbreviation, e.g., ``"F"`` (female), ``"M"`` (male), ``"U"`` (unknown), and ``"O"`` (other) # # Add the subject information to the :py:class:`~pynwb.file.NWBFile` -# by setting the ``subject`` field to the new :py:class:`~pynwb.file.Subject` object. +# by setting the ``subject`` field to a new :py:class:`~pynwb.file.Subject` object. subject = Subject( subject_id="001", @@ -251,11 +244,12 @@ # :align: center # # For instance, we can store a :py:class:`~pynwb.base.TimeSeries` data where recording started -# ``0.0`` seconds after ``start_time`` and sampled every second: +# ``0.0`` seconds after ``start_time`` and sampled every second (1 Hz): -data = list(range(100, 200, 10)) +data = np.arange(100, 200, 10) time_series_with_rate = TimeSeries( name="test_timeseries", + description="an example time series", data=data, unit="m", starting_time=0.0, @@ -266,9 +260,10 @@ #################### # For irregularly sampled recordings, we need to provide the ``timestamps`` for the ``data``: -timestamps = list(range(10)) +timestamps = np.arange(10.) time_series_with_timestamps = TimeSeries( name="test_timeseries", + description="an example time series", data=data, unit="m", timestamps=timestamps, @@ -278,7 +273,7 @@ #################### # :py:class:`~pynwb.base.TimeSeries` objects can be added directly to :py:class:`~pynwb.file.NWBFile` using: # -# * :py:meth:`.NWBFile.add_acquisition` to add *acquisition* data (raw, acquired data that should never change), +# * :py:meth:`.NWBFile.add_acquisition` to add *acquisition* data (raw, acquired data that should never change), # * :py:meth:`.NWBFile.add_stimulus` to add *stimulus* data, or # * :py:meth:`.NWBFile.add_stimulus_template` to store *stimulus templates*. 
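
To make the three entry points listed above concrete, here is an editor's sketch (not text from the patch; the names "demo session", "DEMO1", "raw", and "stim" are illustrative placeholders):

    from datetime import datetime
    from dateutil.tz import tzlocal
    from pynwb import NWBFile, TimeSeries

    nwbfile = NWBFile(session_description="demo session", identifier="DEMO1",
                      session_start_time=datetime(2024, 1, 1, tzinfo=tzlocal()))

    raw = TimeSeries(name="raw", description="placeholder acquired signal",
                     data=[0., 1., 2.], unit="m", rate=1.0)
    stim = TimeSeries(name="stim", description="placeholder stimulus",
                      data=[1., 2., 3.], unit="volts", rate=10.0)

    nwbfile.add_acquisition(raw)   # raw, acquired data -> /acquisition
    nwbfile.add_stimulus(stim)     # presented stimulus -> /stimulus/presentation
    assert "raw" in nwbfile.acquisition and "stim" in nwbfile.stimulus
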
# @@ -408,108 +403,6 @@ nwbfile.processing["behavior"] -#################### -# .. _basic_writing: -# -# Writing an NWB file -# ------------------- -# -# NWB I/O is carried out using the :py:class:`~pynwb.NWBHDF5IO` class [#]_. This class is responsible -# for mapping an :py:class:`~pynwb.file.NWBFile` object into HDF5 according to the NWB schema. -# -# To write an :py:class:`~pynwb.file.NWBFile`, use the :py:meth:`~hdmf.backends.io.HDMFIO.write` method. - -io = NWBHDF5IO("basics_tutorial.nwb", mode="w") -io.write(nwbfile) -io.close() - -#################### -# You can also use :py:meth:`~pynwb.NWBHDF5IO` as a context manager: - -with NWBHDF5IO("basics_tutorial.nwb", "w") as io: - io.write(nwbfile) - -#################### -# .. _basic_reading: -# -# Reading an NWB file -# ------------------- -# -# As with writing, reading is also carried out using the :py:class:`~pynwb.NWBHDF5IO` class. -# To read the NWB file we just wrote, use another :py:class:`~pynwb.NWBHDF5IO` object, -# and use the :py:meth:`~pynwb.NWBHDF5IO.read` method to retrieve an -# :py:class:`~pynwb.file.NWBFile` object. -# -# Data arrays are read passively from the file. -# Accessing the ``data`` attribute of the :py:class:`~pynwb.base.TimeSeries` object -# does not read the data values, but presents an HDF5 object that can be indexed to read data. -# You can use the ``[:]`` operator to read the entire data array into memory. - -with NWBHDF5IO("basics_tutorial.nwb", "r") as io: - read_nwbfile = io.read() - print(read_nwbfile.acquisition["test_timeseries"]) - print(read_nwbfile.acquisition["test_timeseries"].data[:]) - -#################### -# It is often preferable to read only a portion of the data. -# To do this, index or slice into the ``data`` attribute just like you -# index or slice a numpy array. - -with NWBHDF5IO("basics_tutorial.nwb", "r") as io: - read_nwbfile = io.read() - print(read_nwbfile.acquisition["test_timeseries"].data[:2]) - -#################### -# .. note:: -# If you use :py:class:`~pynwb.NWBHDF5IO` as a context manager during read, -# be aware that the :py:class:`~pynwb.NWBHDF5IO` gets closed and when the -# context completes and the data will not be available outside of the -# context manager [#]_. - -#################### -# Accessing data -# ^^^^^^^^^^^^^^^ -# -# We can also access the :py:class:`~pynwb.behavior.SpatialSeries` data by referencing the names -# of the objects in the hierarchy that contain it. We can access a processing module by indexing -# ``nwbfile.processing`` with the name of the processing module, ``"behavior"``. -# -# Then, we can access the :py:class:`~pynwb.behavior.Position` object inside of the ``"behavior"`` -# processing module by indexing it with the name of the :py:class:`~pynwb.behavior.Position` object, -# ``"Position"``. -# -# Finally, we can access the :py:class:`~pynwb.behavior.SpatialSeries` object inside of the -# :py:class:`~pynwb.behavior.Position` object by indexing it with the name of the -# :py:class:`~pynwb.behavior.SpatialSeries` object, ``"SpatialSeries"``. - -with NWBHDF5IO("basics_tutorial.nwb", "r") as io: - read_nwbfile = io.read() - print(read_nwbfile.processing["behavior"]) - print(read_nwbfile.processing["behavior"]["Position"]) - print(read_nwbfile.processing["behavior"]["Position"]["SpatialSeries"]) - -#################### -# .. 
_reuse_timestamps:
-#
-# Reusing timestamps
-# ------------------
-#
-# When working with multi-modal data, it can be convenient and efficient to store timestamps once and associate multiple
-# data with the single timestamps instance. PyNWB enables this by letting you reuse timestamps across
-# :py:class:`~pynwb.base.TimeSeries` objects. To reuse a :py:class:`~pynwb.base.TimeSeries` timestamps in a new
-# :py:class:`~pynwb.base.TimeSeries`, pass the existing :py:class:`~pynwb.base.TimeSeries` as the new
-# :py:class:`~pynwb.base.TimeSeries` timestamps:
-
-data = list(range(101, 201, 10))
-reuse_ts = TimeSeries(
-    name="reusing_timeseries",
-    data=data,
-    unit="SIunit",
-    timestamps=time_series_with_timestamps,
-)
-
-
 ####################
 # Time Intervals
 # --------------
@@ -573,60 +466,83 @@ nwbfile.trials.to_dataframe()
 ####################
-# .. _basic_epochs:
+# .. _basic_writing:
 #
-# Epochs
-# ^^^^^^
+# Writing an NWB file
+# -------------------
 #
-# Like trials, epochs can be added to an NWB file using the methods
-# :py:meth:`.NWBFile.add_epoch_column` and :py:meth:`.NWBFile.add_epoch`.
-# The third argument is one or more tags for labeling the epoch, and the fourth argument is a
-# list of all the :py:class:`~pynwb.base.TimeSeries` that the epoch applies
-# to.
-
-nwbfile.add_epoch(
-    start_time=2.0,
-    stop_time=4.0,
-    tags=["first", "example"],
-    timeseries=[time_series_with_timestamps],
-)
+# Writing of an NWB file is carried out using the :py:class:`~pynwb.NWBHDF5IO` class [#]_.
+#
+# To write an :py:class:`~pynwb.file.NWBFile`, use the :py:meth:`~hdmf.backends.io.HDMFIO.write` method.

-nwbfile.add_epoch(
-    start_time=6.0,
-    stop_time=8.0,
-    tags=["second", "example"],
-    timeseries=[time_series_with_timestamps],
-)
+io = NWBHDF5IO("basics_tutorial.nwb", mode="w")
+io.write(nwbfile)
+io.close()
+
+####################
+# You can also use :py:meth:`~pynwb.NWBHDF5IO` as a context manager:

-nwbfile.epochs.to_dataframe()
+with NWBHDF5IO("basics_tutorial.nwb", "w") as io:
+    io.write(nwbfile)

 ####################
-# Other time intervals
-# ^^^^^^^^^^^^^^^^^^^^
-# These :py:class:`~pynwb.epoch.TimeIntervals` objects are stored in ``NWBFile.intervals``. In addition to the default
-# ``epochs`` and ``trials``, you can also add your own with custom names.
-
-sleep_stages = TimeIntervals(
-    name="sleep_stages",
-    description="intervals for each sleep stage as determined by EEG",
-)
+# .. _basic_reading:
+#
+# Reading an NWB file
+# -------------------
+#
+# As with writing, reading is also carried out using the :py:class:`~pynwb.NWBHDF5IO` class.
+# To read the NWB file we just wrote, create another :py:class:`~pynwb.NWBHDF5IO` object with the mode set to ``"r"``,
+# and use the :py:meth:`~pynwb.NWBHDF5IO.read` method to retrieve an
+# :py:class:`~pynwb.file.NWBFile` object.
+#
+# Data arrays are read passively from the file.
+# Accessing the ``data`` attribute of the :py:class:`~pynwb.base.TimeSeries` object
+# does not read the data values, but presents an HDF5 object that can be indexed to read data.
+# You can use the ``[:]`` operator to read the entire data array into memory.
-sleep_stages.add_column(name="stage", description="stage of sleep") -sleep_stages.add_column(name="confidence", description="confidence in stage (0-1)") +with NWBHDF5IO("basics_tutorial.nwb", "r") as io: + read_nwbfile = io.read() + print(read_nwbfile.acquisition["test_timeseries"]) + print(read_nwbfile.acquisition["test_timeseries"].data[:]) -sleep_stages.add_row(start_time=0.3, stop_time=0.5, stage=1, confidence=0.5) -sleep_stages.add_row(start_time=0.7, stop_time=0.9, stage=2, confidence=0.99) -sleep_stages.add_row(start_time=1.3, stop_time=3.0, stage=3, confidence=0.7) +#################### +# It is often preferable to read only a portion of the data. +# To do this, index or slice into the ``data`` attribute just like you +# index or slice a numpy array. -nwbfile.add_time_intervals(sleep_stages) +with NWBHDF5IO("basics_tutorial.nwb", "r") as io: + read_nwbfile = io.read() + print(read_nwbfile.acquisition["test_timeseries"].data[:2]) -sleep_stages.to_dataframe() +#################### +# .. note:: +# If you use :py:class:`~pynwb.NWBHDF5IO` as a context manager during read, +# be aware that the :py:class:`~pynwb.NWBHDF5IO` gets closed and when the +# context completes and the data will not be available outside of the +# context manager [#]_. #################### -# Now we overwrite the file with all of the data +# Accessing data +# ^^^^^^^^^^^^^^^ +# +# We can also access the :py:class:`~pynwb.behavior.SpatialSeries` data by referencing the names +# of the objects in the hierarchy that contain it. We can access a processing module by indexing +# ``nwbfile.processing`` with the name of the processing module, ``"behavior"``. +# +# Then, we can access the :py:class:`~pynwb.behavior.Position` object inside of the ``"behavior"`` +# processing module by indexing it with the name of the :py:class:`~pynwb.behavior.Position` object, +# ``"Position"``. +# +# Finally, we can access the :py:class:`~pynwb.behavior.SpatialSeries` object inside of the +# :py:class:`~pynwb.behavior.Position` object by indexing it with the name of the +# :py:class:`~pynwb.behavior.SpatialSeries` object, ``"SpatialSeries"``. -with NWBHDF5IO("basics_tutorial.nwb", "w") as io: - io.write(nwbfile) +with NWBHDF5IO("basics_tutorial.nwb", "r") as io: + read_nwbfile = io.read() + print(read_nwbfile.processing["behavior"]) + print(read_nwbfile.processing["behavior"]["Position"]) + print(read_nwbfile.processing["behavior"]["Position"]["SpatialSeries"]) #################### # .. _basic_appending: @@ -638,14 +554,16 @@ # After you have read the file, you can add [#]_ new data to it using the standard write/add functionality demonstrated # above. Let's see how this works by adding another :py:class:`~pynwb.base.TimeSeries` to acquisition. - io = NWBHDF5IO("basics_tutorial.nwb", mode="a") nwbfile = io.read() +data = np.arange(100, 200, 10) +timestamps = np.arange(10.) 
new_time_series = TimeSeries( name="new_time_series", - data=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - timestamps=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + description="a new time series", + data=data, + timestamps=timestamps, unit="n.a.", ) nwbfile.add_acquisition(new_time_series) From 90e60484415d38d3d566dc9fea63b6d99ffdbe43 Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Thu, 13 Jun 2024 09:18:18 -0700 Subject: [PATCH 10/16] Update ecephys and ophys tutorials (#1915) --- CHANGELOG.md | 1 + docs/gallery/domain/ecephys.py | 30 ++++++------ docs/gallery/domain/ophys.py | 76 +++++++++---------------------- docs/gallery/general/plot_file.py | 11 ++--- 4 files changed, 39 insertions(+), 79 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8dc6cd988..b02837174 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ ### Documentation and tutorial enhancements - Simplified the introduction to NWB tutorial. @rly [#1914](https://github.com/NeurodataWithoutBorders/pynwb/pull/1914) +- Simplified the ecephys and ophys tutorials. [#1915](https://github.com/NeurodataWithoutBorders/pynwb/pull/1915) ## PyNWB 2.8.0 (May 28, 2024) diff --git a/docs/gallery/domain/ecephys.py b/docs/gallery/domain/ecephys.py index 208c11d24..3f239eb77 100644 --- a/docs/gallery/domain/ecephys.py +++ b/docs/gallery/domain/ecephys.py @@ -39,7 +39,6 @@ # # When creating a NWB file, the first step is to create the :py:class:`~pynwb.file.NWBFile`. - nwbfile = NWBFile( session_description="my first synthetic recording", identifier=str(uuid4()), @@ -50,7 +49,8 @@ lab="Bag End Laboratory", institution="University of Middle Earth at the Shire", experiment_description="I went on an adventure to reclaim vast treasures.", - session_id="LONELYMTN001", + keywords=["ecephys", "exploration", "wanderlust"], + related_publications="doi:10.1016/j.neuron.2016.12.011", ) ####################### @@ -93,7 +93,6 @@ # additional user-specified metadata as custom columns of the table. We will be adding a ``"label"`` column to the # table. Use the following code to add electrodes for an array with 4 shanks and 3 channels per shank. - nwbfile.add_electrode_column(name="label", description="label of electrode") nshanks = 4 @@ -118,10 +117,9 @@ electrode_counter += 1 ####################### -# Similarly to the ``trials`` table, we can view the ``electrodes`` table in tabular form +# Similarly to other tables in PyNWB, we can view the ``electrodes`` table in tabular form # by converting it to a pandas :py:class:`~pandas.DataFrame`. - nwbfile.electrodes.to_dataframe() ####################### @@ -145,7 +143,6 @@ # convenience function that creates a :py:class:`~hdmf.common.table.DynamicTableRegion` which references the # ``"electrodes"`` table. - all_table_region = nwbfile.create_electrode_table_region( region=list(range(electrode_counter)), # reference row indices 0 to N-1 description="all electrodes", @@ -156,7 +153,7 @@ # ^^^^^^^^^^^^^^^^^ # # Now create an :py:class:`~pynwb.ecephys.ElectricalSeries` object to store raw data collected -# during the experiment, passing in this ``"all_table_region"`` :py:class:`~hdmf.common.table.DynamicTableRegion` +# during the experiment, passing in this ``all_table_region`` :py:class:`~hdmf.common.table.DynamicTableRegion` # reference to all rows of the electrodes table. # # .. 
only:: html @@ -174,10 +171,10 @@ # :align: center # - raw_data = np.random.randn(50, 12) raw_electrical_series = ElectricalSeries( name="ElectricalSeries", + description="Raw acquisition traces", data=raw_data, electrodes=all_table_region, starting_time=0.0, # timestamp of the first sample in seconds relative to the session start time @@ -188,7 +185,6 @@ # Since this :py:class:`~pynwb.ecephys.ElectricalSeries` represents raw data from the data acquisition system, # add it to the acquisition group of the :py:class:`~pynwb.file.NWBFile`. - nwbfile.add_acquisition(raw_electrical_series) #################### @@ -199,10 +195,10 @@ # again passing in the :py:class:`~hdmf.common.table.DynamicTableRegion` reference to all rows of the ``"electrodes"`` # table. - lfp_data = np.random.randn(50, 12) lfp_electrical_series = ElectricalSeries( name="ElectricalSeries", + description="LFP data", data=lfp_data, electrodes=all_table_region, starting_time=0.0, @@ -240,7 +236,6 @@ # This is analogous to how we can store the :py:class:`~pynwb.behavior.Position` object in a processing module # created with the method :py:meth:`.NWBFile.create_processing_module`. - ecephys_module = nwbfile.create_processing_module( name="ecephys", description="processed extracellular electrophysiology data" ) @@ -254,13 +249,16 @@ # # Spike times are stored in the :py:class:`~pynwb.misc.Units` table, which is a subclass of # :py:class:`~hdmf.common.table.DynamicTable`. Adding columns to the :py:class:`~pynwb.misc.Units` table is analogous -# to how we can add columns to the ``"electrodes"`` and ``"trials"`` tables. -# -# Generate some random spike data and populate the :py:class:`~pynwb.misc.Units` table using the -# method :py:meth:`.NWBFile.add_unit`. +# to how we can add columns to the ``"electrodes"`` and ``"trials"`` tables. Use the convenience method +# :py:meth:`.NWBFile.add_unit_column` to add a new column on the :py:class:`~pynwb.misc.Units` table for the +# sorting quality of the units. nwbfile.add_unit_column(name="quality", description="sorting quality") +#################### +# Generate some random spike data and populate the :py:class:`~pynwb.misc.Units` table using the +# method :py:meth:`.NWBFile.add_unit`. + firing_rate = 20 n_units = 10 res = 1000 @@ -272,7 +270,6 @@ ####################### # The :py:class:`~pynwb.misc.Units` table can also be converted to a pandas :py:class:`~pandas.DataFrame`. - nwbfile.units.to_dataframe() ####################### @@ -315,7 +312,6 @@ # Once you have finished adding all of your data to the :py:class:`~pynwb.file.NWBFile`, # write the file with :py:class:`~pynwb.NWBHDF5IO`. - with NWBHDF5IO("ecephys_tutorial.nwb", "w") as io: io.write(nwbfile) diff --git a/docs/gallery/domain/ophys.py b/docs/gallery/domain/ophys.py index 8057a7314..f8f6da98a 100644 --- a/docs/gallery/domain/ophys.py +++ b/docs/gallery/domain/ophys.py @@ -59,7 +59,8 @@ lab="Bag End Laboratory", institution="University of Middle Earth at the Shire", experiment_description="I went on an adventure to reclaim vast treasures.", - session_id="LONELYMTN001", + keywords=["ecephys", "exploration", "wanderlust"], + related_publications="doi:10.1016/j.neuron.2016.12.011", ) #################### @@ -87,7 +88,6 @@ # Create a :py:class:`~pynwb.device.Device` named ``"Microscope"`` in the :py:class:`~pynwb.file.NWBFile` object. Then # create an :py:class:`~pynwb.ophys.OpticalChannel` named ``"OpticalChannel"``. 
- device = nwbfile.create_device( name="Microscope", description="My two-photon microscope", @@ -123,39 +123,23 @@ # ----------------- # Now that we have our :py:class:`~pynwb.ophys.ImagingPlane`, we can create a # :py:class:`~pynwb.ophys.OnePhotonSeries` object to store raw one-photon imaging data. -# Here, we have two options. The first option is to supply the raw image data to PyNWB, -# using the data argument. The second option is to provide a path to the image files. -# These two options have trade-offs, so it is worth considering how you want to store -# this data. - -# using internal data. this data will be stored inside the NWB file -one_p_series1 = OnePhotonSeries( - name="OnePhotonSeries_internal", + +# the image data will be stored inside the NWB file +one_p_series = OnePhotonSeries( + name="OnePhotonSeries", + description="Raw 1p data", data=np.ones((1000, 100, 100)), imaging_plane=imaging_plane, rate=1.0, unit="normalized amplitude", ) -# using external data. only the file paths will be stored inside the NWB file -one_p_series2 = OnePhotonSeries( - name="OnePhotonSeries_external", - dimension=[100, 100], - external_file=["images.tiff"], - imaging_plane=imaging_plane, - starting_frame=[0], - format="external", - starting_time=0.0, - rate=1.0, -) - #################### # Since these one-photon data are acquired data, we will add the # :py:class:`~pynwb.ophys.OnePhotonSeries` objects to the :py:class:`~pynwb.file.NWBFile` # as acquired data. -nwbfile.add_acquisition(one_p_series1) -nwbfile.add_acquisition(one_p_series2) +nwbfile.add_acquisition(one_p_series) #################### # Two-photon Series @@ -178,29 +162,17 @@ # :align: center # -# using internal data. this data will be stored inside the NWB file -two_p_series1 = TwoPhotonSeries( - name="TwoPhotonSeries1", +# the image data will be stored inside the NWB file +two_p_series = TwoPhotonSeries( + name="TwoPhotonSeries", + description="Raw 2p data", data=np.ones((1000, 100, 100)), imaging_plane=imaging_plane, rate=1.0, unit="normalized amplitude", ) -# using external data. only the file paths will be stored inside the NWB file -two_p_series2 = TwoPhotonSeries( - name="TwoPhotonSeries2", - dimension=[100, 100], - external_file=["images.tiff"], - imaging_plane=imaging_plane, - starting_frame=[0], - format="external", - starting_time=0.0, - rate=1.0, -) - -nwbfile.add_acquisition(two_p_series1) -nwbfile.add_acquisition(two_p_series2) +nwbfile.add_acquisition(two_p_series) #################### # Motion Correction (optional) @@ -212,6 +184,7 @@ corrected = ImageSeries( name="corrected", # this must be named "corrected" + description="A motion corrected image stack", data=np.ones((1000, 100, 100)), unit="na", format="raw", @@ -221,6 +194,7 @@ xy_translation = TimeSeries( name="xy_translation", + description="x,y translation in pixels", data=np.ones((1000, 2)), unit="pixels", starting_time=0.0, @@ -229,7 +203,7 @@ corrected_image_stack = CorrectedImageStack( corrected=corrected, - original=one_p_series1, + original=one_p_series, xy_translation=xy_translation, ) @@ -240,7 +214,6 @@ # physiology data and add the motion correction data to the :py:class:`~pynwb.file.NWBFile`. # - ophys_module = nwbfile.create_processing_module( name="ophys", description="optical physiology processed data" ) @@ -295,14 +268,13 @@ # Then we will add the :py:class:`~pynwb.ophys.ImageSegmentation` object # to the previously created :py:class:`~pynwb.base.ProcessingModule`. 
- img_seg = ImageSegmentation() ps = img_seg.create_plane_segmentation( name="PlaneSegmentation", description="output from segmenting my favorite imaging plane", imaging_plane=imaging_plane, - reference_images=one_p_series1, # optional + reference_images=one_p_series, # optional ) ophys_module.add(img_seg) @@ -348,7 +320,7 @@ name="PlaneSegmentation2", description="output from segmenting my favorite imaging plane", imaging_plane=imaging_plane, - reference_images=one_p_series1, # optional + reference_images=one_p_series, # optional ) for _ in range(30): @@ -382,7 +354,7 @@ name="PlaneSegmentation3", description="output from segmenting my favorite imaging plane", imaging_plane=imaging_plane, - reference_images=one_p_series1, # optional + reference_images=one_p_series, # optional ) from itertools import product @@ -453,9 +425,9 @@ # Then we create a :py:class:`~pynwb.ophys.RoiResponseSeries` object to store fluorescence # data for those two ROIs. - roi_resp_series = RoiResponseSeries( name="RoiResponseSeries", + description="Fluorescence responses for two ROIs", data=np.ones((50, 2)), # 50 samples, 2 ROIs rois=rt_region, unit="lumens", @@ -484,7 +456,6 @@ # :alt: fluorescence UML diagram # :align: center - fl = Fluorescence(roi_response_series=roi_resp_series) ophys_module.add(fl) @@ -503,7 +474,6 @@ # :py:class:`~pynwb.file.NWBFile`, make sure to write the file. # IO operations are carried out using :py:class:`~pynwb.NWBHDF5IO`. - with NWBHDF5IO("ophys_tutorial.nwb", "w") as io: io.write(nwbfile) @@ -525,10 +495,9 @@ # with the name of the :py:class:`~pynwb.ophys.RoiResponseSeries` object, # which we named ``"RoiResponseSeries"``. - with NWBHDF5IO("ophys_tutorial.nwb", "r") as io: read_nwbfile = io.read() - print(read_nwbfile.acquisition["TwoPhotonSeries1"]) + print(read_nwbfile.acquisition["TwoPhotonSeries"]) print(read_nwbfile.processing["ophys"]) print(read_nwbfile.processing["ophys"]["Fluorescence"]) print(read_nwbfile.processing["ophys"]["Fluorescence"]["RoiResponseSeries"]) @@ -545,11 +514,10 @@ # Load and print all the data values of the :py:class:`~pynwb.ophys.RoiResponseSeries` # object representing the fluorescence data. - with NWBHDF5IO("ophys_tutorial.nwb", "r") as io: read_nwbfile = io.read() - print(read_nwbfile.acquisition["TwoPhotonSeries1"]) + print(read_nwbfile.acquisition["TwoPhotonSeries"]) print(read_nwbfile.processing["ophys"]["Fluorescence"]["RoiResponseSeries"].data[:]) #################### diff --git a/docs/gallery/general/plot_file.py b/docs/gallery/general/plot_file.py index ec8cf75a8..b410d4b69 100644 --- a/docs/gallery/general/plot_file.py +++ b/docs/gallery/general/plot_file.py @@ -76,13 +76,8 @@ Processing Modules ^^^^^^^^^^^^^^^^^^ -Processing modules are objects that group together common analyses done during processing of data. - To standardize the storage of -common analyses, NWB provides the concept of an :py:class:`~pynwb.core.NWBDataInterface`, where the output of -common analyses are represented as objects that extend the :py:class:`~pynwb.core.NWBDataInterface` class. -In most cases, you will not need to interact with the :py:class:`~pynwb.core.NWBDataInterface` class directly. -More commonly, you will be creating instances of classes that extend this class. - +Processing modules are objects that group together common analyses done during processing of data. They +often hold data of different processing/analysis data types. .. 
seealso:: @@ -168,7 +163,7 @@ "Baggins, Bilbo", ], # optional lab="Bag End Laboratory", # optional - institution="University of My Institution", # optional + institution="University of Middle Earth at the Shire", # optional experiment_description="I went on an adventure to reclaim vast treasures.", # optional keywords=["behavior", "exploration", "wanderlust"], # optional related_publications="doi:10.1016/j.neuron.2016.12.011", # optional From 9a461062e3e12cce1d733a3562a4c5314b470dc8 Mon Sep 17 00:00:00 2001 From: Rohan Shah Date: Wed, 26 Jun 2024 14:19:57 -0400 Subject: [PATCH 11/16] Fix `channel_conversion` use in `TimeSeries.get_data_in_units` (#1923) * fix channel_conversion in TimeSeries get_data_in_units * update CHANGELOG * simply loop range --- CHANGELOG.md | 3 +++ src/pynwb/base.py | 2 +- tests/unit/test_ecephys.py | 18 +++++++++++------- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b02837174..cee64c308 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,9 @@ - Simplified the introduction to NWB tutorial. @rly [#1914](https://github.com/NeurodataWithoutBorders/pynwb/pull/1914) - Simplified the ecephys and ophys tutorials. [#1915](https://github.com/NeurodataWithoutBorders/pynwb/pull/1915) +### Bug fixes +- Fixed use of `channel_conversion` in `TimeSeries` `get_data_in_units`. @rohanshah [1923](https://github.com/NeurodataWithoutBorders/pynwb/pull/1923) + ## PyNWB 2.8.0 (May 28, 2024) diff --git a/src/pynwb/base.py b/src/pynwb/base.py index 98fb62372..8b4daf48f 100644 --- a/src/pynwb/base.py +++ b/src/pynwb/base.py @@ -357,7 +357,7 @@ def get_data_in_units(self): """ if "channel_conversion" in self.fields: - scale_factor = self.conversion * self.channel_conversion[:, np.newaxis] + scale_factor = self.conversion * self.channel_conversion else: scale_factor = self.conversion return np.asarray(self.data) * scale_factor + self.offset diff --git a/tests/unit/test_ecephys.py b/tests/unit/test_ecephys.py index f81b61f84..1ef0b7880 100644 --- a/tests/unit/test_ecephys.py +++ b/tests/unit/test_ecephys.py @@ -117,22 +117,26 @@ def test_dimensions_warning(self): ) in str(w[-1].message) def test_get_data_in_units(self): - - data = np.asarray([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1]]) - conversion = 1.0 + samples = 100 + channels = 2 + conversion = 10.0 offset = 3.0 - channel_conversion = np.asarray([2.0, 2.0]) + channel_conversion = np.random.rand(channels) + electrical_series = mock_ElectricalSeries( - data=data, + data=np.ones((samples, channels)), conversion=conversion, offset=offset, channel_conversion=channel_conversion, ) data_in_units = electrical_series.get_data_in_units() - expected_data = data * conversion * channel_conversion[:, np.newaxis] + offset - np.testing.assert_almost_equal(data_in_units, expected_data) + for channel_index in range(channels): + np.testing.assert_almost_equal( + data_in_units[:, channel_index], + np.ones(samples) * conversion * channel_conversion[channel_index] + offset + ) class SpikeEventSeriesConstructor(TestCase): From 3c5fa481d0b910b2960b782928d04dccd745a913 Mon Sep 17 00:00:00 2001 From: Ryan Ly Date: Thu, 27 Jun 2024 21:01:36 -0400 Subject: [PATCH 12/16] Update workflows and ros3 env to use macos-latest (#1926) --- .github/workflows/run_all_tests.yml | 9 +++++---- .github/workflows/run_tests.yml | 1 + environment-ros3.yml | 15 ++++++++------- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml index 4e8ea2418..a1b1f75dd 
From 3c5fa481d0b910b2960b782928d04dccd745a913 Mon Sep 17 00:00:00 2001
From: Ryan Ly
Date: Thu, 27 Jun 2024 21:01:36 -0400
Subject: [PATCH 12/16] Update workflows and ros3 env to use macos-latest
 (#1926)

---
 .github/workflows/run_all_tests.yml |  9 +++++----
 .github/workflows/run_tests.yml     |  1 +
 environment-ros3.yml                | 15 ++++++++-------
 3 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml
index 4e8ea2418..a1b1f75dd 100644
--- a/.github/workflows/run_all_tests.yml
+++ b/.github/workflows/run_all_tests.yml
@@ -38,8 +38,9 @@ jobs:
           - { name: windows-python3.12          , test-tox-env: py312           , build-tox-env: build-py312           , python-ver: "3.12", os: windows-latest }
           - { name: windows-python3.12-upgraded , test-tox-env: py312-upgraded  , build-tox-env: build-py312-upgraded  , python-ver: "3.12", os: windows-latest }
           - { name: windows-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.11", os: windows-latest }
+          # minimum versions of dependencies do not have wheels or cannot be built on macos-arm64
           - { name: macos-python3.8-minimum     , test-tox-env: py38-minimum    , build-tox-env: build-py38-minimum    , python-ver: "3.8" , os: macos-13 }
-          - { name: macos-python3.9             , test-tox-env: py39            , build-tox-env: build-py39            , python-ver: "3.9" , os: macos-13 }
+          - { name: macos-python3.9             , test-tox-env: py39            , build-tox-env: build-py39            , python-ver: "3.9" , os: macos-latest }
           - { name: macos-python3.10            , test-tox-env: py310           , build-tox-env: build-py310           , python-ver: "3.10", os: macos-latest }
           - { name: macos-python3.11            , test-tox-env: py311           , build-tox-env: build-py311           , python-ver: "3.11", os: macos-latest }
           - { name: macos-python3.11-opt        , test-tox-env: py311-optional  , build-tox-env: build-py311           , python-ver: "3.11", os: macos-latest }
@@ -98,6 +99,7 @@ jobs:
           - { name: windows-gallery-python3.8-minimum    , test-tox-env: gallery-py38-minimum    , python-ver: "3.8" , os: windows-latest }
           - { name: windows-gallery-python3.12-upgraded  , test-tox-env: gallery-py312-upgraded  , python-ver: "3.12", os: windows-latest }
           - { name: windows-gallery-python3.12-prerelease, test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: windows-latest }
+          # minimum versions of dependencies do not have wheels or cannot be built on macos-arm64
           - { name: macos-gallery-python3.8-minimum      , test-tox-env: gallery-py38-minimum    , python-ver: "3.8" , os: macos-13 }
           - { name: macos-gallery-python3.12-upgraded    , test-tox-env: gallery-py312-upgraded  , python-ver: "3.12", os: macos-latest }
           - { name: macos-gallery-python3.12-prerelease  , test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: macos-latest }
@@ -201,7 +203,7 @@
         include:
           - { name: conda-linux-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest }
           - { name: conda-windows-python3.12-ros3, python-ver: "3.12", os: windows-latest }
-          - { name: conda-macos-python3.12-ros3 , python-ver: "3.12", os: macos-13 }  # This is due to DANDI not supporting osx-arm64. Will support macos-latest when this changes.
+          - { name: conda-macos-python3.12-ros3 , python-ver: "3.12", os: macos-latest }
     steps:
       - name: Cancel non-latest runs
         uses: styfle/cancel-workflow-action@0.11.0
@@ -248,7 +250,7 @@
         include:
           - { name: conda-linux-gallery-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest }
           - { name: conda-windows-gallery-python3.12-ros3, python-ver: "3.12", os: windows-latest }
-          - { name: conda-macos-gallery-python3.12-ros3 , python-ver: "3.12", os: macos-13 }  # This is due to DANDI not supporting osx-arm64. Will support macos-latest when this changes.
+          - { name: conda-macos-gallery-python3.12-ros3 , python-ver: "3.12", os: macos-latest }
     steps:
       - name: Cancel non-latest runs
         uses: styfle/cancel-workflow-action@0.11.0
@@ -273,7 +275,6 @@
       - name: Install run dependencies
         run: |
-          pip install matplotlib
           pip install .
           pip list
diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml
index b512b2de0..aa121acb4 100644
--- a/.github/workflows/run_tests.yml
+++ b/.github/workflows/run_tests.yml
@@ -24,6 +24,7 @@ jobs:
           - { name: linux-python3.12-upgraded   , test-tox-env: py312-upgraded  , build-tox-env: build-py312-upgraded  , python-ver: "3.12", os: ubuntu-latest , upload-wheels: true }
           - { name: windows-python3.8-minimum   , test-tox-env: py38-minimum    , build-tox-env: build-py38-minimum    , python-ver: "3.8" , os: windows-latest }
           - { name: windows-python3.12-upgraded , test-tox-env: py312-upgraded  , build-tox-env: build-py312-upgraded  , python-ver: "3.12", os: windows-latest }
+          # minimum versions of dependencies do not have wheels or cannot be built on macos-arm64
           - { name: macos-python3.8-minimum     , test-tox-env: py38-minimum    , build-tox-env: build-py38-minimum    , python-ver: "3.8" , os: macos-13 }
     steps:
       - name: Cancel non-latest runs
diff --git a/environment-ros3.yml b/environment-ros3.yml
index 21dcc5a9c..84031808f 100644
--- a/environment-ros3.yml
+++ b/environment-ros3.yml
@@ -6,17 +6,18 @@ channels:
 dependencies:
   - python==3.12
   - h5py==3.11.0
-  - hdmf==3.14.0
+  - hdmf==3.14.1
   - matplotlib==3.8.0
   - numpy==1.26.4
   - pandas==2.2.2
   - python-dateutil==2.9.0
   - setuptools
-  - pytest==7.4.3  # This is for the upcoming pytest update
-  - dandi==0.60.0  # NOTE: dandi does not support osx-arm64
-  - fsspec==2024.2.0
-  - requests==2.31.0
-  - aiohttp==3.9.3
+  - pytest==7.4.3  # pin to pytest < 8 because of incompatibilities to be addressed
+  - fsspec==2024.6.0
+  - requests==2.32.3
+  - aiohttp==3.9.5
   - pip
   - pip:
-    - remfile==0.1.11
+    - remfile==0.1.13
+    - dandi==0.62.1  # NOTE: dandi is not available on conda for osx-arm64
+
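The pinned `environment-ros3.yml` above exists to support streaming NWB files with
h5py's read-only S3 ("ros3") driver, which is available only when h5py is built
against an HDF5 that enables it. A short sketch of how such an environment is
typically exercised; the URL here is a hypothetical placeholder, not taken from
the patch:

    import h5py
    from pynwb import NWBHDF5IO

    # the ros3 driver is compiled into h5py only in some builds (for example,
    # the conda packages pinned above), so check before relying on it
    if "ros3" in h5py.registered_drivers():
        url = "https://dandiarchive.s3.amazonaws.com/some/asset.nwb"  # placeholder
        with NWBHDF5IO(url, mode="r", driver="ros3") as io:
            nwbfile = io.read()
            print(nwbfile.session_description)
    else:
        print("this h5py build does not include ROS3 support")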
From c99c0c5b3bf731a6d845bfcd44f10ddc8f99c228 Mon Sep 17 00:00:00 2001
From: Ryan Ly
Date: Thu, 27 Jun 2024 22:30:36 -0400
Subject: [PATCH 13/16] Add comments to io/file.py (#1925)

* Add comments to io/file.py
* Update changelog
---
 CHANGELOG.md         |   1 +
 src/pynwb/io/file.py | 104 +++++++++++++++++++++++++++++++++++++++++--
 2 files changed, 101 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index cee64c308..790a54c27 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@
 ### Documentation and tutorial enhancements
 - Simplified the introduction to NWB tutorial. @rly [#1914](https://github.com/NeurodataWithoutBorders/pynwb/pull/1914)
 - Simplified the ecephys and ophys tutorials. [#1915](https://github.com/NeurodataWithoutBorders/pynwb/pull/1915)
+- Add comments to `src/pynwb/io/file.py` to improve developer documentation. @rly [#1925](https://github.com/NeurodataWithoutBorders/pynwb/pull/1925)
 
 ### Bug fixes
 - Fixed use of `channel_conversion` in `TimeSeries` `get_data_in_units`. @rohanshah [#1923](https://github.com/NeurodataWithoutBorders/pynwb/pull/1923)
diff --git a/src/pynwb/io/file.py b/src/pynwb/io/file.py
index 1908c6b31..90e8f36b7 100644
--- a/src/pynwb/io/file.py
+++ b/src/pynwb/io/file.py
@@ -127,6 +127,15 @@ def __init__(self, spec):
 
     @ObjectMapper.object_attr('scratch_datas')
     def scratch_datas(self, container, manager):
+        """Set the value for the 'scratch_datas' spec on NWBFile to a list of ScratchData objects.
+
+        Used when writing (building) the NWBFile container to a file.
+
+        The 'scratch' group can contain both groups and datasets. This mapping function
+        is used when writing the value for the 'scratch_datas' spec (ScratchData type
+        -- see __init__ above). The value is set to a list of all ScratchData
+        objects in the 'scratch' field of the NWBFile container.
+        """
         scratch = container.scratch
         ret = list()
         for s in scratch.values():
@@ -136,6 +145,15 @@ def scratch_datas(self, container, manager):
 
     @ObjectMapper.object_attr('scratch_containers')
     def scratch_containers(self, container, manager):
+        """Set the value for the 'scratch_containers' spec on NWBFile to a list of non-ScratchData objects.
+
+        Used when writing (building) the NWBFile container to a file.
+
+        The 'scratch' group can contain both groups and datasets. This mapping function
+        is used when writing the value for the 'scratch_containers' spec (NWBContainers
+        and DynamicTable type -- see __init__ above). The value is set to a list of all non-ScratchData
+        objects in the 'scratch' field of the NWBFile container.
+        """
         scratch = container.scratch
         ret = list()
         for s in scratch.values():
@@ -145,6 +163,14 @@ def scratch_containers(self, container, manager):
 
     @ObjectMapper.constructor_arg('scratch')
     def scratch(self, builder, manager):
+        """Set the constructor arg for 'scratch' to a tuple of objects.
+
+        Used when constructing the NWBFile container from a written file.
+
+        The 'scratch' group can contain both groups and datasets. This mapping function
+        is used to construct the contained groups and datasets and put them into a single
+        field 'scratch' on the NWBFile container for user convenience.
+        """
         scratch = builder.get('scratch')
         ret = list()
         if scratch is not None:
@@ -156,28 +182,54 @@ def scratch(self, builder, manager):
 
     @ObjectMapper.constructor_arg('session_start_time')
     def dateconversion(self, builder, manager):
+        """Set the constructor arg for 'session_start_time' to a datetime object.
+
+        Used when constructing the NWBFile container from a written file.
+
+        Dates are read into builders as strings and are parsed into datetime objects
+        for user convenience and consistency with how they are written.
+        """
         datestr = builder.get('session_start_time').data
         date = dateutil_parse(datestr)
         return date
 
     @ObjectMapper.constructor_arg('timestamps_reference_time')
     def dateconversion_trt(self, builder, manager):
+        """Set the constructor arg for 'timestamps_reference_time' to a datetime object.
+
+        Used when constructing the NWBFile container from a written file.
+
+        Dates are read into builders as strings and are parsed into datetime objects
+        for user convenience and consistency with how they are written.
+        """
         datestr = builder.get('timestamps_reference_time').data
         date = dateutil_parse(datestr)
         return date
 
     @ObjectMapper.constructor_arg('file_create_date')
     def dateconversion_list(self, builder, manager):
+        """Set the constructor arg for 'file_create_date' to a list of datetime objects.
+
+        Used when constructing the NWBFile container from a written file.
+
+        Dates are read into builders as strings and are parsed into datetime objects
+        for user convenience and consistency with how they are written.
+        """
         datestr = builder.get('file_create_date').data
         dates = list(map(dateutil_parse, datestr))
         return dates
 
-    @ObjectMapper.constructor_arg('file_name')
-    def name(self, builder, manager):
-        return builder.name
-
     @ObjectMapper.constructor_arg('experimenter')
     def experimenter_carg(self, builder, manager):
+        """Set the constructor arg for 'experimenter' to a tuple if the builder value is a string.
+
+        Used when constructing the NWBFile container from a written file.
+
+        In early versions of the NWB 2 schema, 'experimenter' was specified as a string.
+        Then it was changed to be a 1-D array of strings. This mapping function is necessary
+        to allow reading of both data where 'experimenter' was specified as a string and data
+        where 'experimenter' was specified as an array.
+        """
         ret = None
         exp_bldr = builder['general'].get('experimenter')
         if exp_bldr is not None:
@@ -189,6 +241,14 @@ def experimenter_carg(self, builder, manager):
 
     @ObjectMapper.object_attr('experimenter')
     def experimenter_obj_attr(self, container, manager):
+        """Change the value for the field 'experimenter' on NWBFile to a tuple if it is a string.
+
+        Used when writing (building) the NWBFile container to a file.
+
+        In early versions of the NWB 2 schema, 'experimenter' was specified as a string.
+        Then it was changed to be a 1-D array of strings. This mapping function is necessary
+        for writing a valid 'experimenter' array if it is a string in the NWBFile container.
+        """
         ret = None
         if isinstance(container.experimenter, str):
             ret = (container.experimenter,)
@@ -196,6 +256,15 @@ def experimenter_obj_attr(self, container, manager):
 
     @ObjectMapper.constructor_arg('related_publications')
     def publications_carg(self, builder, manager):
+        """Set the constructor arg for 'related_publications' to a tuple if the builder value is a string.
+
+        Used when constructing the NWBFile container from a written file.
+
+        In early versions of the NWB 2 schema, 'related_publications' was specified as a string.
+        Then it was changed to be a 1-D array of strings. This mapping function is necessary
+        to allow reading of both data where 'related_publications' was specified as a string and data
+        where 'related_publications' was specified as an array.
+        """
         ret = None
         pubs_bldr = builder['general'].get('related_publications')
         if pubs_bldr is not None:
@@ -207,6 +276,14 @@ def publications_carg(self, builder, manager):
 
     @ObjectMapper.object_attr('related_publications')
     def publication_obj_attr(self, container, manager):
+        """Change the value for the field 'related_publications' on NWBFile to a tuple if it is a string.
+
+        Used when writing (building) the NWBFile container to a file.
+
+        In early versions of the NWB 2 schema, 'related_publications' was specified as a string.
+        Then it was changed to be a 1-D array of strings. This mapping function is necessary
+        for writing a valid 'related_publications' array if it is a string in the NWBFile container.
+        """
         ret = None
         if isinstance(container.related_publications, str):
             ret = (container.related_publications,)
@@ -218,6 +295,13 @@ class SubjectMap(ObjectMapper):
 
     @ObjectMapper.constructor_arg('date_of_birth')
     def dateconversion(self, builder, manager):
+        """Set the constructor arg for 'date_of_birth' to a datetime object.
+
+        Used when constructing the Subject container from a written file.
+
+        Dates are read into builders as strings and are parsed into datetime objects
+        for user convenience and consistency with how they are written.
+        """
         dob_builder = builder.get('date_of_birth')
         if dob_builder is None:
             return
@@ -228,6 +312,18 @@ def dateconversion(self, builder, manager):
 
     @ObjectMapper.constructor_arg("age__reference")
     def age_reference_none(self, builder, manager):
+        """Set the constructor arg for 'age__reference' to "unspecified" for NWB files < 2.6, else "birth".
+
+        Used when constructing the Subject container from a written file.
+
+        NWB schema 2.6.0 introduced a new optional attribute 'reference' on the 'age' dataset with a default
+        value of "birth". When data written with NWB versions < 2.6 are read, 'age__reference' is set to
+        "unspecified" in the Subject constructor. "unspecified" is a special non-None placeholder value
+        that is handled specially in Subject.__init__ to distinguish it from no value being provided by the
+        user. When data written with NWB versions >= 2.6 are read, 'age__reference' is set to the default
+        value, "birth", in the Subject constructor (this is not strictly necessary because Subject.__init__
+        has default value "birth" for 'age__reference').
+        """
         age_builder = builder.get("age")
         age_reference = None
         if age_builder is not None:
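The docstrings added above all describe the same round-trip pattern: values are
stored one way on disk (strings for dates, either a scalar string or a 1-D array
for 'experimenter' and 'related_publications') and normalized on read. A sketch
of that normalization; `normalize_to_tuple` is a hypothetical helper written for
illustration, not code from `io/file.py`:

    from dateutil.parser import parse as dateutil_parse


    def normalize_to_tuple(value):
        # files written against early NWB 2 schemas stored a plain string;
        # newer files store a 1-D array; the container always gets a tuple
        if value is None:
            return None
        if isinstance(value, str):
            return (value,)
        return tuple(value)


    assert normalize_to_tuple("Baggins, Bilbo") == ("Baggins, Bilbo",)
    assert normalize_to_tuple(["A", "B"]) == ("A", "B")

    # dates round-trip the same way: written as ISO strings, parsed on read
    assert dateutil_parse("2024-06-27T22:30:36-04:00").year == 2024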
From d3ac4b91d703e61381c5b0ec1bf40ff91537fc9f Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Wed, 3 Jul 2024 14:16:24 -0700
Subject: [PATCH 14/16] prepare release 2.8.1 (#1927)

* Update CHANGELOG.md
* Update pyproject.toml
* update readme and requirements-doc.txt
* fix yaml file path for configurator tutorial
* update path to yaml in configuration tutorial
* update path to yaml in configuration tutorial
* update working dir for gallery tests
* update channel number to stop transpose warning
* update yaml file path in configuration tutorial
* fix text section in configuration tutorial
---
 CHANGELOG.md                              | 2 +-
 README.rst                                | 6 +++---
 docs/gallery/general/plot_configurator.py | 6 +++++-
 pyproject.toml                            | 3 ++-
 requirements-doc.txt                      | 5 ++++-
 tests/unit/test_ecephys.py                | 2 +-
 6 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 790a54c27..f8750ffcd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,6 @@
 # PyNWB Changelog
 
-## PyNWB 2.8.1 (Upcoming)
+## PyNWB 2.8.1 (July 3, 2024)
 
 ### Documentation and tutorial enhancements
 - Simplified the introduction to NWB tutorial. @rly [#1914](https://github.com/NeurodataWithoutBorders/pynwb/pull/1914)
diff --git a/README.rst b/README.rst
index d5d99789a..408446cff 100644
--- a/README.rst
+++ b/README.rst
@@ -49,10 +49,10 @@ Overall Health
    :target: https://github.com/neurodatawithoutborders/pynwb/blob/dev/license.txt
    :alt: PyPI - License
 
-**Conda**
+**Conda Feedstock**
 
-.. image:: https://circleci.com/gh/conda-forge/pynwb-feedstock.svg?style=shield
-   :target: https://circleci.com/gh/conda-forge/pynwb-feedstock
+.. image:: https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/pynwb-feedstock?branchName=main
+   :target: https://dev.azure.com/conda-forge/feedstock-builds/_build/latest?definitionId=5703&branchName=main
    :alt: Conda Feedstock Status
 
 NWB Format API
diff --git a/docs/gallery/general/plot_configurator.py b/docs/gallery/general/plot_configurator.py
index 52a2a6326..0156a8fef 100644
--- a/docs/gallery/general/plot_configurator.py
+++ b/docs/gallery/general/plot_configurator.py
@@ -55,6 +55,7 @@
 from pynwb import NWBFile, get_loaded_type_config, load_type_config, unload_type_config
 from pynwb.file import Subject
 
+####################################
 # How to use a Configuration file
 # -------------------------------
 # As mentioned prior, the first step after creating a configuration file is
@@ -69,7 +70,10 @@
 # the value of the fields are wrapped and then validated to see if it is a
 # permissible value in their respective :py:class:`~hdmf.term_set.TermSet`.
 
-dir_path = os.path.dirname(os.path.abspath("__file__"))
+try:
+    dir_path = os.path.dirname(os.path.abspath(__file__))  # when running as a .py script
+except NameError:
+    dir_path = os.path.dirname(os.path.abspath("__file__"))  # when running in a notebook or interactive session
 yaml_file = os.path.join(dir_path, 'nwb_gallery_config.yaml')
 load_type_config(config_path=yaml_file)
 
diff --git a/pyproject.toml b/pyproject.toml
index 77d33e352..4873b52e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,7 +9,8 @@ authors = [
     { name="Ryan Ly", email="rly@lbl.gov" },
     { name="Oliver Ruebel", email="oruebel@lbl.gov" },
     { name="Ben Dichter", email="ben.dichter@gmail.com" },
-    { name="Matthew Avaylon", email="mavaylon@lbl.gov" }
+    { name="Matthew Avaylon", email="mavaylon@lbl.gov" },
+    { name="Stephanie Prince", email="smprince@lbl.gov" },
 ]
 description= "Package for working with Neurodata stored in the NWB format."
 readme = "README.rst"
diff --git a/requirements-doc.txt b/requirements-doc.txt
index 90633ce88..30c41106d 100644
--- a/requirements-doc.txt
+++ b/requirements-doc.txt
@@ -13,4 +13,7 @@ lxml  # used by dataframe_image when using the matplotlib backend
 hdf5plugin
 dandi>=0.46.6
 hdmf-zarr
-zarr<2.18.0  # limited until hdmf-zarr 0.8.0 is released to resolve issues with zarr>=2.18.0
+zarr<3  # limited to zarr<3 until hdmf-zarr resolves issues with zarr 3.0
+linkml-runtime==1.7.4; python_version >= "3.9"
+schemasheets==0.2.1; python_version >= "3.9"
+oaklib==0.5.32; python_version >= "3.9"
\ No newline at end of file
diff --git a/tests/unit/test_ecephys.py b/tests/unit/test_ecephys.py
index 1ef0b7880..dc194af2a 100644
--- a/tests/unit/test_ecephys.py
+++ b/tests/unit/test_ecephys.py
@@ -118,7 +118,7 @@ def test_dimensions_warning(self):
 
     def test_get_data_in_units(self):
         samples = 100
-        channels = 2
+        channels = 5
         conversion = 10.0
         offset = 3.0
         channel_conversion = np.random.rand(channels)
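The configurator change above wraps the `__file__` lookup so the tutorial works
both when executed as a .py file (where `__file__` is defined) and in an
interactive session (where the name raises NameError). A sketch of the
surrounding type-configuration workflow under the same assumptions; the return
value of `get_loaded_type_config` is not shown in the patch, so treat this as
illustrative only:

    import os
    from pynwb import get_loaded_type_config, load_type_config, unload_type_config

    try:
        dir_path = os.path.dirname(os.path.abspath(__file__))  # executed as a .py file
    except NameError:
        dir_path = os.getcwd()  # interactive session: __file__ is undefined

    yaml_file = os.path.join(dir_path, "nwb_gallery_config.yaml")  # shipped with the gallery

    load_type_config(config_path=yaml_file)  # validation is active from here on
    config = get_loaded_type_config()        # inspect the currently loaded config
    unload_type_config()                     # deactivate validation again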
From 20c5d75a6ce0b2deb729b81803c9c6440cff029c Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Wed, 3 Jul 2024 18:34:11 -0700
Subject: [PATCH 15/16] update release process documentation (#1928)

Co-authored-by: Ryan Ly
---
 CHANGELOG.md                   |  6 +++-
 docs/source/make_a_release.rst | 59 ++++++++++++++++++++++------------
 2 files changed, 44 insertions(+), 21 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f8750ffcd..c72e78f58 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,10 @@
 # PyNWB Changelog
 
+## PyNWB 2.8.2 (Upcoming)
+
+### Documentation and tutorial enhancements
+- Added pre-release pull request instructions to release process documentation @stephprince [#1928](https://github.com/NeurodataWithoutBorders/pynwb/pull/1928)
+
 ## PyNWB 2.8.1 (July 3, 2024)
 
 ### Documentation and tutorial enhancements
@@ -10,7 +15,6 @@
 
 ### Bug fixes
 - Fixed use of `channel_conversion` in `TimeSeries` `get_data_in_units`. @rohanshah [#1923](https://github.com/NeurodataWithoutBorders/pynwb/pull/1923)
-
 ## PyNWB 2.8.0 (May 28, 2024)
 
 ### Enhancements and minor changes
diff --git a/docs/source/make_a_release.rst b/docs/source/make_a_release.rst
index f88b627ea..ad258db34 100644
--- a/docs/source/make_a_release.rst
+++ b/docs/source/make_a_release.rst
@@ -20,25 +20,6 @@ Prerequisites
 
 * You have a `GPG signing key`_.
 
-* Dependency versions in ``requirements.txt``, ``requirements-dev.txt``, ``requirements-opt.txt``,
-  ``requirements-doc.txt``, and ``requirements-min.txt`` are up-to-date.
-
-* Legal information and copyright dates in ``Legal.txt``, ``license.txt``, ``README.rst``,
-  ``docs/source/conf.py``, and any other files are up-to-date.
-
-* Package information in ``setup.py`` is up-to-date.
-
-* ``README.rst`` information is up-to-date.
-
-* The ``nwb-schema`` submodule is up-to-date. The version number should be checked manually in case syncing the
-  git submodule does not work as expected.
-
-* Documentation reflects any new features and changes in PyNWB functionality.
-
-* Documentation builds locally.
-
-* Documentation builds on the `ReadTheDocs project`_ on the "dev" build.
-
 * Release notes have been prepared.
 
 * An appropriate new version number has been selected.
@@ -56,6 +37,44 @@ Commands to evaluate starts with a dollar sign. For example::
 
 means that ``echo "Hello"`` should be copied and evaluated in the terminal.
 
+-----------------------------------------------------
+Make pre-release pull request on GitHub: Step-by-step
+-----------------------------------------------------
+
+1. Create a new branch locally or on GitHub. Update the ``CHANGELOG.md`` with the release date.
+
+   .. code::
+
+      $ git checkout -b release-X.Y.Z
+
+
+2. Create a pull request for the new release branch, then append the URL with: "&template=release.md".
+   For example, ``https://github.com/NeurodataWithoutBorders/pynwb/compare/dev...release-X.Y.Z?quick_pull=1&template=release.md``
+
+
+3. Follow the checklist in the template. The checklist covers the following steps in more detail:
+
+   * Make sure all PRs to be included in this release have been merged to ``dev``.
+
+   * Update package versions in ``requirements.txt``, ``requirements-dev.txt``, ``requirements-opt.txt``,
+     ``requirements-doc.txt``, ``requirements-min.txt``, ``environment-ros3.yml``, and ``pyproject.toml``.
+
+   * Check legal information and copyright dates in ``Legal.txt``, ``license.txt``, ``README.rst``,
+     ``docs/source/conf.py``.
+
+   * Update ``pyproject.toml`` and ``README.rst`` as needed.
+
+   * Update ``src/pynwb/nwb-schema`` submodule as needed. Check the version number manually to make sure
+     we are using the latest release.
+
+   * Update documentation to reflect new features and changes in PyNWB functionality.
+
+   * Run tests locally, inspect all warnings and outputs, and try to remove all warnings.
+
+   * Test documentation builds locally and on the `ReadTheDocs project`_ on the "dev" build.
+
+
 -------------------------------------
 Publish release on PyPI: Step-by-step
@@ -144,7 +163,7 @@ Publish release on PyPI: Step-by-step
 
     python -c "import pynwb; print(pynwb.__version__)"
 
-10. Cleanup
+12. Cleanup
 
     On bash/zsh:

From 570fb3b396b3c2c03b4abb6af23d8d76b1c26586 Mon Sep 17 00:00:00 2001
From: Jonny Saunders
Date: Mon, 15 Jul 2024 13:18:10 -0700
Subject: [PATCH 16/16] Template lint instructions (#1932)

* cache __TYPE_MAP and init submodules
* correct linting instructions in pull request template
---
 .github/PULL_REQUEST_TEMPLATE.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 3542c3f8c..562162300 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -12,6 +12,6 @@ Show how to reproduce the new behavior (can be a bug fix or a new feature)
 - [ ] Did you update CHANGELOG.md with your changes?
 - [ ] Have you checked our [Contributing](https://github.com/NeurodataWithoutBorders/pynwb/blob/dev/docs/CONTRIBUTING.rst) document?
 - [ ] Have you ensured the PR clearly describes the problem and the solution?
-- [ ] Is your contribution compliant with our coding style? This can be checked running `flake8` from the source directory.
+- [ ] Is your contribution compliant with our coding style? This can be checked by running `ruff check . && codespell` from the source directory.
 - [ ] Have you checked to ensure that there aren't other open [Pull Requests](https://github.com/NeurodataWithoutBorders/pynwb/pulls) for the same change?
 - [ ] Have you included the relevant issue number using "Fix #XXX" notation where XXX is the issue number? By including "Fix #XXX" you allow GitHub to close issue #XXX when the PR is merged.