Commit: Merge branch 'dev' into prepare_2.6.0

Showing 39 changed files with 812 additions and 336 deletions.
@@ -1,15 +1,15 @@
 name: Run DANDI read tests
 on:
-  schedule:
-    - cron: '0 6 * * *' # once per day at 1am ET
+  # NOTE this is disabled until we can run this systematically instead of randomly
+  # so we don't get constant error notifications and waste compute cycles
+  # See https://github.com/NeurodataWithoutBorders/pynwb/issues/1804
+  # schedule:
+  #   - cron: '0 6 * * *' # once per day at 1am ET
   workflow_dispatch:
 
 jobs:
   run-tests:
     runs-on: ubuntu-latest
-    defaults:
-      run:
-        shell: bash -l {0} # necessary for conda
     steps:
       - name: Cancel non-latest runs
         uses: styfle/[email protected]
@@ -22,19 +22,14 @@ jobs:
           submodules: 'recursive'
           fetch-depth: 0 # tags are required for versioneer to determine the version
 
-      - name: Set up Conda
-        uses: conda-incubator/setup-miniconda@v2
+      - name: Set up Python
+        uses: actions/setup-python@v4
         with:
-          auto-update-conda: true
-          activate-environment: ros3
-          environment-file: environment-ros3.yml
-          python-version: "3.11"
-          channels: conda-forge
-          auto-activate-base: false
+          python-version: '3.11'
 
       - name: Install run dependencies
         run: |
-          python -m pip install dandi pytest
+          python -m pip install dandi fsspec requests aiohttp pytest
           python -m pip uninstall -y pynwb # uninstall pynwb
           python -m pip install -e .
           python -m pip list
@@ -47,4 +42,4 @@ jobs:
       - name: Run DANDI read tests
         run: |
-          python tests/read_dandi/test_read_dandi.py
+          python tests/read_dandi/read_dandi.py
@@ -0,0 +1,161 @@
""" | ||
.. _editing: | ||
Editing NWB files | ||
================= | ||
This tutorial demonstrates how to edit NWB files in-place to make small changes to | ||
existing containers. To add or remove containers from an NWB file, see | ||
:ref:`modifying_data`. How and whether it is possible to edit an NWB file depends on the | ||
storage backend and the type of edit. | ||
.. warning:: | ||
Manually editing an existing NWB file can make the file invalid if you are not | ||
careful. We highly recommend making a copy before editing and running a validation | ||
check on the file after editing it. See :ref:`validating`. | ||
Editing datasets | ||
---------------- | ||
When reading an HDF5 NWB file, PyNWB exposes :py:class:`h5py.Dataset` objects, which can | ||
be edited in place. For this to work, you must open the file in read/write mode | ||
(``"r+"`` or ``"a"``). | ||
First, let's create an NWB file with data: | ||
""" | ||
from pynwb import NWBHDF5IO, NWBFile, TimeSeries | ||
from datetime import datetime | ||
from dateutil.tz import tzlocal | ||
import numpy as np | ||
|
||
nwbfile = NWBFile(
    session_description="my first synthetic recording",
    identifier="EXAMPLE_ID",
    session_start_time=datetime.now(tzlocal()),
    session_id="LONELYMTN",
)

nwbfile.add_acquisition(
    TimeSeries(
        name="synthetic_timeseries",
        description="Random values",
        data=np.random.randn(100, 100),
        unit="m",
        rate=10e3,
    )
)

with NWBHDF5IO("test_edit.nwb", "w") as io:
    io.write(nwbfile)

##############################################
# Now, let's edit the values of the dataset:

with NWBHDF5IO("test_edit.nwb", "r+") as io:
    nwbfile = io.read()
    nwbfile.acquisition["synthetic_timeseries"].data[:10] = 0.0

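##############################################
# A quick check (not part of the original tutorial) that the edit above was written to
# disk: re-open the file read-only and look at the first ten rows, which we expect to
# now be all zeros.

with NWBHDF5IO("test_edit.nwb", "r") as io:
    nwbfile = io.read()
    print(nwbfile.acquisition["synthetic_timeseries"].data[:10])
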
##############################################
# You can edit the attributes of that dataset through the ``attrs`` attribute:

with NWBHDF5IO("test_edit.nwb", "r+") as io:
    nwbfile = io.read()
    nwbfile.acquisition["synthetic_timeseries"].data.attrs["unit"] = "volts"

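##############################################
# A small illustrative follow-up (not part of the original tutorial): the ``unit``
# attribute of the data dataset is what PyNWB maps to ``TimeSeries.unit``, so
# re-reading the file should now report the updated unit.

with NWBHDF5IO("test_edit.nwb", "r") as io:
    nwbfile = io.read()
    print(nwbfile.acquisition["synthetic_timeseries"].unit)  # expected: "volts"
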
##############################################
# Changing the shape of a dataset
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Whether it is possible to change the shape of a dataset depends on how the dataset
# was created. If the dataset was created with a flexible shape, then its shape can be
# changed in-place. To create a dataset with a flexible shape, specify the ``maxshape``
# argument of the :py:class:`~hdmf.backends.hdf5.h5_utils.H5DataIO` class constructor.
# Using a ``None`` value for a component of the ``maxshape`` tuple allows the size of
# the corresponding dimension to grow, so that the dataset can be resized to be
# arbitrarily large in that dimension. Chunking is required for datasets with flexible
# shapes, so setting ``maxshape`` automatically enables chunking if it is not already
# specified.
#
# First, let's create an NWB file with a dataset with a flexible shape:

from hdmf.backends.hdf5.h5_utils import H5DataIO

nwbfile = NWBFile(
    session_description="my first synthetic recording",
    identifier="EXAMPLE_ID",
    session_start_time=datetime.now(tzlocal()),
    session_id="LONELYMTN",
)

data_io = H5DataIO(data=np.random.randn(100, 100), maxshape=(None, 100))

nwbfile.add_acquisition(
    TimeSeries(
        name="synthetic_timeseries",
        description="Random values",
        data=data_io,
        unit="m",
        rate=10e3,
    )
)

with NWBHDF5IO("test_edit2.nwb", "w") as io:
    io.write(nwbfile)

##############################################
# The ``None`` value in the first component of ``maxshape`` means that the first
# dimension of the dataset is unlimited. By setting the second dimension of
# ``maxshape`` to ``100``, that dimension is fixed to be no larger than ``100``. If you
# do not specify a ``maxshape``, then the shape of the dataset will be fixed to the
# shape that the dataset was created with. Here, you can change the shape of the first
# dimension of this dataset.

with NWBHDF5IO("test_edit2.nwb", "r+") as io:
    nwbfile = io.read()
    nwbfile.acquisition["synthetic_timeseries"].data.resize((200, 100))

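##############################################
# The rows added by the resize are initialized to the dataset's fill value (zero by
# default). As a small follow-up sketch (not part of the original tutorial), we can
# write new values into the newly added region:

with NWBHDF5IO("test_edit2.nwb", "r+") as io:
    nwbfile = io.read()
    dataset = nwbfile.acquisition["synthetic_timeseries"].data
    dataset[100:, :] = np.random.randn(100, 100)  # fill rows 100-199
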
##############################################
# The ``resize`` call above changes the shape of the dataset in-place. If you try to
# change the shape of a dataset with a fixed shape, you will get an error, as the
# sketch below demonstrates.
#
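# A minimal illustrative sketch (not part of the original tutorial): ``test_edit.nwb``
# was written without ``maxshape``, so resizing its dataset fails.

with NWBHDF5IO("test_edit.nwb", "r+") as io:
    nwbfile = io.read()
    try:
        nwbfile.acquisition["synthetic_timeseries"].data.resize((200, 100))
    except Exception as err:
        print("Resizing a fixed-shape dataset failed:", err)
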
##############################################
# .. note::
#   There are several types of dataset edits that cannot be done in-place: changing the
#   shape of a dataset with a fixed shape, or changing the datatype, compression,
#   chunking, max-shape, or fill-value of a dataset. For any of these, we recommend
#   using the :py:meth:`pynwb.NWBHDF5IO.export` method to export the data to a new
#   file. See :ref:`modifying_data` for more information.
#
# Editing groups
# --------------
# Editing of groups is not yet supported in PyNWB.
# To edit the attributes of a group, open the file and edit it using :py:mod:`h5py`:

import h5py

with h5py.File("test_edit.nwb", "r+") as f:
    f["acquisition"]["synthetic_timeseries"].attrs["description"] = "Random values in volts"

##############################################
# .. warning::
#   Be careful not to edit values that will bring the file out of compliance with the
#   NWB specification.
#
# Renaming groups and datasets
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Rename groups and datasets in-place using the :py:meth:`~h5py.Group.move` method. For
# example, to rename the ``"synthetic_timeseries"`` group:

with h5py.File("test_edit.nwb", "r+") as f:
    f["acquisition"].move("synthetic_timeseries", "synthetic_timeseries_renamed")

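##############################################
# A quick illustrative check with :py:mod:`h5py` (not part of the original tutorial)
# that the rename took effect; we expect the new name to be the only key under
# ``/acquisition``:

with h5py.File("test_edit.nwb", "r") as f:
    print(list(f["acquisition"].keys()))
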
##############################################
# You can use this same technique to move a group or dataset to a different location in
# the file. For example, to move the ``"synthetic_timeseries_renamed"`` group to the
# ``"analysis"`` group:

with h5py.File("test_edit.nwb", "r+") as f:
    f["acquisition"].move(
        "synthetic_timeseries_renamed",
        "/analysis/synthetic_timeseries_renamed",
    )

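##############################################
# Finally, as recommended in the warning at the top of this tutorial, it is a good idea
# to run a validation check after editing a file by hand (see :ref:`validating`). A
# minimal sketch, assuming the :py:func:`pynwb.validate` API:

from pynwb import validate

with NWBHDF5IO("test_edit.nwb", "r") as io:
    errors = validate(io=io)
print(errors)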