Update ecephys and ophys tutorials
rly committed Jun 13, 2024
1 parent 194ce9c commit a139870
Showing 4 changed files with 39 additions and 79 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -4,6 +4,7 @@

### Documentation and tutorial enhancements
- Simplified the introduction to NWB tutorial. @rly [#1914](https://github.com/NeurodataWithoutBorders/pynwb/pull/1914)
- Simplified the ecephys and ophys tutorials. @rly [#1915](https://github.com/NeurodataWithoutBorders/pynwb/pull/1915)


## PyNWB 2.8.0 (May 28, 2024)
30 changes: 13 additions & 17 deletions docs/gallery/domain/ecephys.py
@@ -39,7 +39,6 @@
#
# When creating a NWB file, the first step is to create the :py:class:`~pynwb.file.NWBFile`.


nwbfile = NWBFile(
session_description="my first synthetic recording",
identifier=str(uuid4()),
@@ -50,7 +49,8 @@
lab="Bag End Laboratory",
institution="University of Middle Earth at the Shire",
experiment_description="I went on an adventure to reclaim vast treasures.",
session_id="LONELYMTN001",
keywords=["ecephys", "exploration", "wanderlust"],
related_publications="doi:10.1016/j.neuron.2016.12.011",
)

#######################
@@ -93,7 +93,6 @@
# additional user-specified metadata as custom columns of the table. We will be adding a ``"label"`` column to the
# table. Use the following code to add electrodes for an array with 4 shanks and 3 channels per shank.


nwbfile.add_electrode_column(name="label", description="label of electrode")

nshanks = 4
@@ -118,10 +117,9 @@
electrode_counter += 1
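For reference, the electrode-creation loop that this view folds away follows roughly the pattern below. This is a hedged reconstruction, not the committed code: the helper name nchannels_per_shank, the group description, and the "brain area" location are illustrative assumptions.

nchannels_per_shank = 3
electrode_counter = 0

for ishank in range(nshanks):
    # create an electrode group for this shank
    electrode_group = nwbfile.create_electrode_group(
        name=f"shank{ishank}",
        description=f"electrode group for shank {ishank}",
        device=device,  # assumes the Device created earlier in the tutorial
        location="brain area",
    )
    # add one row to the electrodes table per channel on the shank,
    # filling the custom "label" column added above
    for ielec in range(nchannels_per_shank):
        nwbfile.add_electrode(
            group=electrode_group,
            label=f"shank{ishank}elec{ielec}",
            location="brain area",
        )
        electrode_counter += 1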

#######################
# Similarly to the ``trials`` table, we can view the ``electrodes`` table in tabular form
# Similarly to other tables in PyNWB, we can view the ``electrodes`` table in tabular form
# by converting it to a pandas :py:class:`~pandas.DataFrame`.


nwbfile.electrodes.to_dataframe()

#######################
@@ -145,7 +143,6 @@
# convenience function that creates a :py:class:`~hdmf.common.table.DynamicTableRegion` which references the
# ``"electrodes"`` table.


all_table_region = nwbfile.create_electrode_table_region(
region=list(range(electrode_counter)), # reference row indices 0 to N-1
description="all electrodes",
@@ -156,7 +153,7 @@
# ^^^^^^^^^^^^^^^^^
#
# Now create an :py:class:`~pynwb.ecephys.ElectricalSeries` object to store raw data collected
# during the experiment, passing in this ``"all_table_region"`` :py:class:`~hdmf.common.table.DynamicTableRegion`
# during the experiment, passing in this ``all_table_region`` :py:class:`~hdmf.common.table.DynamicTableRegion`
# reference to all rows of the electrodes table.
#
# .. only:: html
@@ -174,10 +171,10 @@
# :align: center
#


raw_data = np.random.randn(50, 12)
raw_electrical_series = ElectricalSeries(
name="ElectricalSeries",
description="Raw acquisition traces",
data=raw_data,
electrodes=all_table_region,
starting_time=0.0, # timestamp of the first sample in seconds relative to the session start time
@@ -188,7 +185,6 @@
# Since this :py:class:`~pynwb.ecephys.ElectricalSeries` represents raw data from the data acquisition system,
# add it to the acquisition group of the :py:class:`~pynwb.file.NWBFile`.


nwbfile.add_acquisition(raw_electrical_series)

####################
@@ -199,10 +195,10 @@
# again passing in the :py:class:`~hdmf.common.table.DynamicTableRegion` reference to all rows of the ``"electrodes"``
# table.


lfp_data = np.random.randn(50, 12)
lfp_electrical_series = ElectricalSeries(
name="ElectricalSeries",
description="LFP data",
data=lfp_data,
electrodes=all_table_region,
starting_time=0.0,
@@ -240,7 +236,6 @@
# This is analogous to how we can store the :py:class:`~pynwb.behavior.Position` object in a processing module
# created with the method :py:meth:`.NWBFile.create_processing_module`.


ecephys_module = nwbfile.create_processing_module(
name="ecephys", description="processed extracellular electrophysiology data"
)
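The line that actually stores the LFP series in this module is folded out of the view above; a rough sketch of that step (assuming the LFP container from pynwb.ecephys) would look like:

from pynwb.ecephys import LFP

# wrap the ElectricalSeries holding the LFP data in an LFP container
lfp = LFP(electrical_series=lfp_electrical_series)

# store the container in the "ecephys" processing module
ecephys_module.add(lfp)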
@@ -254,13 +249,16 @@
#
# Spike times are stored in the :py:class:`~pynwb.misc.Units` table, which is a subclass of
# :py:class:`~hdmf.common.table.DynamicTable`. Adding columns to the :py:class:`~pynwb.misc.Units` table is analogous
# to how we can add columns to the ``"electrodes"`` and ``"trials"`` tables.
#
# Generate some random spike data and populate the :py:class:`~pynwb.misc.Units` table using the
# method :py:meth:`.NWBFile.add_unit`.
# to how we can add columns to the ``"electrodes"`` and ``"trials"`` tables. Use the convenience method
# :py:meth:`.NWBFile.add_unit_column` to add a new column to the :py:class:`~pynwb.misc.Units` table for the
# sorting quality of the units.

nwbfile.add_unit_column(name="quality", description="sorting quality")

####################
# Generate some random spike data and populate the :py:class:`~pynwb.misc.Units` table using the
# method :py:meth:`.NWBFile.add_unit`.

firing_rate = 20
n_units = 10
res = 1000
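The loop that generates the spike times and fills the table is folded here; a minimal sketch of the idea (the assumed duration, the uniform-random spike times, and the "good" quality label are illustrative assumptions) is:

duration = 20.0  # seconds of simulated activity (assumed)
for _ in range(n_units):
    # draw a random number of spikes and sort their times
    n_spikes = np.random.poisson(firing_rate * duration)
    spike_times = np.sort(np.random.uniform(0.0, duration, n_spikes))
    # "quality" fills the custom column added with add_unit_column above
    nwbfile.add_unit(spike_times=spike_times, quality="good")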
@@ -272,7 +270,6 @@
#######################
# The :py:class:`~pynwb.misc.Units` table can also be converted to a pandas :py:class:`~pandas.DataFrame`.


nwbfile.units.to_dataframe()

#######################
@@ -315,7 +312,6 @@
# Once you have finished adding all of your data to the :py:class:`~pynwb.file.NWBFile`,
# write the file with :py:class:`~pynwb.NWBHDF5IO`.


with NWBHDF5IO("ecephys_tutorial.nwb", "w") as io:
io.write(nwbfile)

76 changes: 22 additions & 54 deletions docs/gallery/domain/ophys.py
@@ -59,7 +59,8 @@
lab="Bag End Laboratory",
institution="University of Middle Earth at the Shire",
experiment_description="I went on an adventure to reclaim vast treasures.",
session_id="LONELYMTN001",
keywords=["ecephys", "exploration", "wanderlust"],
related_publications="doi:10.1016/j.neuron.2016.12.011",
)

####################
@@ -87,7 +88,6 @@
# Create a :py:class:`~pynwb.device.Device` named ``"Microscope"`` in the :py:class:`~pynwb.file.NWBFile` object. Then
# create an :py:class:`~pynwb.ophys.OpticalChannel` named ``"OpticalChannel"``.


device = nwbfile.create_device(
name="Microscope",
description="My two-photon microscope",
@@ -123,39 +123,23 @@
# -----------------
# Now that we have our :py:class:`~pynwb.ophys.ImagingPlane`, we can create a
# :py:class:`~pynwb.ophys.OnePhotonSeries` object to store raw one-photon imaging data.
# Here, we have two options. The first option is to supply the raw image data to PyNWB,
# using the data argument. The second option is to provide a path to the image files.
# These two options have trade-offs, so it is worth considering how you want to store
# this data.

# using internal data. this data will be stored inside the NWB file
one_p_series1 = OnePhotonSeries(
name="OnePhotonSeries_internal",

# the image data will be stored inside the NWB file
one_p_series = OnePhotonSeries(
name="OnePhotonSeries",
description="Raw 1p data",
data=np.ones((1000, 100, 100)),
imaging_plane=imaging_plane,
rate=1.0,
unit="normalized amplitude",
)

# using external data. only the file paths will be stored inside the NWB file
one_p_series2 = OnePhotonSeries(
name="OnePhotonSeries_external",
dimension=[100, 100],
external_file=["images.tiff"],
imaging_plane=imaging_plane,
starting_frame=[0],
format="external",
starting_time=0.0,
rate=1.0,
)

####################
# Since these one-photon data are acquired data, we will add the
# :py:class:`~pynwb.ophys.OnePhotonSeries` object to the :py:class:`~pynwb.file.NWBFile`
# as acquired data.

nwbfile.add_acquisition(one_p_series1)
nwbfile.add_acquisition(one_p_series2)
nwbfile.add_acquisition(one_p_series)

####################
# Two-photon Series
@@ -178,29 +162,17 @@
# :align: center
#

# using internal data. this data will be stored inside the NWB file
two_p_series1 = TwoPhotonSeries(
name="TwoPhotonSeries1",
# the image data will be stored inside the NWB file
two_p_series = TwoPhotonSeries(
name="TwoPhotonSeries",
description="Raw 2p data",
data=np.ones((1000, 100, 100)),
imaging_plane=imaging_plane,
rate=1.0,
unit="normalized amplitude",
)

# using external data. only the file paths will be stored inside the NWB file
two_p_series2 = TwoPhotonSeries(
name="TwoPhotonSeries2",
dimension=[100, 100],
external_file=["images.tiff"],
imaging_plane=imaging_plane,
starting_frame=[0],
format="external",
starting_time=0.0,
rate=1.0,
)

nwbfile.add_acquisition(two_p_series1)
nwbfile.add_acquisition(two_p_series2)
nwbfile.add_acquisition(two_p_series)

####################
# Motion Correction (optional)
@@ -212,6 +184,7 @@

corrected = ImageSeries(
name="corrected", # this must be named "corrected"
description="A motion corrected image stack",
data=np.ones((1000, 100, 100)),
unit="na",
format="raw",
@@ -221,6 +194,7 @@

xy_translation = TimeSeries(
name="xy_translation",
description="x,y translation in pixels",
data=np.ones((1000, 2)),
unit="pixels",
starting_time=0.0,
@@ -229,7 +203,7 @@

corrected_image_stack = CorrectedImageStack(
corrected=corrected,
original=one_p_series1,
original=one_p_series,
xy_translation=xy_translation,
)

@@ -240,7 +214,6 @@
# physiology data and add the motion correction data to the :py:class:`~pynwb.file.NWBFile`.
#


ophys_module = nwbfile.create_processing_module(
name="ophys", description="optical physiology processed data"
)
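The step that registers the motion correction output in this module is folded below this hunk; a sketch of that step (assuming the MotionCorrection container from pynwb.ophys) is:

from pynwb.ophys import MotionCorrection

# wrap the CorrectedImageStack in a MotionCorrection container and
# store it in the "ophys" processing module
motion_correction = MotionCorrection(corrected_image_stacks=[corrected_image_stack])
ophys_module.add(motion_correction)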
@@ -295,14 +268,13 @@
# Then we will add the :py:class:`~pynwb.ophys.ImageSegmentation` object
# to the previously created :py:class:`~pynwb.base.ProcessingModule`.


img_seg = ImageSegmentation()

ps = img_seg.create_plane_segmentation(
name="PlaneSegmentation",
description="output from segmenting my favorite imaging plane",
imaging_plane=imaging_plane,
reference_images=one_p_series1, # optional
reference_images=one_p_series, # optional
)

ophys_module.add(img_seg)
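The loop that adds ROIs to this plane segmentation is folded out of the diff; a minimal sketch using image masks (the 100 x 100 mask shape and the small square ROIs are illustrative assumptions) is:

for _ in range(30):
    # build an image mask with a small square of active pixels
    image_mask = np.zeros((100, 100))
    x = np.random.randint(0, 95)
    y = np.random.randint(0, 95)
    image_mask[x : x + 5, y : y + 5] = 1
    # each mask becomes one ROI row in the PlaneSegmentation table
    ps.add_roi(image_mask=image_mask)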
@@ -348,7 +320,7 @@
name="PlaneSegmentation2",
description="output from segmenting my favorite imaging plane",
imaging_plane=imaging_plane,
reference_images=one_p_series1, # optional
reference_images=one_p_series, # optional
)

for _ in range(30):
@@ -382,7 +354,7 @@
name="PlaneSegmentation3",
description="output from segmenting my favorite imaging plane",
imaging_plane=imaging_plane,
reference_images=one_p_series1, # optional
reference_images=one_p_series, # optional
)

from itertools import product
@@ -453,9 +425,9 @@
# Then we create a :py:class:`~pynwb.ophys.RoiResponseSeries` object to store fluorescence
# data for those two ROIs.


roi_resp_series = RoiResponseSeries(
name="RoiResponseSeries",
description="Fluorescence responses for two ROIs",
data=np.ones((50, 2)), # 50 samples, 2 ROIs
rois=rt_region,
unit="lumens",
@@ -484,7 +456,6 @@
# :alt: fluorescence UML diagram
# :align: center


fl = Fluorescence(roi_response_series=roi_resp_series)
ophys_module.add(fl)

@@ -503,7 +474,6 @@
# :py:class:`~pynwb.file.NWBFile`, make sure to write the file.
# IO operations are carried out using :py:class:`~pynwb.NWBHDF5IO`.


with NWBHDF5IO("ophys_tutorial.nwb", "w") as io:
io.write(nwbfile)

@@ -525,10 +495,9 @@
# with the name of the :py:class:`~pynwb.ophys.RoiResponseSeries` object,
# which we named ``"RoiResponseSeries"``.


with NWBHDF5IO("ophys_tutorial.nwb", "r") as io:
read_nwbfile = io.read()
print(read_nwbfile.acquisition["TwoPhotonSeries1"])
print(read_nwbfile.acquisition["TwoPhotonSeries"])
print(read_nwbfile.processing["ophys"])
print(read_nwbfile.processing["ophys"]["Fluorescence"])
print(read_nwbfile.processing["ophys"]["Fluorescence"]["RoiResponseSeries"])
@@ -545,11 +514,10 @@
# Load and print all the data values of the :py:class:`~pynwb.ophys.RoiResponseSeries`
# object representing the fluorescence data.


with NWBHDF5IO("ophys_tutorial.nwb", "r") as io:
read_nwbfile = io.read()

print(read_nwbfile.acquisition["TwoPhotonSeries1"])
print(read_nwbfile.acquisition["TwoPhotonSeries"])
print(read_nwbfile.processing["ophys"]["Fluorescence"]["RoiResponseSeries"].data[:])

####################
Expand Down
11 changes: 3 additions & 8 deletions docs/gallery/general/plot_file.py
@@ -76,13 +76,8 @@
Processing Modules
^^^^^^^^^^^^^^^^^^
Processing modules are objects that group together common analyses done during processing of data.
To standardize the storage of
common analyses, NWB provides the concept of an :py:class:`~pynwb.core.NWBDataInterface`, where the output of
common analyses are represented as objects that extend the :py:class:`~pynwb.core.NWBDataInterface` class.
In most cases, you will not need to interact with the :py:class:`~pynwb.core.NWBDataInterface` class directly.
More commonly, you will be creating instances of classes that extend this class.
Processing modules are objects that group together common analyses done during processing of data. They
often hold objects of several different processing/analysis data types.
.. seealso::
@@ -168,7 +163,7 @@
"Baggins, Bilbo",
], # optional
lab="Bag End Laboratory", # optional
institution="University of My Institution", # optional
institution="University of Middle Earth at the Shire", # optional
experiment_description="I went on an adventure to reclaim vast treasures.", # optional
keywords=["behavior", "exploration", "wanderlust"], # optional
related_publications="doi:10.1016/j.neuron.2016.12.011", # optional
