From 5e1a504377b58ce83662342bf3fc8c2265aca99a Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Wed, 1 Mar 2023 09:02:58 -0800 Subject: [PATCH 01/15] Revamp LabeledEvents and add new table types --- spec/ndx-events.extensions.yaml | 103 ++++++------- spec/ndx-events.namespace.yaml | 2 +- src/pynwb/ndx_events/__init__.py | 2 +- src/pynwb/ndx_events/events.py | 84 +++-------- src/pynwb/ndx_events/io/events.py | 90 +++++------ src/pynwb/tests/test_example_usage.py | 85 +++++++---- src/spec/create_extension_spec.py | 208 +++++++++++++------------- 7 files changed, 264 insertions(+), 310 deletions(-) diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index d52ba98..c8d557c 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -1,11 +1,13 @@ groups: - neurodata_type_def: Events neurodata_type_inc: NWBDataInterface - doc: A list of timestamps, stored in seconds, of an event. + doc: A list of timestamps, stored in seconds, of an event type. For example, this + neurodata type could be used to store all the times that a nosepoke was detected. + The name may be set to 'nosepoke_onset' attributes: - name: description dtype: text - doc: Description of the event. + doc: Description of the event type. datasets: - name: timestamps dtype: float32 @@ -25,19 +27,30 @@ groups: doc: The smallest possible difference between two event times. Usually 1 divided by the event time sampling rate on the data acquisition system. required: false +- neurodata_type_def: EventTypesTable + neurodata_type_inc: DynamicTable + doc: Labels and other metadata associated with each event type. + datasets: + - name: label + neurodata_type_inc: VectorData + dtype: text + doc: Label for each event type. - neurodata_type_def: LabeledEvents neurodata_type_inc: Events - doc: A list of timestamps, stored in seconds, of an event that can have different - labels. For example, this type could represent the times that reward was given, - as well as which of three different types of reward was given. In this case, the - 'data' dataset would contain values {0, 1, 2}, its 'labels' attribute would contain - three text elements, where the first (index 0) specifies the name of the reward - associated with data = 0, the second (index 1) specifies the name of the reward - associated with data = 1, etc. The labels do not have to start at 0 and do not - need to be continuous, e.g. the 'data' dataset could contain values {0, 10, 100}, - and the 'labels' attribute could contain 101 values, where labels[0] is 'No reward', - labels[10] is '10% reward', labels[100] is 'Full reward', and all other entries - in 'labels' are the empty string. + doc: "Event timestamps that can have different labels and other metadata. For example,\ + \ this type could represent the times that reward was given, as well as which\ + \ of three different types of reward was given. In this case, the 'data' dataset\ + \ would contain unique values {0, 1, 2}, and the 'labels' table would contain\ + \ three rows, one for each unique data value, and two columns: 'label', which\ + \ stores a unique name for each event type, and a custom column 'reward_type',\ + \ which stores information about the type of reward given. The values in row with\ + \ index i would represent the reward associated with a data value of i. 
For example,\ + \ 'timestamps' may contain values [0, 0.05, 0.15, 0.2], 'data' may contain values\ + \ [0, 1, 0, 2], and the 'labels' table may contain three rows, where the row at\ + \ index 0 has label='Users may specify custom columns in the 'labels' table to\ + \ store arbitrary metadata associated with each event type. The values in the\ + \ 'data' dataset do not have to be continuous and start at 0, but this is recommended\ + \ so that there are not empty rows in the 'labels' table." datasets: - name: data dtype: uint8 @@ -45,52 +58,30 @@ groups: - num_events shape: - null - doc: Unsigned integer labels that map onto strings using the mapping in the 'labels' - array attribute. This dataset should have the same number of elements as the - 'timestamps' dataset. - attributes: - - name: labels - dtype: text - dims: - - num_labels - shape: - - null - doc: Mapping from an unsigned integer (the zero-based index) to a string, used - to understand the values in the 'data' dataset. Use an empty string to represent - a label value that is not mapped to any text. -- neurodata_type_def: TTLs - neurodata_type_inc: LabeledEvents - doc: Data type to hold timestamps of TTL pulses. The 'data' dataset contains the - integer pulse values (or channel IDs), and the 'labels' dataset contains user-defined - labels associated with each pulse value (or channel ID). The value at index i - of the 'labels' dataset corresponds to a pulse value (or channel ID) of i in the - 'data' dataset. For example, the first value (index 0) of the 'labels' dataset - corresponds to a pulse value of 0. See the LabeledEvents type for more details. -- neurodata_type_def: AnnotatedEventsTable - neurodata_type_inc: DynamicTable - doc: Table to hold event timestamps and event metadata relevant to data preprocessing - and analysis. Each row corresponds to a different event type. Use the 'event_times' - dataset to store timestamps for each event type. Add user-defined columns to add - metadata for each event type or event time. + doc: Unsigned integer labels that map onto row indices in the 'event_types' table. + This dataset should have the same number of elements as the 'timestamps' dataset. + links: + - name: event_types + target_type: EventTypesTable + doc: Labels and other metadata associated with each event type, as accessed by + row index. +- neurodata_type_def: TTLTypesTable + neurodata_type_inc: EventTypesTable + doc: Labels and other metadata associated with each event type. datasets: - - name: event_times_index - neurodata_type_inc: VectorIndex - doc: Index into the event_times dataset. - - name: event_times - neurodata_type_inc: VectorData - dtype: float32 - doc: Event times for each event type. - attributes: - - name: resolution - dtype: float32 - doc: The smallest possible difference between two event times. Usually 1 divided - by the event time sampling rate on the data acquisition system. - required: false - name: label neurodata_type_inc: VectorData dtype: text doc: Label for each event type. - - name: event_description + - name: pulse_value neurodata_type_inc: VectorData - dtype: text - doc: Description for each event type. + dtype: int8 + doc: TTL pulse value for each event type. +- neurodata_type_def: TTLs + neurodata_type_inc: LabeledEvents + doc: Data type to hold timestamps of TTL pulses. + links: + - name: event_types + target_type: TTLTypesTable + doc: Labels and other metadata associated with each event type, as accessed by + row index. 
diff --git a/spec/ndx-events.namespace.yaml b/spec/ndx-events.namespace.yaml index 157abc0..dd788b3 100644 --- a/spec/ndx-events.namespace.yaml +++ b/spec/ndx-events.namespace.yaml @@ -13,4 +13,4 @@ namespaces: - VectorData - VectorIndex - source: ndx-events.extensions.yaml - version: 0.2.0 + version: 0.3.0 diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index 689c59c..c26af4a 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -22,4 +22,4 @@ load_namespaces(ndx_events_specpath) from . import io as __io # noqa: E402,F401 -from .events import Events, LabeledEvents, TTLs, AnnotatedEventsTable # noqa: E402,F401 +from .events import Events, EventTypesTable, LabeledEvents #, TTLs, AnnotatedEventsTable # noqa: E402,F401 diff --git a/src/pynwb/ndx_events/events.py b/src/pynwb/ndx_events/events.py index 50c0985..e97b691 100644 --- a/src/pynwb/ndx_events/events.py +++ b/src/pynwb/ndx_events/events.py @@ -1,10 +1,13 @@ import numpy as np -from pynwb import register_class +from pynwb import register_class, get_class from pynwb.core import NWBDataInterface, DynamicTable from hdmf.utils import docval, getargs, popargs, get_docval +EventTypesTable = get_class('EventTypesTable', 'ndx-events') +# TTLTypesTable = get_class('TTLTypesTable', 'ndx-events') + @register_class('Events', 'ndx-events') class Events(NWBDataInterface): """ @@ -61,32 +64,23 @@ class LabeledEvents(Events): "is passed, it will be converted to a numpy array of unsigned integer values. This dataset should " "have the same number of elements as the 'timestamps' dataset."), 'shape': (None,)}, - {'name': 'labels', 'type': ('array_data', 'data'), + {'name': 'event_types', 'type': EventTypesTable, 'doc': ("Mapping from an integer (the zero-based index) to a string, used to understand " "the integer values in the 'data' dataset. Use an empty string to represent " "a label value that is not mapped to any text. Use '' to represent any values " "that are None or empty. If the argument is not specified, the label " - "will be set to the string representation of the data value and '' for other values."), - 'shape': (None,), 'default': None}, + "will be set to the string representation of the data value and '' for other values.")}, *get_docval(Events.__init__, 'resolution')) def __init__(self, **kwargs): timestamps = getargs('timestamps', kwargs) - data, labels = popargs('data', 'labels', kwargs) + data, event_types = popargs('data', 'event_types', kwargs) super().__init__(**kwargs) if len(timestamps) != len(data): raise ValueError('Timestamps and data must have the same length: %d != %d' % (len(timestamps), len(data))) data = self.__check_label_indices_uint(data) self.data = data - if labels is None: - unique_indices = np.unique(data) - self.labels = [''] * int(max(unique_indices) + 1) - for k in unique_indices: - self.labels[k] = str(k) - else: - if None in labels: - raise ValueError("None values are not allowed in the labels array. Please use '' for undefined labels.") - self.labels = labels + self.event_types = event_types def __check_label_indices_uint(self, data): """Convert a list/tuple of integer label indices to a numpy array of unsigned integers. Raise error if negative @@ -110,56 +104,14 @@ def __check_label_indices_uint(self, data): return data -@register_class('TTLs', 'ndx-events') -class TTLs(LabeledEvents): - """ - Data type to hold timestamps of TTL pulses. 
The 'data' dataset contains the integer pulse values - (or channel IDs), and the 'labels' dataset contains user-defined labels associated with each pulse - value (or channel ID). The value at index i of the 'labels' dataset corresponds to a pulse value (or - channel ID) of i in the 'data' dataset. For example, the first value (index 0) of the 'labels' dataset - corresponds to a pulse value of 0. See the LabeledEvents type for more details. - """ - pass - - -@register_class('AnnotatedEventsTable', 'ndx-events') -class AnnotatedEventsTable(DynamicTable): - """ - Table to hold event timestamps and event metadata relevant to data preprocessing - and analysis. Each row corresponds to a different event type. Use the 'event_time' - dataset to store timestamps for each event type. Add user-defined columns to add - metadata for each event type or event time. - """ - - __fields__ = ( - 'resolution', - ) - - __columns__ = ( - {'name': 'event_times', 'description': 'Event times for each event type.', 'index': True}, - {'name': 'label', 'description': 'Label for each event type.'}, - {'name': 'event_description', 'description': 'Description for each event type.'} - # note that the name 'description' cannot be used because it is already an attribute on VectorData - ) - - @docval({'name': 'description', 'type': str, 'doc': 'Description of what is in this table'}, - {'name': 'name', 'type': str, 'doc': 'Name of this AnnotatedEventsTable', - 'default': 'AnnotatedEventsTable'}, - {'name': 'resolution', 'type': float, - 'doc': ('The smallest possible difference between two event times. Usually 1 divided ' - 'by the event time sampling rate on the data acquisition system.'), - 'default': None}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) - def __init__(self, **kwargs): - resolution = popargs('resolution', kwargs) - super().__init__(**kwargs) - self.resolution = resolution +# @register_class('TTLs', 'ndx-events') +# class TTLs(LabeledEvents): +# """ +# Data type to hold timestamps of TTL pulses. The 'data' dataset contains the integer pulse values +# (or channel IDs), and the 'labels' dataset contains user-defined labels associated with each pulse +# value (or channel ID). The value at index i of the 'labels' dataset corresponds to a pulse value (or +# channel ID) of i in the 'data' dataset. For example, the first value (index 0) of the 'labels' dataset +# corresponds to a pulse value of 0. See the LabeledEvents type for more details. 
+# """ +# pass - @docval({'name': 'label', 'type': str, 'doc': 'Label for each event type.'}, - {'name': 'event_description', 'type': str, 'doc': 'Description for each event type.'}, - {'name': 'event_times', 'type': 'array_data', 'doc': 'Event times for each event type.', 'shape': (None,)}, - {'name': 'id', 'type': int, 'doc': 'ID for each unit', 'default': None}, - allow_extra=True) - def add_event_type(self, **kwargs): - """Add an event type as a row to this table.""" - super().add_row(**kwargs) diff --git a/src/pynwb/ndx_events/io/events.py b/src/pynwb/ndx_events/io/events.py index 59f7906..f50811a 100644 --- a/src/pynwb/ndx_events/io/events.py +++ b/src/pynwb/ndx_events/io/events.py @@ -6,7 +6,7 @@ from hdmf.utils import getargs, docval from hdmf.spec import AttributeSpec -from ..events import Events, LabeledEvents, AnnotatedEventsTable +from ..events import Events, LabeledEvents #, AnnotatedEventsTable @register_map(Events) @@ -21,51 +21,51 @@ def __init__(self, spec): self.map_spec('resolution', timestamps_spec.get_attribute('resolution')) -@register_map(LabeledEvents) -class LabeledEventsMap(EventsMap): +# @register_map(LabeledEvents) +# class LabeledEventsMap(EventsMap): - def __init__(self, spec): - super().__init__(spec) - # map object attribute LabeledEvents.labels -> spec LabeledEvents/data.labels - data_spec = self.spec.get_dataset('data') - self.map_spec('labels', data_spec.get_attribute('labels')) +# def __init__(self, spec): +# super().__init__(spec) +# # map object attribute LabeledEvents.labels -> spec LabeledEvents/data.labels +# data_spec = self.spec.get_dataset('data') +# self.map_spec('labels', data_spec.get_attribute('labels')) -@register_map(AnnotatedEventsTable) -class AnnotatedEventsTableMap(DynamicTableMap): +# @register_map(AnnotatedEventsTable) +# class AnnotatedEventsTableMap(DynamicTableMap): - def __init__(self, spec): - super().__init__(spec) - # map object attribute AnnotatedEventsTable.resolution -> spec AnnotatedEventsTable/event_times.resolution - event_times_spec = self.spec.get_dataset('event_times') - self.map_spec('resolution', event_times_spec.get_attribute('resolution')) - - @DynamicTableMap.constructor_arg('resolution') - def resolution_carg(self, builder, manager): - # on construct, map builder for AnnotatedEventsTable.datasets['event_times'].attributes['resolution'] - # -> AnnotatedEventsTable.__init__ argument 'resolution' - if 'event_times' in builder: - return builder['event_times'].attributes.get('resolution') - return None - - -@register_map(VectorData) -class VectorDataMap(ObjectMapper): - - # TODO when merging into NWB core, fold this into pynwb.io.core.VectorDataMap - - @docval({"name": "spec", "type": AttributeSpec, "doc": "the spec to get the attribute value for"}, - {"name": "container", "type": VectorData, "doc": "the container to get the attribute value from"}, - {"name": "manager", "type": BuildManager, "doc": "the BuildManager used for managing this build"}, - returns='the value of the attribute') - def get_attr_value(self, **kwargs): - ''' Get the value of the attribute corresponding to this spec from the given container ''' - spec, container, manager = getargs('spec', 'container', 'manager', kwargs) - - # on build of VectorData objects, map object attribute AnnotatedEventsTable.resolution - # -> spec AnnotatedEventsTable/event_times.resolution - if isinstance(container.parent, AnnotatedEventsTable): - if container.name == 'event_times': - if spec.name == 'resolution': - return container.parent.resolution - return 
super().get_attr_value(**kwargs) +# def __init__(self, spec): +# super().__init__(spec) +# # map object attribute AnnotatedEventsTable.resolution -> spec AnnotatedEventsTable/event_times.resolution +# event_times_spec = self.spec.get_dataset('event_times') +# self.map_spec('resolution', event_times_spec.get_attribute('resolution')) + +# @DynamicTableMap.constructor_arg('resolution') +# def resolution_carg(self, builder, manager): +# # on construct, map builder for AnnotatedEventsTable.datasets['event_times'].attributes['resolution'] +# # -> AnnotatedEventsTable.__init__ argument 'resolution' +# if 'event_times' in builder: +# return builder['event_times'].attributes.get('resolution') +# return None + + +# @register_map(VectorData) +# class VectorDataMap(ObjectMapper): + +# # TODO when merging into NWB core, fold this into pynwb.io.core.VectorDataMap + +# @docval({"name": "spec", "type": AttributeSpec, "doc": "the spec to get the attribute value for"}, +# {"name": "container", "type": VectorData, "doc": "the container to get the attribute value from"}, +# {"name": "manager", "type": BuildManager, "doc": "the BuildManager used for managing this build"}, +# returns='the value of the attribute') +# def get_attr_value(self, **kwargs): +# ''' Get the value of the attribute corresponding to this spec from the given container ''' +# spec, container, manager = getargs('spec', 'container', 'manager', kwargs) + +# # on build of VectorData objects, map object attribute AnnotatedEventsTable.resolution +# # -> spec AnnotatedEventsTable/event_times.resolution +# if isinstance(container.parent, AnnotatedEventsTable): +# if container.name == 'event_times': +# if spec.name == 'resolution': +# return container.parent.resolution +# return super().get_attr_value(**kwargs) diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index 4be8ffe..8637c6a 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -2,7 +2,7 @@ def test_example_usage(): from datetime import datetime from pynwb import NWBFile, NWBHDF5IO - from ndx_events import LabeledEvents, AnnotatedEventsTable + from ndx_events import LabeledEvents, EventTypesTable nwb = NWBFile( session_description='session description', @@ -10,6 +10,20 @@ def test_example_usage(): session_start_time=datetime.now().astimezone() ) + event_types_table = EventTypesTable( + name="EventTypesTable", + description="metadata about event types", + ) + + event_types_table.add_row(id=0, label="trial start") + event_types_table.add_row(id=1, label="cue onset") + event_types_table.add_row(id=2, label="cue offset") + event_types_table.add_row(id=3, label="response left") + event_types_table.add_row(id=4, label="response right") + event_types_table.add_row(id=5, label="reward") + + nwb.add_acquisition(event_types_table) # place here for now + # create a new LabeledEvents type to hold events recorded from the data acquisition system events = LabeledEvents( name='LabeledEvents', @@ -17,44 +31,44 @@ def test_example_usage(): timestamps=[0., 0.5, 0.6, 2., 2.05, 3., 3.5, 3.6, 4.], resolution=1e-5, # resolution of the timestamps, i.e., smallest possible difference between timestamps data=[0, 1, 2, 3, 5, 0, 1, 2, 4], - labels=['trial_start', 'cue_onset', 'cue_offset', 'response_left', 'response_right', 'reward'] + event_types=event_types_table, ) # add the LabeledEvents type to the acquisition group of the NWB file nwb.add_acquisition(events) - # create a new AnnotatedEventsTable type to hold annotated events - # 
each row of the table represents a single event type - annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 # resolution of the timestamps, i.e., smallest possible difference between timestamps - ) - # add a custom indexed (ragged) column to represent whether each event time was a bad event - annotated_events.add_column( - name='bad_event', - description='whether each event time should be excluded', - index=True - ) - # add an event type (row) to the AnnotatedEventsTable instance - annotated_events.add_event_type( - label='Reward', - event_description='Times when the subject received juice reward.', - event_times=[1., 2., 3.], - bad_event=[False, False, True], - id=3 - ) - # convert the AnnotatedEventsTable to a pandas.DataFrame and print it - print(annotated_events.to_dataframe()) + # # create a new AnnotatedEventsTable type to hold annotated events + # # each row of the table represents a single event type + # annotated_events = AnnotatedEventsTable( + # name='AnnotatedEventsTable', + # description='annotated events from my experiment', + # resolution=1e-5 # resolution of the timestamps, i.e., smallest possible difference between timestamps + # ) + # # add a custom indexed (ragged) column to represent whether each event time was a bad event + # annotated_events.add_column( + # name='bad_event', + # description='whether each event time should be excluded', + # index=True + # ) + # # add an event type (row) to the AnnotatedEventsTable instance + # annotated_events.add_event_type( + # label='Reward', + # event_description='Times when the subject received juice reward.', + # event_times=[1., 2., 3.], + # bad_event=[False, False, True], + # id=3 + # ) + # # convert the AnnotatedEventsTable to a pandas.DataFrame and print it + # print(annotated_events.to_dataframe()) - # create a processing module in the NWB file to hold processed events data - events_module = nwb.create_processing_module( - name='events', - description='processed event data' - ) + # # create a processing module in the NWB file to hold processed events data + # events_module = nwb.create_processing_module( + # name='events', + # description='processed event data' + # ) - # add the AnnotatedEventsTable instance to the processing module - events_module.add(annotated_events) + # # add the AnnotatedEventsTable instance to the processing module + # events_module.add(annotated_events) # write nwb file filename = 'test.nwb' @@ -66,7 +80,12 @@ def test_example_usage(): nwb = io.read() print(nwb) # access the LabeledEvents container by name from the NWBFile acquisition group and print it + print(nwb.acquisition['EventTypesTable']) print(nwb.acquisition['LabeledEvents']) # access the AnnotatedEventsTable by name from the 'events' processing module, convert it to # a pandas.DataFrame, and print that - print(nwb.processing['events']['AnnotatedEventsTable'].to_dataframe()) + # print(nwb.processing['events']['AnnotatedEventsTable'].to_dataframe()) + + +if __name__ == "__main__": + test_example_usage() \ No newline at end of file diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 531298d..7b31367 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -2,16 +2,14 @@ import os.path -from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBGroupSpec, NWBAttributeSpec -from pynwb.spec import NWBDatasetSpec +from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBGroupSpec, 
NWBAttributeSpec, NWBDatasetSpec, NWBLinkSpec def main(): - # these arguments were auto-generated from your cookiecutter inputs ns_builder = NWBNamespaceBuilder( doc="""NWB extension for storing timestamped event and TTL pulse data""", name="""ndx-events""", - version="""0.2.0""", + version="""0.3.0""", author=list(map(str.strip, """Ryan Ly""".split(','))), contact=list(map(str.strip, """rly@lbl.gov""".split(','))) ) @@ -21,135 +19,129 @@ def main(): ns_builder.include_type('VectorData', namespace='core') ns_builder.include_type('VectorIndex', namespace='core') - timestamps = NWBDatasetSpec( - name='timestamps', - dtype='float32', - dims=['num_events'], - shape=[None], - doc=('Event timestamps, in seconds, relative to the common experiment master-clock stored in ' - 'NWBFile.timestamps_reference_time.'), - attributes=[ - NWBAttributeSpec( - name='unit', - dtype='text', - value='seconds', - doc="Unit of measurement for timestamps, which is fixed to 'seconds'.", - ), - NWBAttributeSpec( - name='resolution', - dtype='float32', - doc=('The smallest possible difference between two event times. Usually 1 divided by the event time ' - 'sampling rate on the data acquisition system.'), - required=False, - ) - ] - ) - events = NWBGroupSpec( neurodata_type_def='Events', neurodata_type_inc='NWBDataInterface', - doc='A list of timestamps, stored in seconds, of an event.', + doc=("A list of timestamps, stored in seconds, of an event type. For example, this neurodata type could be " + "used to store all the times that a nosepoke was detected. The name may be set to 'nosepoke_onset'"), attributes=[ NWBAttributeSpec( name='description', dtype='text', - doc='Description of the event.', + doc='Description of the event type.', + ), + ], + datasets=[ + NWBDatasetSpec( + name='timestamps', + dtype='float32', + dims=['num_events'], + shape=[None], + doc=('Event timestamps, in seconds, relative to the common experiment master-clock stored in ' + 'NWBFile.timestamps_reference_time.'), + attributes=[ + NWBAttributeSpec( + name='unit', + dtype='text', + value='seconds', + doc="Unit of measurement for timestamps, which is fixed to 'seconds'.", + ), + NWBAttributeSpec( + name='resolution', + dtype='float32', + doc=('The smallest possible difference between two event times. Usually 1 divided by the ' + 'event time sampling rate on the data acquisition system.'), + required=False, + ) + ], ), ], - datasets=[timestamps] - ) - - labels = NWBAttributeSpec( - name='labels', - dtype='text', - dims=['num_labels'], - shape=[None], - doc=("Mapping from an unsigned integer (the zero-based index) to a string, used to understand the " - "values in the 'data' dataset. Use an empty string to represent a label value that is not " - "mapped to any text."), ) - data = NWBDatasetSpec( - name='data', - dtype='uint8', - dims=['num_events'], - shape=[None], - doc=("Unsigned integer labels that map onto strings using the mapping in the 'labels' array attribute. 
This " - "dataset should have the same number of elements as the 'timestamps' dataset."), - attributes=[labels], + event_types_table = NWBGroupSpec( + neurodata_type_def="EventTypesTable", + neurodata_type_inc='DynamicTable', + doc=("Labels and other metadata associated with each event type."), + datasets=[ + NWBDatasetSpec( + name='label', + neurodata_type_inc='VectorData', + dtype='text', + doc='Label for each event type.', + ), + ], ) labeled_events = NWBGroupSpec( neurodata_type_def='LabeledEvents', neurodata_type_inc='Events', - doc=("A list of timestamps, stored in seconds, of an event that can have different labels. For example, " + doc=("Event timestamps that can have different labels and other metadata. " + "For example, " "this type could represent the times that reward was given, as well as which of three different " - "types of reward was given. In this case, the 'data' dataset would contain values {0, 1, 2}, " - "its 'labels' attribute would contain three text elements, where the first (index 0) specifies the " - "name of the reward associated with data = 0, the second (index 1) specifies the name of the " - "reward associated with data = 1, etc. The labels do not have to start at 0 and do not need to " - "be continuous, e.g. the 'data' dataset could contain values {0, 10, 100}, and the 'labels' " - "attribute could contain 101 values, where labels[0] is 'No reward', labels[10] is '10% reward', " - "labels[100] is 'Full reward', and all other entries in 'labels' are the empty string."), - datasets=[data], - ) - - ttls = NWBGroupSpec( - neurodata_type_def='TTLs', - neurodata_type_inc='LabeledEvents', - doc=("Data type to hold timestamps of TTL pulses. The 'data' dataset contains the integer pulse values " - "(or channel IDs), and the 'labels' dataset contains user-defined labels associated with each pulse " - "value (or channel ID). The value at index i of the 'labels' dataset corresponds to a pulse value (or " - "channel ID) of i in the 'data' dataset. For example, the first value (index 0) of the 'labels' dataset " - "corresponds to a pulse value of 0. See the LabeledEvents type for more details."), - ) - - event_times_index = NWBDatasetSpec( - name='event_times_index', - neurodata_type_inc='VectorIndex', - doc=('Index into the event_times dataset.'), - ) - - event_times = NWBDatasetSpec( - name='event_times', - neurodata_type_inc='VectorData', - dtype='float32', - doc='Event times for each event type.', - attributes=[ - NWBAttributeSpec( - name='resolution', - dtype='float32', - doc=('The smallest possible difference between two event times. Usually 1 divided by the event time ' - 'sampling rate on the data acquisition system.'), - required=False, + "types of reward was given. In this case, the 'data' dataset would contain unique values {0, 1, 2}, and " + "the 'labels' table would contain three rows, one for each unique data value, and two columns: 'label', " + "which stores a unique name for each event type, and a custom column 'reward_type', which stores " + "information about the type of reward given. The values in row with index i would represent the reward " + "associated with a data value of i. For example, 'timestamps' may contain values [0, 0.05, 0.15, 0.2], " + "'data' may contain values [0, 1, 0, 2], and the 'labels' table may contain three rows, where the row " + "at index 0 has label='" + "Users may specify custom columns " + "in the 'labels' table to store arbitrary metadata associated with each event type. 
" + "The values in the 'data' dataset do not have to be continuous and start at 0, " + "but this is recommended so that there are not empty rows in the 'labels' table."), + links=[ + NWBLinkSpec( + name="event_types", + target_type='EventTypesTable', + doc=("Labels and other metadata associated with each event type, as accessed by row index."), + ) + ], + datasets=[ + NWBDatasetSpec( + name='data', + dtype='uint8', + dims=['num_events'], + shape=[None], + doc=("Unsigned integer labels that map onto row indices in the 'event_types' table. This " + "dataset should have the same number of elements as the 'timestamps' dataset."), ), ], ) - label_col = NWBDatasetSpec( - name='label', - neurodata_type_inc='VectorData', - dtype='text', - doc='Label for each event type.', - ) - - description_col = NWBDatasetSpec( - name='event_description', - neurodata_type_inc='VectorData', - dtype='text', - doc='Description for each event type.', + ttl_types_table = NWBGroupSpec( + neurodata_type_def="TTLTypesTable", + neurodata_type_inc='EventTypesTable', + doc=("Labels and other metadata associated with each event type."), + datasets=[ + NWBDatasetSpec( + name='label', + neurodata_type_inc='VectorData', + dtype='text', + doc='Label for each event type.', + ), + NWBDatasetSpec( + name='pulse_value', + neurodata_type_inc='VectorData', + dtype='int8', + doc='TTL pulse value for each event type.', + ), + ], ) - annotated_events_table = NWBGroupSpec( - neurodata_type_def='AnnotatedEventsTable', - neurodata_type_inc='DynamicTable', - doc=("Table to hold event timestamps and event metadata relevant to data preprocessing and analysis. Each " - "row corresponds to a different event type. Use the 'event_times' dataset to store timestamps for each " - "event type. Add user-defined columns to add metadata for each event type or event time."), - datasets=[event_times_index, event_times, label_col, description_col], + ttls = NWBGroupSpec( + neurodata_type_def='TTLs', + neurodata_type_inc='LabeledEvents', + doc=("Data type to hold timestamps of TTL pulses."), + links=[ + NWBLinkSpec( + name="event_types", + target_type='TTLTypesTable', + doc=("Labels and other metadata associated with each event type, as accessed by row index."), + ) + ], ) - new_data_types = [events, labeled_events, ttls, annotated_events_table] + new_data_types = [events, event_types_table, labeled_events, ttl_types_table, ttls] # export the spec to yaml files in the spec folder output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec')) From 200855b2c99a4023f8e45f6782ea71c475f983aa Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Mon, 27 Mar 2023 09:03:15 -0700 Subject: [PATCH 02/15] Latest refactoring --- spec/ndx-events.extensions.yaml | 117 ++++++++++++-------- spec/ndx-events.namespace.yaml | 1 + src/pynwb/ndx_events/__init__.py | 2 +- src/pynwb/ndx_events/events.py | 123 ++++++++++----------- src/pynwb/ndx_events/io/events.py | 12 +-- src/pynwb/tests/test_example_usage.py | 84 ++++++--------- src/spec/create_extension_spec.py | 149 +++++++++++++++++--------- 7 files changed, 271 insertions(+), 217 deletions(-) diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index c8d557c..56304ef 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -1,9 +1,9 @@ groups: - neurodata_type_def: Events neurodata_type_inc: NWBDataInterface - doc: A list of timestamps, stored in seconds, of an event type. 
For example, this - neurodata type could be used to store all the times that a nosepoke was detected. - The name may be set to 'nosepoke_onset' + doc: A simple list of timestamps, stored in seconds, of an event type. For example, + this neurodata type could be used to store all the times that a nosepoke was detected. + The name may be set to 'nosepoke_onset'. attributes: - name: description dtype: text @@ -29,59 +29,88 @@ groups: required: false - neurodata_type_def: EventTypesTable neurodata_type_inc: DynamicTable - doc: Labels and other metadata associated with each event type. + doc: A column-based table to store information about each event type, such as name, + one event type per row. datasets: - - name: label + - name: event_name + neurodata_type_inc: VectorData + dtype: text + doc: Name of each event type. + - name: hed neurodata_type_inc: VectorData dtype: text - doc: Label for each event type. -- neurodata_type_def: LabeledEvents - neurodata_type_inc: Events - doc: "Event timestamps that can have different labels and other metadata. For example,\ - \ this type could represent the times that reward was given, as well as which\ - \ of three different types of reward was given. In this case, the 'data' dataset\ - \ would contain unique values {0, 1, 2}, and the 'labels' table would contain\ - \ three rows, one for each unique data value, and two columns: 'label', which\ - \ stores a unique name for each event type, and a custom column 'reward_type',\ - \ which stores information about the type of reward given. The values in row with\ - \ index i would represent the reward associated with a data value of i. For example,\ - \ 'timestamps' may contain values [0, 0.05, 0.15, 0.2], 'data' may contain values\ - \ [0, 1, 0, 2], and the 'labels' table may contain three rows, where the row at\ - \ index 0 has label='Users may specify custom columns in the 'labels' table to\ - \ store arbitrary metadata associated with each event type. The values in the\ - \ 'data' dataset do not have to be continuous and start at 0, but this is recommended\ - \ so that there are not empty rows in the 'labels' table." - datasets: - - name: data - dtype: uint8 dims: - num_events shape: - null - doc: Unsigned integer labels that map onto row indices in the 'event_types' table. - This dataset should have the same number of elements as the 'timestamps' dataset. - links: - - name: event_types - target_type: EventTypesTable - doc: Labels and other metadata associated with each event type, as accessed by - row index. -- neurodata_type_def: TTLTypesTable - neurodata_type_inc: EventTypesTable - doc: Labels and other metadata associated with each event type. + doc: Optional column containing the Hierarchical Event Descriptor of each event. + quantity: '?' +- neurodata_type_def: EventsTable + neurodata_type_inc: DynamicTable + doc: A column-based table to store information about events, one event per row. datasets: - - name: label + - name: timestamp + neurodata_type_inc: VectorData + dtype: float32 + dims: + - num_events + shape: + - null + doc: The time that the event occurred, in seconds. + attributes: + - name: unit + dtype: text + value: seconds + doc: Unit of measurement for timestamps, which is fixed to 'seconds'. + - name: resolution + dtype: float32 + doc: The smallest possible difference between two event times. Usually 1 divided + by the event time sampling rate on the data acquisition system. 
+ required: false + - name: event_type + neurodata_type_inc: DynamicTableRegion + dims: + - num_events + shape: + - null + doc: The type of event that occurred. This is represented as a reference to a + row of the EventTypesTable. + - name: value neurodata_type_inc: VectorData dtype: text - doc: Label for each event type. + dims: + - num_events + shape: + - null + doc: Optional column containing the text value of each event. + quantity: '?' + - name: duration + neurodata_type_inc: VectorData + dtype: float32 + dims: + - num_events + shape: + - null + doc: Optional column containing the duration of each event, in seconds. + quantity: '?' +- neurodata_type_def: TtlTypesTable + neurodata_type_inc: EventTypesTable + doc: A column-based table to store information about each TTL type, such as name + and pulse value, one TTL type per row. + datasets: - name: pulse_value neurodata_type_inc: VectorData dtype: int8 doc: TTL pulse value for each event type. -- neurodata_type_def: TTLs - neurodata_type_inc: LabeledEvents +- neurodata_type_def: TtlsTable + neurodata_type_inc: EventsTable doc: Data type to hold timestamps of TTL pulses. - links: - - name: event_types - target_type: TTLTypesTable - doc: Labels and other metadata associated with each event type, as accessed by - row index. + datasets: + - name: event_type + neurodata_type_inc: DynamicTableRegion + dims: + - num_events + shape: + - null + doc: The type of TTL that occured. This is represented as a reference to a row + of the TtlTypesTable. diff --git a/spec/ndx-events.namespace.yaml b/spec/ndx-events.namespace.yaml index dd788b3..7c3c919 100644 --- a/spec/ndx-events.namespace.yaml +++ b/spec/ndx-events.namespace.yaml @@ -12,5 +12,6 @@ namespaces: - DynamicTable - VectorData - VectorIndex + - DynamicTableRegion - source: ndx-events.extensions.yaml version: 0.3.0 diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index c26af4a..28bb864 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -22,4 +22,4 @@ load_namespaces(ndx_events_specpath) from . import io as __io # noqa: E402,F401 -from .events import Events, EventTypesTable, LabeledEvents #, TTLs, AnnotatedEventsTable # noqa: E402,F401 +from .events import Events, EventTypesTable, EventsTable #, TTLs, AnnotatedEventsTable # noqa: E402,F401 diff --git a/src/pynwb/ndx_events/events.py b/src/pynwb/ndx_events/events.py index e97b691..482601d 100644 --- a/src/pynwb/ndx_events/events.py +++ b/src/pynwb/ndx_events/events.py @@ -6,6 +6,7 @@ EventTypesTable = get_class('EventTypesTable', 'ndx-events') +EventsTable = get_class('EventsTable', 'ndx-events') # TTLTypesTable = get_class('TTLTypesTable', 'ndx-events') @register_class('Events', 'ndx-events') @@ -38,70 +39,70 @@ def __init__(self, **kwargs): self.fields['unit'] = 'seconds' -@register_class('LabeledEvents', 'ndx-events') -class LabeledEvents(Events): - """ - A list of timestamps, stored in seconds, of an event that can have different - labels. For example, this type could represent the times that reward was given, - as well as which of three different types of reward was given. In this case, the - 'data' dataset would contain values {0, 1, 2}, and the 'labels' dataset - would contain three text elements, where the first (index 0) specifies the name - of the reward associated with data = 0, the second (index 1) specifies - the name of the reward associated with data = 1, etc. The labels do not - have to start at 0 and do not need to be sequential, e.g. 
the 'data' dataset - could contain values {0, 10, 100}, and the 'labels' dataset could contain 101 - values, where labels[0] is 'No reward', labels[10] is '10% reward', labels[100] - is 'Full reward', and all other entries in 'labels' are the empty string. - """ +# @register_class('LabeledEvents', 'ndx-events') +# class LabeledEvents(Events): +# """ +# A list of timestamps, stored in seconds, of an event that can have different +# labels. For example, this type could represent the times that reward was given, +# as well as which of three different types of reward was given. In this case, the +# 'data' dataset would contain values {0, 1, 2}, and the 'labels' dataset +# would contain three text elements, where the first (index 0) specifies the name +# of the reward associated with data = 0, the second (index 1) specifies +# the name of the reward associated with data = 1, etc. The labels do not +# have to start at 0 and do not need to be sequential, e.g. the 'data' dataset +# could contain values {0, 10, 100}, and the 'labels' dataset could contain 101 +# values, where labels[0] is 'No reward', labels[10] is '10% reward', labels[100] +# is 'Full reward', and all other entries in 'labels' are the empty string. +# """ - __nwbfields__ = ('data', - 'labels') +# __nwbfields__ = ('data', +# 'labels') - @docval(*get_docval(Events.__init__, 'name', 'description', 'timestamps'), # required - {'name': 'data', 'type': ('array_data', 'data'), # required - 'doc': ("Unsigned integer labels that map onto strings using the mapping in the 'labels' dataset. " - "Values must be 0 or greater and need not be sequential. If a list/tuple/array of integer values " - "is passed, it will be converted to a numpy array of unsigned integer values. This dataset should " - "have the same number of elements as the 'timestamps' dataset."), - 'shape': (None,)}, - {'name': 'event_types', 'type': EventTypesTable, - 'doc': ("Mapping from an integer (the zero-based index) to a string, used to understand " - "the integer values in the 'data' dataset. Use an empty string to represent " - "a label value that is not mapped to any text. Use '' to represent any values " - "that are None or empty. If the argument is not specified, the label " - "will be set to the string representation of the data value and '' for other values.")}, - *get_docval(Events.__init__, 'resolution')) - def __init__(self, **kwargs): - timestamps = getargs('timestamps', kwargs) - data, event_types = popargs('data', 'event_types', kwargs) - super().__init__(**kwargs) - if len(timestamps) != len(data): - raise ValueError('Timestamps and data must have the same length: %d != %d' - % (len(timestamps), len(data))) - data = self.__check_label_indices_uint(data) - self.data = data - self.event_types = event_types +# @docval(*get_docval(Events.__init__, 'name', 'description', 'timestamps'), # required +# {'name': 'data', 'type': ('array_data', 'data'), # required +# 'doc': ("Unsigned integer labels that map onto strings using the mapping in the 'labels' dataset. " +# "Values must be 0 or greater and need not be sequential. If a list/tuple/array of integer values " +# "is passed, it will be converted to a numpy array of unsigned integer values. This dataset should " +# "have the same number of elements as the 'timestamps' dataset."), +# 'shape': (None,)}, +# {'name': 'event_types', 'type': EventTypesTable, +# 'doc': ("Mapping from an integer (the zero-based index) to a string, used to understand " +# "the integer values in the 'data' dataset. 
Use an empty string to represent " +# "a label value that is not mapped to any text. Use '' to represent any values " +# "that are None or empty. If the argument is not specified, the label " +# "will be set to the string representation of the data value and '' for other values.")}, +# *get_docval(Events.__init__, 'resolution')) +# def __init__(self, **kwargs): +# timestamps = getargs('timestamps', kwargs) +# data, event_types = popargs('data', 'event_types', kwargs) +# super().__init__(**kwargs) +# if len(timestamps) != len(data): +# raise ValueError('Timestamps and data must have the same length: %d != %d' +# % (len(timestamps), len(data))) +# data = self.__check_label_indices_uint(data) +# self.data = data +# self.event_types = event_types - def __check_label_indices_uint(self, data): - """Convert a list/tuple of integer label indices to a numpy array of unsigned integers. Raise error if negative - or non-numeric values are found. If something other than a list/tuple/np.ndarray of ints or unsigned ints - is provided, return the original array. - """ - new_data = data - if isinstance(new_data, (list, tuple)): - new_data = np.array(new_data) - if isinstance(new_data, np.ndarray): - if not np.issubdtype(new_data.dtype, np.number): - raise ValueError("'data' must be an array of numeric values that have type unsigned int or " - "can be converted to unsigned int, not type %s" % new_data.dtype) - if np.issubdtype(new_data.dtype, np.unsignedinteger): - return new_data - if (new_data < 0).any(): - raise ValueError("Negative values are not allowed in 'data'.") - if np.issubdtype(new_data.dtype, np.integer): - return new_data.astype(np.uint) - # all other array dtypes will not be handled. the objectmapper will attempt to convert the data - return data +# def __check_label_indices_uint(self, data): +# """Convert a list/tuple of integer label indices to a numpy array of unsigned integers. Raise error if negative +# or non-numeric values are found. If something other than a list/tuple/np.ndarray of ints or unsigned ints +# is provided, return the original array. +# """ +# new_data = data +# if isinstance(new_data, (list, tuple)): +# new_data = np.array(new_data) +# if isinstance(new_data, np.ndarray): +# if not np.issubdtype(new_data.dtype, np.number): +# raise ValueError("'data' must be an array of numeric values that have type unsigned int or " +# "can be converted to unsigned int, not type %s" % new_data.dtype) +# if np.issubdtype(new_data.dtype, np.unsignedinteger): +# return new_data +# if (new_data < 0).any(): +# raise ValueError("Negative values are not allowed in 'data'.") +# if np.issubdtype(new_data.dtype, np.integer): +# return new_data.astype(np.uint) +# # all other array dtypes will not be handled. 
the objectmapper will attempt to convert the data +# return data # @register_class('TTLs', 'ndx-events') diff --git a/src/pynwb/ndx_events/io/events.py b/src/pynwb/ndx_events/io/events.py index f50811a..a08ab30 100644 --- a/src/pynwb/ndx_events/io/events.py +++ b/src/pynwb/ndx_events/io/events.py @@ -1,12 +1,12 @@ from pynwb import register_map from pynwb.io.core import NWBContainerMapper -from hdmf.common.io.table import DynamicTableMap -from hdmf.build import ObjectMapper, BuildManager -from hdmf.common import VectorData -from hdmf.utils import getargs, docval -from hdmf.spec import AttributeSpec +# from hdmf.common.io.table import DynamicTableMap +# from hdmf.build import ObjectMapper, BuildManager +# from hdmf.common import VectorData +# from hdmf.utils import getargs, docval +# from hdmf.spec import AttributeSpec -from ..events import Events, LabeledEvents #, AnnotatedEventsTable +from ..events import Events @register_map(Events) diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index 8637c6a..d8b5acf 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -2,7 +2,7 @@ def test_example_usage(): from datetime import datetime from pynwb import NWBFile, NWBHDF5IO - from ndx_events import LabeledEvents, EventTypesTable + from ndx_events import EventsTable, EventTypesTable nwb = NWBFile( session_description='session description', @@ -10,65 +10,45 @@ def test_example_usage(): session_start_time=datetime.now().astimezone() ) + # create an event types table event_types_table = EventTypesTable( name="EventTypesTable", description="metadata about event types", ) - event_types_table.add_row(id=0, label="trial start") - event_types_table.add_row(id=1, label="cue onset") - event_types_table.add_row(id=2, label="cue offset") - event_types_table.add_row(id=3, label="response left") - event_types_table.add_row(id=4, label="response right") - event_types_table.add_row(id=5, label="reward") + # create a new custom column with additional metadata + event_types_table.add_column( + name="extra_metadata", + description="some additional metadata about each event type", + ) + + # add event types one by one + event_types_table.add_row(id=0, event_name="trial start", extra_metadata="more metadata") + event_types_table.add_row(id=1, event_name="cue onset", extra_metadata="more metadata") + event_types_table.add_row(id=2, event_name="cue offset", extra_metadata="more metadata") + event_types_table.add_row(id=3, event_name="nosepoke left", extra_metadata="more metadata") + event_types_table.add_row(id=4, event_name="nosepoke right", extra_metadata="more metadata") + event_types_table.add_row(id=5, event_name="reward", extra_metadata="more metadata") nwb.add_acquisition(event_types_table) # place here for now - # create a new LabeledEvents type to hold events recorded from the data acquisition system - events = LabeledEvents( - name='LabeledEvents', + # create a new EventsTable type to hold events recorded from the data acquisition system + events_table = EventsTable( + name='EventsTable', description='events from my experiment', - timestamps=[0., 0.5, 0.6, 2., 2.05, 3., 3.5, 3.6, 4.], - resolution=1e-5, # resolution of the timestamps, i.e., smallest possible difference between timestamps - data=[0, 1, 2, 3, 5, 0, 1, 2, 4], - event_types=event_types_table, ) + # set the dynamic table region link + events_table["event_type"].table = event_types_table - # add the LabeledEvents type to the acquisition group of the NWB file - 
nwb.add_acquisition(events) - - # # create a new AnnotatedEventsTable type to hold annotated events - # # each row of the table represents a single event type - # annotated_events = AnnotatedEventsTable( - # name='AnnotatedEventsTable', - # description='annotated events from my experiment', - # resolution=1e-5 # resolution of the timestamps, i.e., smallest possible difference between timestamps - # ) - # # add a custom indexed (ragged) column to represent whether each event time was a bad event - # annotated_events.add_column( - # name='bad_event', - # description='whether each event time should be excluded', - # index=True - # ) - # # add an event type (row) to the AnnotatedEventsTable instance - # annotated_events.add_event_type( - # label='Reward', - # event_description='Times when the subject received juice reward.', - # event_times=[1., 2., 3.], - # bad_event=[False, False, True], - # id=3 - # ) - # # convert the AnnotatedEventsTable to a pandas.DataFrame and print it - # print(annotated_events.to_dataframe()) - - # # create a processing module in the NWB file to hold processed events data - # events_module = nwb.create_processing_module( - # name='events', - # description='processed event data' - # ) + # add events one by one + events_table.add_row(timestamp=0.1, event_type=0, duration=0.) + events_table.add_row(timestamp=0.3, event_type=1, duration=0.) + events_table.add_row(timestamp=0.4, event_type=2, duration=0.) + events_table.add_row(timestamp=0.8, event_type=4, duration=0.1) + events_table.add_row(timestamp=0.85, event_type=5, duration=0.) - # # add the AnnotatedEventsTable instance to the processing module - # events_module.add(annotated_events) + # add the EventsTable type to the acquisition group of the NWB file + nwb.add_acquisition(events_table) # write nwb file filename = 'test.nwb' @@ -79,12 +59,10 @@ def test_example_usage(): with NWBHDF5IO(filename, 'r', load_namespaces=True) as io: nwb = io.read() print(nwb) - # access the LabeledEvents container by name from the NWBFile acquisition group and print it + # access the events table and event types table by name from the NWBFile acquisition group and print it print(nwb.acquisition['EventTypesTable']) - print(nwb.acquisition['LabeledEvents']) - # access the AnnotatedEventsTable by name from the 'events' processing module, convert it to - # a pandas.DataFrame, and print that - # print(nwb.processing['events']['AnnotatedEventsTable'].to_dataframe()) + print(nwb.acquisition['EventsTable']) + print(nwb.acquisition['EventsTable']["event_type"]) if __name__ == "__main__": diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 7b31367..4f8eb7c 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -18,12 +18,14 @@ def main(): ns_builder.include_type('DynamicTable', namespace='core') ns_builder.include_type('VectorData', namespace='core') ns_builder.include_type('VectorIndex', namespace='core') + ns_builder.include_type('DynamicTableRegion', namespace='core') events = NWBGroupSpec( neurodata_type_def='Events', neurodata_type_inc='NWBDataInterface', - doc=("A list of timestamps, stored in seconds, of an event type. For example, this neurodata type could be " - "used to store all the times that a nosepoke was detected. The name may be set to 'nosepoke_onset'"), + doc=("A simple list of timestamps, stored in seconds, of an event type. For example, this neurodata type " + "could be used to store all the times that a nosepoke was detected. 
The name may be set to " + "'nosepoke_onset'."), attributes=[ NWBAttributeSpec( name='description', @@ -52,7 +54,7 @@ def main(): doc=('The smallest possible difference between two event times. Usually 1 divided by the ' 'event time sampling rate on the data acquisition system.'), required=False, - ) + ), ], ), ], @@ -61,64 +63,104 @@ def main(): event_types_table = NWBGroupSpec( neurodata_type_def="EventTypesTable", neurodata_type_inc='DynamicTable', - doc=("Labels and other metadata associated with each event type."), + doc=("A column-based table to store information about each event type, such as name, one event type per row."), datasets=[ NWBDatasetSpec( - name='label', + name='event_name', neurodata_type_inc='VectorData', dtype='text', - doc='Label for each event type.', + doc='Name of each event type.', + ), + NWBDatasetSpec( + name='hed', + neurodata_type_inc='VectorData', + dtype='text', + dims=['num_events'], + shape=[None], + doc=("Optional column containing the Hierarchical Event Descriptor of each event."), + quantity="?", ), ], ) - labeled_events = NWBGroupSpec( - neurodata_type_def='LabeledEvents', - neurodata_type_inc='Events', - doc=("Event timestamps that can have different labels and other metadata. " - "For example, " - "this type could represent the times that reward was given, as well as which of three different " - "types of reward was given. In this case, the 'data' dataset would contain unique values {0, 1, 2}, and " - "the 'labels' table would contain three rows, one for each unique data value, and two columns: 'label', " - "which stores a unique name for each event type, and a custom column 'reward_type', which stores " - "information about the type of reward given. The values in row with index i would represent the reward " - "associated with a data value of i. For example, 'timestamps' may contain values [0, 0.05, 0.15, 0.2], " - "'data' may contain values [0, 1, 0, 2], and the 'labels' table may contain three rows, where the row " - "at index 0 has label='" - "Users may specify custom columns " - "in the 'labels' table to store arbitrary metadata associated with each event type. " - "The values in the 'data' dataset do not have to be continuous and start at 0, " - "but this is recommended so that there are not empty rows in the 'labels' table."), - links=[ - NWBLinkSpec( - name="event_types", - target_type='EventTypesTable', - doc=("Labels and other metadata associated with each event type, as accessed by row index."), - ) - ], + events_table = NWBGroupSpec( + neurodata_type_def='EventsTable', + neurodata_type_inc='DynamicTable', + # doc=("Event timestamps that can have different labels and other metadata. " + # "For example, " + # "this type could represent the times that reward was given, as well as which of three different " + # "types of reward was given. In this case, the 'data' dataset would contain unique values {0, 1, 2}, and " + # "the 'labels' table would contain three rows, one for each unique data value, and two columns: 'label', " + # "which stores a unique name for each event type, and a custom column 'reward_type', which stores " + # "information about the type of reward given. The values in row with index i would represent the reward " + # "associated with a data value of i. 
For example, 'timestamps' may contain values [0, 0.05, 0.15, 0.2], " + # "'data' may contain values [0, 1, 0, 2], and the 'labels' table may contain three rows, where the row " + # "at index 0 has label='" + # "Users may specify custom columns " + # "in the 'labels' table to store arbitrary metadata associated with each event type. " + # "The values in the 'data' dataset do not have to be continuous and start at 0, " + # "but this is recommended so that there are not empty rows in the 'labels' table."), + doc="A column-based table to store information about events, one event per row.", datasets=[ NWBDatasetSpec( - name='data', - dtype='uint8', + name='timestamp', + neurodata_type_inc='VectorData', + dtype='float32', dims=['num_events'], shape=[None], - doc=("Unsigned integer labels that map onto row indices in the 'event_types' table. This " - "dataset should have the same number of elements as the 'timestamps' dataset."), + doc=("The time that the event occurred, in seconds, from the session start time."), + attributes=[ + NWBAttributeSpec( + name='unit', + dtype='text', + value='seconds', + doc="Unit of measurement for timestamps, which is fixed to 'seconds'.", + ), + NWBAttributeSpec( + name='resolution', + dtype='float32', + doc=('The smallest possible difference between two event times. Usually 1 divided by the ' + 'event time sampling rate on the data acquisition system.'), + required=False, + ), + ], + ), + NWBDatasetSpec( + name='event_type', + neurodata_type_inc='DynamicTableRegion', + dims=['num_events'], + shape=[None], + doc=("The type of event that occurred. This is represented as a reference " + "to a row of the EventTypesTable."), + ), + NWBDatasetSpec( + name='value', + neurodata_type_inc='VectorData', + dtype='text', + dims=['num_events'], + shape=[None], + doc=("Optional column containing the text value of each event. Values should be described using " + "Hierarchical Event Descriptors in the linked EventTypesTable."), + quantity="?", + ), + NWBDatasetSpec( + name='duration', + neurodata_type_inc='VectorData', + dtype='float32', + dims=['num_events'], + shape=[None], + doc=("Optional column containing the duration of each event, in seconds."), + quantity="?", ), ], ) ttl_types_table = NWBGroupSpec( - neurodata_type_def="TTLTypesTable", + neurodata_type_def="TtlTypesTable", neurodata_type_inc='EventTypesTable', - doc=("Labels and other metadata associated with each event type."), + doc=("A column-based table to store information about each TTL type, such as name and pulse value, " + "one TTL type per row."), datasets=[ - NWBDatasetSpec( - name='label', - neurodata_type_inc='VectorData', - dtype='text', - doc='Label for each event type.', - ), NWBDatasetSpec( name='pulse_value', neurodata_type_inc='VectorData', @@ -128,20 +170,23 @@ def main(): ], ) - ttls = NWBGroupSpec( - neurodata_type_def='TTLs', - neurodata_type_inc='LabeledEvents', + ttls_table = NWBGroupSpec( + neurodata_type_def='TtlsTable', + neurodata_type_inc='EventsTable', doc=("Data type to hold timestamps of TTL pulses."), - links=[ - NWBLinkSpec( - name="event_types", - target_type='TTLTypesTable', - doc=("Labels and other metadata associated with each event type, as accessed by row index."), - ) + datasets=[ + NWBDatasetSpec( + name='event_type', + neurodata_type_inc='DynamicTableRegion', + dims=['num_events'], + shape=[None], + doc=("The type of TTL that occured. 
This is represented as a reference " + "to a row of the TtlTypesTable."), + ), ], ) - new_data_types = [events, event_types_table, labeled_events, ttl_types_table, ttls] + new_data_types = [events, event_types_table, events_table, ttl_types_table, ttls_table] # export the spec to yaml files in the spec folder output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec')) From 0672b9a9e8450ff3589a797d8e65f6969bb5eee5 Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Mon, 27 Mar 2023 09:27:03 -0700 Subject: [PATCH 03/15] Update example --- README.md | 164 ++++++++++++-------------- src/pynwb/tests/test_example_usage.py | 71 +++++++---- 2 files changed, 124 insertions(+), 111 deletions(-) diff --git a/README.md b/README.md index 56c1845..7961a04 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ This is an NWB extension for storing timestamped event data and TTL pulses. +The latest version is 0.3.0. This is a major change from previous versions. + Events can be: 1. **Simple events**. These are stored in the `Events` type. The `Events` type consists of only a name, a description, and a 1D array of timestamps. This should be used instead of a `TimeSeries` when the time series has no data. @@ -25,119 +27,107 @@ background, and alternative approaches to this extension can be found here: https://docs.google.com/document/d/1qcsjyFVX9oI_746RdMoDdmQPu940s0YtDjb1en1Xtdw ## Installation -Python: -``` -pip install ndx-events -``` -Matlab: +Python: +```bash +pip install -U ndx-events ``` + +Matlab: +```matlab generateExtension('<directory path>/ndx-events/spec/ndx-events.namespace.yaml'); ``` + ## Example usage -Python: +Python: + ```python from datetime import datetime - +from ndx_events import Events, EventsTable, EventTypesTable from pynwb import NWBFile, NWBHDF5IO -from ndx_events import LabeledEvents, AnnotatedEventsTable - nwb = NWBFile( - session_description='session description', - identifier='cool_experiment_001', - session_start_time=datetime.now().astimezone() + session_description="session description", + identifier="cool_experiment_001", + session_start_time=datetime.now().astimezone(), +) + +# create a basic events object +basic_tone_event = Events( + name="tone_onset", + timestamps=[0.0, 0.1, 0.3, 0.5, 0.6], + description="Times when a tone was played.", ) -# create a new LabeledEvents type to hold events recorded from the data acquisition system -events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 0.5, 0.6, 2., 2.05, 3., 3.5, 3.6, 4.], - resolution=1e-5, # resolution of the timestamps, i.e., smallest possible difference between timestamps - data=[0, 1, 2, 3, 5, 0, 1, 2, 4], - labels=['trial_start', 'cue_onset', 'cue_offset', 'response_left', 'response_right', 'reward'] +# add the basic events object to the NWBFile object +nwb.add_acquisition(basic_tone_event) + +# create an event types table +event_types_table = EventTypesTable( + name="EventTypesTable", + description="metadata about event types", ) -# add the LabeledEvents type to the acquisition group of the NWB file -nwb.add_acquisition(events) +# create a new custom column with additional metadata +event_types_table.add_column( + name="extra_metadata", + description="some additional metadata about each event type", +) -# create a new AnnotatedEventsTable type to hold annotated events -# each row of the table represents a single event type -annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events 
from my experiment', - resolution=1e-5 # resolution of the timestamps, i.e., smallest possible difference between timestamps +# add event types one by one +event_types_table.add_row( + id=0, event_name="trial start", extra_metadata="more metadata" +) +event_types_table.add_row( + id=1, event_name="cue onset", extra_metadata="more metadata" +) +event_types_table.add_row( + id=2, event_name="cue offset", extra_metadata="more metadata" ) -# add a custom indexed (ragged) column to represent whether each event time was a bad event -annotated_events.add_column( - name='bad_event', - description='whether each event time should be excluded', - index=True +event_types_table.add_row( + id=3, event_name="nosepoke left", extra_metadata="more metadata" ) -# add an event type (row) to the AnnotatedEventsTable instance -annotated_events.add_event_type( - label='Reward', - event_description='Times when the subject received juice reward.', - event_times=[1., 2., 3.], - bad_event=[False, False, True], - id=3 +event_types_table.add_row( + id=4, event_name="nosepoke right", extra_metadata="more metadata" ) -# convert the AnnotatedEventsTable to a pandas.DataFrame and print it -print(annotated_events.to_dataframe()) +event_types_table.add_row(id=5, event_name="reward", extra_metadata="more metadata") -# create a processing module in the NWB file to hold processed events data -events_module = nwb.create_processing_module( - name='events', - description='processed event data' +# add the event types table to the acquisition group for now +# it should be added to the /general/tasks group when merged with core +nwb.add_acquisition(event_types_table) + +# create a new EventsTable type to hold events recorded from the data acquisition system +events_table = EventsTable( + name="EventsTable", + description="events from my experiment", ) +# set the dynamic table region link +events_table["event_type"].table = event_types_table + +# add events one by one +events_table.add_row(timestamp=0.1, event_type=0, duration=0.0) +events_table.add_row(timestamp=0.3, event_type=1, duration=0.0) +events_table.add_row(timestamp=0.4, event_type=2, duration=0.0) +events_table.add_row(timestamp=0.8, event_type=4, duration=0.1) +events_table.add_row(timestamp=0.85, event_type=5, duration=0.0) -# add the AnnotatedEventsTable instance to the processing module -events_module.add(annotated_events) +# add the EventsTable type to the acquisition group of the NWB file +nwb.add_acquisition(events_table) # write nwb file -filename = 'test.nwb' -with NWBHDF5IO(filename, 'w') as io: +filename = "test.nwb" +with NWBHDF5IO(filename, "w") as io: io.write(nwb) # read nwb file and check its contents -with NWBHDF5IO(filename, 'r', load_namespaces=True) as io: +with NWBHDF5IO(filename, "r", load_namespaces=True) as io: nwb = io.read() print(nwb) - # access the LabeledEvents container by name from the NWBFile acquisition group and print it - print(nwb.acquisition['LabeledEvents']) - # access the AnnotatedEventsTable by name from the 'events' processing module, convert it to - # a pandas.DataFrame, and print that - print(nwb.processing['events']['AnnotatedEventsTable'].to_dataframe()) -``` -Matlab (see discussion [here](https://github.com/NeurodataWithoutBorders/helpdesk/discussions/27#discussioncomment-2612231)): -```matlab -bad_event_col = types.hdmf_common.VectorData( ... - 'description', 'whether each event time should be excluded', ... - 'data', [false, false, true, false, true] ... -); -bad_event_col_index = types.hdmf_common.VectorIndex( ... 
- 'description', 'bad_event column index', ... - 'target', types.untyped.ObjectView(bad_event_col), ... - 'data', [3; 5] ... -); -annotated_events = types.ndx_events.AnnotatedEventsTable( ... - 'description', 'annotated events from my experiment', ... - 'colnames', {'bad_event'}, ... - 'bad_event', bad_event_col, ... - 'bad_event_index', bad_event_col_index, ... - 'id', types.hdmf_common.ElementIdentifiers('data', [0; 1]) ... % 0-indexed, for compatibility with Python -); - -% place the annotated events table in a "behavior" processing module in the NWB file -behavior_mod = types.core.ProcessingModule('description', 'processed behavioral data'); -behavior_mod.dynamictable.set('AnnotatedEvents', annotated_events); - -nwb = NwbFile( ... - 'session_description', 'mouse in open exploration', ... - 'identifier', 'Mouse5_Day3', ... - 'session_start_time', datetime(2018, 4, 25, 2, 30, 3) ... -); - -nwb.processing.set('behavior', behavior_mod); + # access the events table and event types table by name from the NWBFile acquisition group and print it + print(nwb.acquisition["tone_onset"]) + print(nwb.acquisition["EventTypesTable"]) + print(nwb.acquisition["EventsTable"]) + print(nwb.acquisition["EventsTable"].to_dataframe()) + print(nwb.acquisition["EventsTable"][0, "event_type"]) ``` + This extension was created using [ndx-template](https://github.com/nwb-extensions/ndx-template). diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index d8b5acf..bc2fa13 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -1,15 +1,24 @@ def test_example_usage(): from datetime import datetime - + from ndx_events import Events, EventsTable, EventTypesTable from pynwb import NWBFile, NWBHDF5IO - from ndx_events import EventsTable, EventTypesTable nwb = NWBFile( - session_description='session description', - identifier='cool_experiment_001', - session_start_time=datetime.now().astimezone() + session_description="session description", + identifier="cool_experiment_001", + session_start_time=datetime.now().astimezone(), + ) + + # create a basic events object + basic_tone_event = Events( + name="tone_onset", + timestamps=[0.0, 0.1, 0.3, 0.5, 0.6], + description="Times when a tone was played.", ) + # add the basic events object to the NWBFile object + nwb.add_acquisition(basic_tone_event) + # create an event types table event_types_table = EventTypesTable( name="EventTypesTable", @@ -23,47 +32,61 @@ def test_example_usage(): ) # add event types one by one - event_types_table.add_row(id=0, event_name="trial start", extra_metadata="more metadata") - event_types_table.add_row(id=1, event_name="cue onset", extra_metadata="more metadata") - event_types_table.add_row(id=2, event_name="cue offset", extra_metadata="more metadata") - event_types_table.add_row(id=3, event_name="nosepoke left", extra_metadata="more metadata") - event_types_table.add_row(id=4, event_name="nosepoke right", extra_metadata="more metadata") + event_types_table.add_row( + id=0, event_name="trial start", extra_metadata="more metadata" + ) + event_types_table.add_row( + id=1, event_name="cue onset", extra_metadata="more metadata" + ) + event_types_table.add_row( + id=2, event_name="cue offset", extra_metadata="more metadata" + ) + event_types_table.add_row( + id=3, event_name="nosepoke left", extra_metadata="more metadata" + ) + event_types_table.add_row( + id=4, event_name="nosepoke right", extra_metadata="more metadata" + ) event_types_table.add_row(id=5, 
event_name="reward", extra_metadata="more metadata") - nwb.add_acquisition(event_types_table) # place here for now + # add the event types table to the acquisition group for now + # it should be added to the /general/tasks group when merged with core + nwb.add_acquisition(event_types_table) # create a new EventsTable type to hold events recorded from the data acquisition system events_table = EventsTable( - name='EventsTable', - description='events from my experiment', + name="EventsTable", + description="events from my experiment", ) # set the dynamic table region link events_table["event_type"].table = event_types_table # add events one by one - events_table.add_row(timestamp=0.1, event_type=0, duration=0.) - events_table.add_row(timestamp=0.3, event_type=1, duration=0.) - events_table.add_row(timestamp=0.4, event_type=2, duration=0.) + events_table.add_row(timestamp=0.1, event_type=0, duration=0.0) + events_table.add_row(timestamp=0.3, event_type=1, duration=0.0) + events_table.add_row(timestamp=0.4, event_type=2, duration=0.0) events_table.add_row(timestamp=0.8, event_type=4, duration=0.1) - events_table.add_row(timestamp=0.85, event_type=5, duration=0.) + events_table.add_row(timestamp=0.85, event_type=5, duration=0.0) # add the EventsTable type to the acquisition group of the NWB file nwb.add_acquisition(events_table) # write nwb file - filename = 'test.nwb' - with NWBHDF5IO(filename, 'w') as io: + filename = "test.nwb" + with NWBHDF5IO(filename, "w") as io: io.write(nwb) # read nwb file and check its contents - with NWBHDF5IO(filename, 'r', load_namespaces=True) as io: + with NWBHDF5IO(filename, "r", load_namespaces=True) as io: nwb = io.read() print(nwb) # access the events table and event types table by name from the NWBFile acquisition group and print it - print(nwb.acquisition['EventTypesTable']) - print(nwb.acquisition['EventsTable']) - print(nwb.acquisition['EventsTable']["event_type"]) + print(nwb.acquisition["tone_onset"]) + print(nwb.acquisition["EventTypesTable"]) + print(nwb.acquisition["EventsTable"]) + print(nwb.acquisition["EventsTable"].to_dataframe()) + print(nwb.acquisition["EventsTable"][0, "event_type"]) if __name__ == "__main__": - test_example_usage() \ No newline at end of file + test_example_usage() From 09ce6c968c12ffb4d103026a42fa96e4bf5cc912 Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Mon, 23 Oct 2023 09:04:32 -0700 Subject: [PATCH 04/15] Update and add tests --- spec/ndx-events.extensions.yaml | 89 ++-- spec/ndx-events.namespace.yaml | 6 - src/pynwb/ndx_events/__init__.py | 2 +- src/pynwb/ndx_events/events.py | 108 ----- src/pynwb/ndx_events/io/events.py | 20 +- .../tests/integration/hdf5/test_events.py | 328 ++++++------- src/pynwb/tests/test_example_usage.py | 156 +++---- src/pynwb/tests/unit/test_events.py | 434 +++++++++++------- src/spec/create_extension_spec.py | 175 ++++--- 9 files changed, 677 insertions(+), 641 deletions(-) diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index 56304ef..385bc46 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -1,53 +1,55 @@ groups: -- neurodata_type_def: Events - neurodata_type_inc: NWBDataInterface - doc: A simple list of timestamps, stored in seconds, of an event type. For example, - this neurodata type could be used to store all the times that a nosepoke was detected. - The name may be set to 'nosepoke_onset'. - attributes: - - name: description - dtype: text - doc: Description of the event type. 
- datasets: - - name: timestamps - dtype: float32 - dims: - - num_events - shape: - - null - doc: Event timestamps, in seconds, relative to the common experiment master-clock - stored in NWBFile.timestamps_reference_time. - attributes: - - name: unit - dtype: text - value: seconds - doc: Unit of measurement for timestamps, which is fixed to 'seconds'. - - name: resolution - dtype: float32 - doc: The smallest possible difference between two event times. Usually 1 divided - by the event time sampling rate on the data acquisition system. - required: false - neurodata_type_def: EventTypesTable neurodata_type_inc: DynamicTable + default_name: EventTypesTable doc: A column-based table to store information about each event type, such as name, one event type per row. + attributes: + - name: description + dtype: text + default_value: Metadata about event types. + doc: Description of the event types table. + required: false datasets: - name: event_name neurodata_type_inc: VectorData dtype: text doc: Name of each event type. - - name: hed + - name: event_type_description + neurodata_type_inc: VectorData + dtype: text + doc: Description of each event type. + - name: hed_tags neurodata_type_inc: VectorData dtype: text dims: + - num_tags + shape: + - null + doc: Optional column containing the Hierarchical Event Descriptor (HED) tags for + each event type. + quantity: '?' + - name: hed_tags_index + neurodata_type_inc: VectorIndex + dims: - num_events shape: - null - doc: Optional column containing the Hierarchical Event Descriptor of each event. + doc: Index column for `hed_tags` column. quantity: '?' - neurodata_type_def: EventsTable neurodata_type_inc: DynamicTable - doc: A column-based table to store information about events, one event per row. + default_name: EventsTable + doc: A column-based table to store information about events (event instances), one + event per row. Each event must have an event_type, which is a row in the EventTypesTable. + Additional columns may be added to store metadata about each event, such as the + duration of the event, or a text value of the event. + attributes: + - name: description + dtype: text + default_value: Metadata about events. + doc: Description of the events table. + required: false datasets: - name: timestamp neurodata_type_inc: VectorData @@ -56,7 +58,7 @@ groups: - num_events shape: - null - doc: The time that the event occurred, in seconds. + doc: The time that the event occurred, in seconds, from the session start time. attributes: - name: unit dtype: text @@ -93,17 +95,38 @@ groups: - null doc: Optional column containing the duration of each event, in seconds. quantity: '?' + - name: hed_tags + neurodata_type_inc: VectorData + dtype: text + dims: + - num_tags + shape: + - null + doc: Optional column containing the Hierarchical Event Descriptor (HED) tags for + each event. HED tags should be used at the event type level, not at the event + instance level, when possible, unless it is important to annotate events individually. + quantity: '?' + - name: hed_tags_index + neurodata_type_inc: VectorIndex + dims: + - num_events + shape: + - null + doc: Index column for `hed_tags` column. + quantity: '?' - neurodata_type_def: TtlTypesTable neurodata_type_inc: EventTypesTable + default_name: TtlTypesTable doc: A column-based table to store information about each TTL type, such as name and pulse value, one TTL type per row. datasets: - name: pulse_value neurodata_type_inc: VectorData - dtype: int8 + dtype: uint8 doc: TTL pulse value for each event type. 
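The two TTL table types in this hunk are meant to be used together the same way as `EventTypesTable`/`EventsTable` in the example and tests elsewhere in this patch series. The sketch below is a minimal illustration of that pairing, not part of the patch: it assumes the generated `TtlTypesTable`/`TtlsTable` classes are exported by `ndx_events` (the corresponding `get_class` call is still commented out at this point in the series), that they follow the same `DynamicTable` conventions (`add_row`, `target_tables`) as the other tables, and the column values are invented for illustration.

```python
# Minimal sketch, not part of the patch. Assumes TtlTypesTable and TtlsTable are
# generated from this spec and exported by ndx_events, and that they behave like
# the EventTypesTable/EventsTable classes used in the tests in this patch series.
from ndx_events import TtlTypesTable, TtlsTable  # assumed export

# One row per TTL type: the inherited event_name/event_type_description columns
# plus the pulse_value column defined just above.
ttl_types = TtlTypesTable(description="Metadata about TTL types")
ttl_types.add_row(
    event_name="laser_on",
    event_type_description="Times when the laser TTL line went high.",
    pulse_value=1,
)

# One row per TTL pulse: event_type is a DynamicTableRegion whose indices resolve
# to rows of ttl_types, so per-type metadata is stored once rather than per pulse.
ttls = TtlsTable(description="TTL pulses", target_tables={"event_type": ttl_types})
ttls.add_row(timestamp=0.5, event_type=0)
ttls.add_row(timestamp=1.5, event_type=0)
```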
- neurodata_type_def: TtlsTable neurodata_type_inc: EventsTable + default_name: TtlsTable doc: Data type to hold timestamps of TTL pulses. datasets: - name: event_type diff --git a/spec/ndx-events.namespace.yaml b/spec/ndx-events.namespace.yaml index 7c3c919..ee73a13 100644 --- a/spec/ndx-events.namespace.yaml +++ b/spec/ndx-events.namespace.yaml @@ -7,11 +7,5 @@ namespaces: name: ndx-events schema: - namespace: core - neurodata_types: - - NWBDataInterface - - DynamicTable - - VectorData - - VectorIndex - - DynamicTableRegion - source: ndx-events.extensions.yaml version: 0.3.0 diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index 28bb864..3877752 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -22,4 +22,4 @@ load_namespaces(ndx_events_specpath) from . import io as __io # noqa: E402,F401 -from .events import Events, EventTypesTable, EventsTable #, TTLs, AnnotatedEventsTable # noqa: E402,F401 +from .events import EventTypesTable, EventsTable #, TTLs, AnnotatedEventsTable # noqa: E402,F401 diff --git a/src/pynwb/ndx_events/events.py b/src/pynwb/ndx_events/events.py index 482601d..1632709 100644 --- a/src/pynwb/ndx_events/events.py +++ b/src/pynwb/ndx_events/events.py @@ -8,111 +8,3 @@ EventTypesTable = get_class('EventTypesTable', 'ndx-events') EventsTable = get_class('EventsTable', 'ndx-events') # TTLTypesTable = get_class('TTLTypesTable', 'ndx-events') - -@register_class('Events', 'ndx-events') -class Events(NWBDataInterface): - """ - A list of timestamps, stored in seconds, of an event. - """ - - __nwbfields__ = ('description', - 'timestamps', - 'resolution', - {'name': 'unit', 'settable': False}) - - @docval({'name': 'name', 'type': str, 'doc': 'The name of this Events object'}, # required - {'name': 'description', 'type': str, 'doc': 'The name of this Events object'}, # required - {'name': 'timestamps', 'type': ('array_data', 'data'), # required - 'doc': ('Event timestamps, in seconds, relative to the common experiment master-clock ' - 'stored in NWBFile.timestamps_reference_time.'), - 'shape': (None,)}, - {'name': 'resolution', 'type': float, - 'doc': ('The smallest possible difference between two event times. Usually 1 divided ' - 'by the event time sampling rate on the data acquisition system.'), - 'default': None}) - def __init__(self, **kwargs): - description, timestamps, resolution = popargs('description', 'timestamps', 'resolution', kwargs) - super().__init__(**kwargs) - self.description = description - self.timestamps = timestamps - self.resolution = resolution - self.fields['unit'] = 'seconds' - - -# @register_class('LabeledEvents', 'ndx-events') -# class LabeledEvents(Events): -# """ -# A list of timestamps, stored in seconds, of an event that can have different -# labels. For example, this type could represent the times that reward was given, -# as well as which of three different types of reward was given. In this case, the -# 'data' dataset would contain values {0, 1, 2}, and the 'labels' dataset -# would contain three text elements, where the first (index 0) specifies the name -# of the reward associated with data = 0, the second (index 1) specifies -# the name of the reward associated with data = 1, etc. The labels do not -# have to start at 0 and do not need to be sequential, e.g. 
the 'data' dataset -# could contain values {0, 10, 100}, and the 'labels' dataset could contain 101 -# values, where labels[0] is 'No reward', labels[10] is '10% reward', labels[100] -# is 'Full reward', and all other entries in 'labels' are the empty string. -# """ - -# __nwbfields__ = ('data', -# 'labels') - -# @docval(*get_docval(Events.__init__, 'name', 'description', 'timestamps'), # required -# {'name': 'data', 'type': ('array_data', 'data'), # required -# 'doc': ("Unsigned integer labels that map onto strings using the mapping in the 'labels' dataset. " -# "Values must be 0 or greater and need not be sequential. If a list/tuple/array of integer values " -# "is passed, it will be converted to a numpy array of unsigned integer values. This dataset should " -# "have the same number of elements as the 'timestamps' dataset."), -# 'shape': (None,)}, -# {'name': 'event_types', 'type': EventTypesTable, -# 'doc': ("Mapping from an integer (the zero-based index) to a string, used to understand " -# "the integer values in the 'data' dataset. Use an empty string to represent " -# "a label value that is not mapped to any text. Use '' to represent any values " -# "that are None or empty. If the argument is not specified, the label " -# "will be set to the string representation of the data value and '' for other values.")}, -# *get_docval(Events.__init__, 'resolution')) -# def __init__(self, **kwargs): -# timestamps = getargs('timestamps', kwargs) -# data, event_types = popargs('data', 'event_types', kwargs) -# super().__init__(**kwargs) -# if len(timestamps) != len(data): -# raise ValueError('Timestamps and data must have the same length: %d != %d' -# % (len(timestamps), len(data))) -# data = self.__check_label_indices_uint(data) -# self.data = data -# self.event_types = event_types - -# def __check_label_indices_uint(self, data): -# """Convert a list/tuple of integer label indices to a numpy array of unsigned integers. Raise error if negative -# or non-numeric values are found. If something other than a list/tuple/np.ndarray of ints or unsigned ints -# is provided, return the original array. -# """ -# new_data = data -# if isinstance(new_data, (list, tuple)): -# new_data = np.array(new_data) -# if isinstance(new_data, np.ndarray): -# if not np.issubdtype(new_data.dtype, np.number): -# raise ValueError("'data' must be an array of numeric values that have type unsigned int or " -# "can be converted to unsigned int, not type %s" % new_data.dtype) -# if np.issubdtype(new_data.dtype, np.unsignedinteger): -# return new_data -# if (new_data < 0).any(): -# raise ValueError("Negative values are not allowed in 'data'.") -# if np.issubdtype(new_data.dtype, np.integer): -# return new_data.astype(np.uint) -# # all other array dtypes will not be handled. the objectmapper will attempt to convert the data -# return data - - -# @register_class('TTLs', 'ndx-events') -# class TTLs(LabeledEvents): -# """ -# Data type to hold timestamps of TTL pulses. The 'data' dataset contains the integer pulse values -# (or channel IDs), and the 'labels' dataset contains user-defined labels associated with each pulse -# value (or channel ID). The value at index i of the 'labels' dataset corresponds to a pulse value (or -# channel ID) of i in the 'data' dataset. For example, the first value (index 0) of the 'labels' dataset -# corresponds to a pulse value of 0. See the LabeledEvents type for more details. 
-# """ -# pass - diff --git a/src/pynwb/ndx_events/io/events.py b/src/pynwb/ndx_events/io/events.py index a08ab30..29f3089 100644 --- a/src/pynwb/ndx_events/io/events.py +++ b/src/pynwb/ndx_events/io/events.py @@ -6,19 +6,19 @@ # from hdmf.utils import getargs, docval # from hdmf.spec import AttributeSpec -from ..events import Events +# from ..events import Events -@register_map(Events) -class EventsMap(NWBContainerMapper): +# @register_map(Events) +# class EventsMap(NWBContainerMapper): - def __init__(self, spec): - super().__init__(spec) - # map object attribute Events.unit -> spec Events/timestamps.unit - # map object attribute Events.resolution -> spec Events/timestamps.resolution - timestamps_spec = self.spec.get_dataset('timestamps') - self.map_spec('unit', timestamps_spec.get_attribute('unit')) - self.map_spec('resolution', timestamps_spec.get_attribute('resolution')) +# def __init__(self, spec): +# super().__init__(spec) +# # map object attribute Events.unit -> spec Events/timestamps.unit +# # map object attribute Events.resolution -> spec Events/timestamps.resolution +# timestamps_spec = self.spec.get_dataset('timestamps') +# self.map_spec('unit', timestamps_spec.get_attribute('unit')) +# self.map_spec('resolution', timestamps_spec.get_attribute('resolution')) # @register_map(LabeledEvents) diff --git a/src/pynwb/tests/integration/hdf5/test_events.py b/src/pynwb/tests/integration/hdf5/test_events.py index 12ca9d6..f9600d9 100644 --- a/src/pynwb/tests/integration/hdf5/test_events.py +++ b/src/pynwb/tests/integration/hdf5/test_events.py @@ -3,167 +3,167 @@ from pynwb import NWBFile, NWBHDF5IO from pynwb.testing import AcquisitionH5IOMixin, TestCase, remove_test_file -from ndx_events import Events, LabeledEvents, TTLs, AnnotatedEventsTable - - -class TestEventsIOSimple(TestCase): - """Simple roundtrip test for CSD.""" - - def setUp(self): - self.nwbfile = NWBFile( - session_description='session_description', - identifier='identifier', - session_start_time=datetime.datetime.now(datetime.timezone.utc) - ) - self.path = 'test.nwb' - - def tearDown(self): - remove_test_file(self.path) - - def test_roundtrip(self): - """ - Add a CSD to an "ecephys" processing module in the NWBFile, write it to file, read the file, and test that the - CSD from the file matches the original CSD. 
- """ - - events = Events( - name='Events', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5 - ) - self.nwbfile.add_acquisition(events) - - labeled_events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - self.nwbfile.add_acquisition(labeled_events) - - ttls = TTLs( - name='TTLs', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - self.nwbfile.add_acquisition(ttls) - - annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 - ) - annotated_events.add_column( - name='extra', - description='extra metadata for each event type' - ) - annotated_events.add_event_type( - label='Reward', - event_description='Times when the animal received juice reward.', - event_times=[1., 2., 3.], - extra='extra', - id=3 - ) - annotated_events.add_event_type( - label='Nosepoke', - event_description='Times when the animal poked its noise through the input port.', - event_times=[1., 2., 3.], - extra='extra', - id=5 - ) - - events_module = self.nwbfile.create_processing_module( - name='events', - description='processed events data' - ) - events_module.add(annotated_events) - - with NWBHDF5IO(self.path, mode='w') as io: - io.write(self.nwbfile) - - with NWBHDF5IO(self.path, mode='r', load_namespaces=True) as io: - read_nwbfile = io.read() - self.assertContainerEqual(events, read_nwbfile.acquisition['Events']) - self.assertContainerEqual(labeled_events, read_nwbfile.acquisition['LabeledEvents']) - self.assertContainerEqual(ttls, read_nwbfile.acquisition['TTLs']) - self.assertContainerEqual(annotated_events, read_nwbfile.processing['events']['AnnotatedEventsTable']) - - -class TestEventsIO(AcquisitionH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Events to read/write """ - events = Events( - name='Events', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5 - ) - return events - - -class TestLabeledEventsIO(AcquisitionH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Events to read/write """ - labeled_events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - return labeled_events - - -class TestTTLs(AcquisitionH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Events to read/write """ - ttls = TTLs( - name='TTLs', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - return ttls - - -class TestAnnotatedEventsTableIO(AcquisitionH5IOMixin, TestCase): - """ Test adding AnnotatedEventsTable into acquisition and accessing AnnotatedEvents after read """ - - def setUpContainer(self): - """ Return the test AnnotatedEventsTable to read/write """ - annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 - ) - annotated_events.add_column( - name='extra', - description='extra metadata for each event type' - ) - annotated_events.add_event_type( - label='Reward', - event_description='Times when 
the animal received juice reward.', - event_times=[1., 2., 3.], - extra='extra', - id=3 - ) - annotated_events.add_event_type( - label='Nosepoke', - event_description='Times when the animal poked its noise through the input port.', - event_times=[1., 2., 3.], - extra='extra', - id=5 - ) - return annotated_events +# from ndx_events import Events, LabeledEvents, TTLs, AnnotatedEventsTable + + +# class TestEventsIOSimple(TestCase): +# """Simple roundtrip test for CSD.""" + +# def setUp(self): +# self.nwbfile = NWBFile( +# session_description='session_description', +# identifier='identifier', +# session_start_time=datetime.datetime.now(datetime.timezone.utc) +# ) +# self.path = 'test.nwb' + +# def tearDown(self): +# remove_test_file(self.path) + +# def test_roundtrip(self): +# """ +# Add a CSD to an "ecephys" processing module in the NWBFile, write it to file, read the file, and test that the +# CSD from the file matches the original CSD. +# """ + +# events = Events( +# name='Events', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5 +# ) +# self.nwbfile.add_acquisition(events) + +# labeled_events = LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3]), +# labels=['', '', '', 'event1', 'event2'] +# ) +# self.nwbfile.add_acquisition(labeled_events) + +# ttls = TTLs( +# name='TTLs', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3]), +# labels=['', '', '', 'event1', 'event2'] +# ) +# self.nwbfile.add_acquisition(ttls) + +# annotated_events = AnnotatedEventsTable( +# name='AnnotatedEventsTable', +# description='annotated events from my experiment', +# resolution=1e-5 +# ) +# annotated_events.add_column( +# name='extra', +# description='extra metadata for each event type' +# ) +# annotated_events.add_event_type( +# label='Reward', +# event_description='Times when the animal received juice reward.', +# event_times=[1., 2., 3.], +# extra='extra', +# id=3 +# ) +# annotated_events.add_event_type( +# label='Nosepoke', +# event_description='Times when the animal poked its noise through the input port.', +# event_times=[1., 2., 3.], +# extra='extra', +# id=5 +# ) + +# events_module = self.nwbfile.create_processing_module( +# name='events', +# description='processed events data' +# ) +# events_module.add(annotated_events) + +# with NWBHDF5IO(self.path, mode='w') as io: +# io.write(self.nwbfile) + +# with NWBHDF5IO(self.path, mode='r', load_namespaces=True) as io: +# read_nwbfile = io.read() +# self.assertContainerEqual(events, read_nwbfile.acquisition['Events']) +# self.assertContainerEqual(labeled_events, read_nwbfile.acquisition['LabeledEvents']) +# self.assertContainerEqual(ttls, read_nwbfile.acquisition['TTLs']) +# self.assertContainerEqual(annotated_events, read_nwbfile.processing['events']['AnnotatedEventsTable']) + + +# class TestEventsIO(AcquisitionH5IOMixin, TestCase): + +# def setUpContainer(self): +# """ Return the test Events to read/write """ +# events = Events( +# name='Events', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5 +# ) +# return events + + +# class TestLabeledEventsIO(AcquisitionH5IOMixin, TestCase): + +# def setUpContainer(self): +# """ Return the test Events to read/write """ +# labeled_events = LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, 
+# data=np.uint([3, 4, 3]), +# labels=['', '', '', 'event1', 'event2'] +# ) +# return labeled_events + + +# class TestTTLs(AcquisitionH5IOMixin, TestCase): + +# def setUpContainer(self): +# """ Return the test Events to read/write """ +# ttls = TTLs( +# name='TTLs', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3]), +# labels=['', '', '', 'event1', 'event2'] +# ) +# return ttls + + +# class TestAnnotatedEventsTableIO(AcquisitionH5IOMixin, TestCase): +# """ Test adding AnnotatedEventsTable into acquisition and accessing AnnotatedEvents after read """ + +# def setUpContainer(self): +# """ Return the test AnnotatedEventsTable to read/write """ +# annotated_events = AnnotatedEventsTable( +# name='AnnotatedEventsTable', +# description='annotated events from my experiment', +# resolution=1e-5 +# ) +# annotated_events.add_column( +# name='extra', +# description='extra metadata for each event type' +# ) +# annotated_events.add_event_type( +# label='Reward', +# event_description='Times when the animal received juice reward.', +# event_times=[1., 2., 3.], +# extra='extra', +# id=3 +# ) +# annotated_events.add_event_type( +# label='Nosepoke', +# event_description='Times when the animal poked its noise through the input port.', +# event_times=[1., 2., 3.], +# extra='extra', +# id=5 +# ) +# return annotated_events diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index bc2fa13..536e4a7 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -1,92 +1,92 @@ -def test_example_usage(): - from datetime import datetime - from ndx_events import Events, EventsTable, EventTypesTable - from pynwb import NWBFile, NWBHDF5IO +# def test_example_usage(): +# from datetime import datetime +# from ndx_events import Events, EventsTable, EventTypesTable +# from pynwb import NWBFile, NWBHDF5IO - nwb = NWBFile( - session_description="session description", - identifier="cool_experiment_001", - session_start_time=datetime.now().astimezone(), - ) +# nwb = NWBFile( +# session_description="session description", +# identifier="cool_experiment_001", +# session_start_time=datetime.now().astimezone(), +# ) - # create a basic events object - basic_tone_event = Events( - name="tone_onset", - timestamps=[0.0, 0.1, 0.3, 0.5, 0.6], - description="Times when a tone was played.", - ) +# # create a basic events object +# basic_tone_event = Events( +# name="tone_onset", +# timestamps=[0.0, 0.1, 0.3, 0.5, 0.6], +# description="Times when a tone was played.", +# ) - # add the basic events object to the NWBFile object - nwb.add_acquisition(basic_tone_event) +# # add the basic events object to the NWBFile object +# nwb.add_acquisition(basic_tone_event) - # create an event types table - event_types_table = EventTypesTable( - name="EventTypesTable", - description="metadata about event types", - ) +# # create an event types table +# event_types_table = EventTypesTable( +# name="EventTypesTable", +# description="metadata about event types", +# ) - # create a new custom column with additional metadata - event_types_table.add_column( - name="extra_metadata", - description="some additional metadata about each event type", - ) +# # create a new custom column with additional metadata +# event_types_table.add_column( +# name="extra_metadata", +# description="some additional metadata about each event type", +# ) - # add event types one by one - event_types_table.add_row( - id=0, event_name="trial start", 
extra_metadata="more metadata" - ) - event_types_table.add_row( - id=1, event_name="cue onset", extra_metadata="more metadata" - ) - event_types_table.add_row( - id=2, event_name="cue offset", extra_metadata="more metadata" - ) - event_types_table.add_row( - id=3, event_name="nosepoke left", extra_metadata="more metadata" - ) - event_types_table.add_row( - id=4, event_name="nosepoke right", extra_metadata="more metadata" - ) - event_types_table.add_row(id=5, event_name="reward", extra_metadata="more metadata") +# # add event types one by one +# event_types_table.add_row( +# id=0, event_name="trial start", extra_metadata="more metadata" +# ) +# event_types_table.add_row( +# id=1, event_name="cue onset", extra_metadata="more metadata" +# ) +# event_types_table.add_row( +# id=2, event_name="cue offset", extra_metadata="more metadata" +# ) +# event_types_table.add_row( +# id=3, event_name="nosepoke left", extra_metadata="more metadata" +# ) +# event_types_table.add_row( +# id=4, event_name="nosepoke right", extra_metadata="more metadata" +# ) +# event_types_table.add_row(id=5, event_name="reward", extra_metadata="more metadata") - # add the event types table to the acquisition group for now - # it should be added to the /general/tasks group when merged with core - nwb.add_acquisition(event_types_table) +# # add the event types table to the acquisition group for now +# # it should be added to the /general/tasks group when merged with core +# nwb.add_acquisition(event_types_table) - # create a new EventsTable type to hold events recorded from the data acquisition system - events_table = EventsTable( - name="EventsTable", - description="events from my experiment", - ) - # set the dynamic table region link - events_table["event_type"].table = event_types_table +# # create a new EventsTable type to hold events recorded from the data acquisition system +# events_table = EventsTable( +# name="EventsTable", +# description="events from my experiment", +# ) +# # set the dynamic table region link +# events_table["event_type"].table = event_types_table - # add events one by one - events_table.add_row(timestamp=0.1, event_type=0, duration=0.0) - events_table.add_row(timestamp=0.3, event_type=1, duration=0.0) - events_table.add_row(timestamp=0.4, event_type=2, duration=0.0) - events_table.add_row(timestamp=0.8, event_type=4, duration=0.1) - events_table.add_row(timestamp=0.85, event_type=5, duration=0.0) +# # add events one by one +# events_table.add_row(timestamp=0.1, event_type=0, duration=0.0) +# events_table.add_row(timestamp=0.3, event_type=1, duration=0.0) +# events_table.add_row(timestamp=0.4, event_type=2, duration=0.0) +# events_table.add_row(timestamp=0.8, event_type=4, duration=0.1) +# events_table.add_row(timestamp=0.85, event_type=5, duration=0.0) - # add the EventsTable type to the acquisition group of the NWB file - nwb.add_acquisition(events_table) +# # add the EventsTable type to the acquisition group of the NWB file +# nwb.add_acquisition(events_table) - # write nwb file - filename = "test.nwb" - with NWBHDF5IO(filename, "w") as io: - io.write(nwb) +# # write nwb file +# filename = "test.nwb" +# with NWBHDF5IO(filename, "w") as io: +# io.write(nwb) - # read nwb file and check its contents - with NWBHDF5IO(filename, "r", load_namespaces=True) as io: - nwb = io.read() - print(nwb) - # access the events table and event types table by name from the NWBFile acquisition group and print it - print(nwb.acquisition["tone_onset"]) - print(nwb.acquisition["EventTypesTable"]) - 
print(nwb.acquisition["EventsTable"]) - print(nwb.acquisition["EventsTable"].to_dataframe()) - print(nwb.acquisition["EventsTable"][0, "event_type"]) +# # read nwb file and check its contents +# with NWBHDF5IO(filename, "r", load_namespaces=True) as io: +# nwb = io.read() +# print(nwb) +# # access the events table and event types table by name from the NWBFile acquisition group and print it +# print(nwb.acquisition["tone_onset"]) +# print(nwb.acquisition["EventTypesTable"]) +# print(nwb.acquisition["EventsTable"]) +# print(nwb.acquisition["EventsTable"].to_dataframe()) +# print(nwb.acquisition["EventsTable"][0, "event_type"]) -if __name__ == "__main__": - test_example_usage() +# if __name__ == "__main__": +# test_example_usage() diff --git a/src/pynwb/tests/unit/test_events.py b/src/pynwb/tests/unit/test_events.py index 7938d99..4def398 100644 --- a/src/pynwb/tests/unit/test_events.py +++ b/src/pynwb/tests/unit/test_events.py @@ -3,193 +3,281 @@ from pynwb.testing import TestCase from pynwb.core import VectorData, VectorIndex -from ndx_events import Events, LabeledEvents, TTLs, AnnotatedEventsTable +from ndx_events import EventsTable, EventTypesTable -class TestEvents(TestCase): +class TestEventTypesTable(TestCase): def test_init(self): - events = Events( - name='Events', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5 + event_types_table = EventTypesTable(description="Metadata about event types") + # TODO description field should not be necessary + assert event_types_table.name == "EventTypesTable" + assert event_types_table.description == "Metadata about event types" + + def test_add_row(self): + event_types_table = EventTypesTable(description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], ) - self.assertEqual(events.name, 'Events') - self.assertEqual(events.description, 'events from my experiment') - self.assertEqual(events.timestamps, [0., 1., 2.]) - self.assertEqual(events.resolution, 1e-5) - self.assertEqual(events.unit, 'seconds') + assert event_types_table["event_name"].data == ["cue on", "stimulus on"] + assert event_types_table["event_type_description"].data == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + assert event_types_table["hed_tags"][0] == ["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"] + assert event_types_table["hed_tags"][1] == ["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"] -class TestLabeledEvents(TestCase): + +class TestEventsTable(TestCase): def test_init(self): - events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - self.assertEqual(events.name, 'LabeledEvents') - self.assertEqual(events.description, 'events from my experiment') - self.assertEqual(events.timestamps, [0., 1., 2.]) - self.assertEqual(events.resolution, 1e-5) - self.assertEqual(events.unit, 'seconds') - np.testing.assert_array_equal(events.data, np.uint([3, 4, 3])), - self.assertEqual(events.labels, ['', '', '', 'event1', 
'event2']) - - def test_mismatch_length(self): - msg = 'Timestamps and data must have the same length: 3 != 4' - with self.assertRaisesWith(ValueError, msg): - LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3, 5]), - labels=['', '', '', 'event1', 'event2', 'event3'] - ) - - def test_default_labels(self): - events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - ) - self.assertEqual(events.labels, ['', '', '', '3', '4']) - - def test_none_in_labels(self): - msg = "None values are not allowed in the labels array. Please use '' for undefined labels." - with self.assertRaisesWith(ValueError, msg): - LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=[None, None, None, 'event1', 'event2'] - ) - - def test_data_negative(self): - msg = "Negative values are not allowed in 'data'." - with self.assertRaisesWith(ValueError, msg): - LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=[1, -2, 3], - labels=['', '', '', 'event1', 'event2'] - ) - - def test_data_int_conversion(self): - le = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=[1, 2, 3], - labels=['', '', '', 'event1', 'event2'] + events_table = EventsTable(description="Metadata about events") + # TODO description field should not be necessary + assert events_table.name == "EventsTable" + assert events_table.description == "Metadata about events" + + def test_init_dtr(self): + event_types_table = EventTypesTable(description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], ) - np.testing.assert_array_equal(le.data, np.array([1, 2, 3])) - self.assertEqual(le.data.dtype, np.uint) - - def test_data_string(self): - msg = ("'data' must be an array of numeric values that have type unsigned int or " - "can be converted to unsigned int, not type <U1") - with self.assertRaisesWith(ValueError, msg): - LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=['1', '2', '3'], - labels=['', '', '', 'event1', 'event2'] - ) - - def test_data_pass_through(self): - data = [1.0, 2.0, 3.0] - le = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=data, - labels=['', '', '', 'event1', 'event2'] + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], ) - self.assertIs(le.data, data) + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + assert events_table["event_type"].table is event_types_table -class TestTTLs(TestCase): + # def test_add_row(self): + # event_types_table = EventTypesTable(description="Metadata about event types") + # event_types_table.add_row( + # event_name="cue on", + # event_type_description="Times when the cue 
was on screen.", + # hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], + # ) + # event_types_table.add_row( + # event_name="stimulus on", + # event_type_description="Times when the stimulus was on screen.", + # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], + # ) - def test_init(self): - events = TTLs( - name='TTLs', - description='ttl pulses from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - self.assertEqual(events.name, 'TTLs') - self.assertEqual(events.description, 'ttl pulses from my experiment') - self.assertEqual(events.timestamps, [0., 1., 2.]) - self.assertEqual(events.resolution, 1e-5) - self.assertEqual(events.unit, 'seconds') - np.testing.assert_array_equal(events.data, np.uint([3, 4, 3])), - self.assertEqual(events.labels, ['', '', '', 'event1', 'event2']) + # events_table = EventsTable(description="Metadata about events", linked_tables={"event_type": event_types_table}) + # events_table.add_row( + # timestamp=0.1, + # event_type_description="Times when the cue was on screen.", + # hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], + # ) + # event_types_table.add_row( + # event_name="stimulus on", + # event_type_description="Times when the stimulus was on screen.", + # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], + # ) + # assert event_types_table["event_name"].data == ["cue on", "stimulus on"] + # assert event_types_table["event_type_description"].data == [ + # "Times when the cue was on screen.", + # "Times when the stimulus was on screen.", + # ] + # assert event_types_table["hed_tags"][0] == ["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"] + # assert event_types_table["hed_tags"][1] == ["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"] -class TestAnnotatedEventsTable(TestCase): +# class TestEventsTable(TestCase): - def test_init(self): - events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 - ) - self.assertEqual(events.name, 'AnnotatedEventsTable') - self.assertEqual(events.description, 'annotated events from my experiment') - self.assertEqual(events.resolution, 1e-5) - - def test_add_event_type(self): - events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment' - ) - events.add_event_type( - label='Reward', - event_description='Times when the animal received juice reward.', - event_times=[1., 2., 3.], - id=3 - ) - events.add_event_type( - label='Abort', - event_description='Times when the animal aborted the trial.', - event_times=[0.5, 4.5], - id=4 - ) - self.assertEqual(events.id.data, [3, 4]) - self.assertEqual(events['event_times'][0], [1., 2., 3.]) - self.assertEqual(events['event_times'][1], [0.5, 4.5]) - self.assertEqual(events['label'][0], 'Reward') - self.assertEqual(events['label'][1], 'Abort') - self.assertListEqual(events['event_description'].data, ['Times when the animal received juice reward.', - 'Times when the animal aborted the trial.']) - self.assertEqual(events.colnames, ('event_times', 'label', 'event_description')) - self.assertEqual(len(events.columns), 4) - self.assertEqual(events.columns[0].name, 'event_times_index') - self.assertIsInstance(events.columns[0], VectorIndex) - self.assertIs(events.columns[0].target, events.columns[1]) - 
self.assertEqual(events.columns[1].name, 'event_times') - self.assertIsInstance(events.columns[1], VectorData) - self.assertEqual(events.columns[2].name, 'label') - self.assertIsInstance(events.columns[2], VectorData) - self.assertEqual(events.columns[3].name, 'event_description') - self.assertIsInstance(events.columns[3], VectorData) - self.assertEqual(events.resolution, None) +# def test_init(self): +# event_types_table = EventTypesTable() +# events = EventsTable( +# name='EventsTable', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5 +# ) +# self.assertEqual(events.name, 'Events') +# self.assertEqual(events.description, 'events from my experiment') +# self.assertEqual(events.timestamps, [0., 1., 2.]) +# self.assertEqual(events.resolution, 1e-5) +# self.assertEqual(events.unit, 'seconds') + + +# class TestLabeledEvents(TestCase): + +# def test_init(self): +# events = LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3]), +# labels=['', '', '', 'event1', 'event2'] +# ) +# self.assertEqual(events.name, 'LabeledEvents') +# self.assertEqual(events.description, 'events from my experiment') +# self.assertEqual(events.timestamps, [0., 1., 2.]) +# self.assertEqual(events.resolution, 1e-5) +# self.assertEqual(events.unit, 'seconds') +# np.testing.assert_array_equal(events.data, np.uint([3, 4, 3])), +# self.assertEqual(events.labels, ['', '', '', 'event1', 'event2']) + +# def test_mismatch_length(self): +# msg = 'Timestamps and data must have the same length: 3 != 4' +# with self.assertRaisesWith(ValueError, msg): +# LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3, 5]), +# labels=['', '', '', 'event1', 'event2', 'event3'] +# ) + +# def test_default_labels(self): +# events = LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3]), +# ) +# self.assertEqual(events.labels, ['', '', '', '3', '4']) + +# def test_none_in_labels(self): +# msg = "None values are not allowed in the labels array. Please use '' for undefined labels." +# with self.assertRaisesWith(ValueError, msg): +# LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3]), +# labels=[None, None, None, 'event1', 'event2'] +# ) + +# def test_data_negative(self): +# msg = "Negative values are not allowed in 'data'." 
+# with self.assertRaisesWith(ValueError, msg): +# LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=[1, -2, 3], +# labels=['', '', '', 'event1', 'event2'] +# ) + +# def test_data_int_conversion(self): +# le = LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=[1, 2, 3], +# labels=['', '', '', 'event1', 'event2'] +# ) +# np.testing.assert_array_equal(le.data, np.array([1, 2, 3])) +# self.assertEqual(le.data.dtype, np.uint) + +# def test_data_string(self): +# msg = ("'data' must be an array of numeric values that have type unsigned int or " +# "can be converted to unsigned int, not type <U1") +# with self.assertRaisesWith(ValueError, msg): +# LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=['1', '2', '3'], +# labels=['', '', '', 'event1', 'event2'] +# ) + +# def test_data_pass_through(self): +# data = [1.0, 2.0, 3.0] +# le = LabeledEvents( +# name='LabeledEvents', +# description='events from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=data, +# labels=['', '', '', 'event1', 'event2'] +# ) +# self.assertIs(le.data, data) + + +# class TestTTLs(TestCase): + +# def test_init(self): +# events = TTLs( +# name='TTLs', +# description='ttl pulses from my experiment', +# timestamps=[0., 1., 2.], +# resolution=1e-5, +# data=np.uint([3, 4, 3]), +# labels=['', '', '', 'event1', 'event2'] +# ) +# self.assertEqual(events.name, 'TTLs') +# self.assertEqual(events.description, 'ttl pulses from my experiment') +# self.assertEqual(events.timestamps, [0., 1., 2.]) +# self.assertEqual(events.resolution, 1e-5) +# self.assertEqual(events.unit, 'seconds') +# np.testing.assert_array_equal(events.data, np.uint([3, 4, 3])), +# self.assertEqual(events.labels, ['', '', '', 'event1', 'event2']) + + +# class TestAnnotatedEventsTable(TestCase): + +# def test_init(self): +# events = AnnotatedEventsTable( +# name='AnnotatedEventsTable', +# description='annotated events from my experiment', +# resolution=1e-5 +# ) +# self.assertEqual(events.name, 'AnnotatedEventsTable') +# self.assertEqual(events.description, 'annotated events from my experiment') +# self.assertEqual(events.resolution, 1e-5) + +# def test_add_event_type(self): +# events = AnnotatedEventsTable( +# name='AnnotatedEventsTable', +# description='annotated events from my experiment' +# ) +# events.add_event_type( +# label='Reward', +# event_description='Times when the animal received juice reward.', +# event_times=[1., 2., 3.], +# id=3 +# ) +# events.add_event_type( +# label='Abort', +# event_description='Times when the animal aborted the trial.', +# event_times=[0.5, 4.5], +# id=4 +# ) +# self.assertEqual(events.id.data, [3, 4]) +# self.assertEqual(events['event_times'][0], [1., 2., 3.]) +# self.assertEqual(events['event_times'][1], [0.5, 4.5]) +# self.assertEqual(events['label'][0], 'Reward') +# self.assertEqual(events['label'][1], 'Abort') +# self.assertListEqual(events['event_description'].data, ['Times when the animal received juice reward.', +# 'Times when the animal aborted the trial.']) +# self.assertEqual(events.colnames, ('event_times', 'label', 'event_description')) +# self.assertEqual(len(events.columns), 4) +# self.assertEqual(events.columns[0].name, 'event_times_index') +# self.assertIsInstance(events.columns[0], VectorIndex) +# 
self.assertIs(events.columns[0].target, events.columns[1]) +# self.assertEqual(events.columns[1].name, 'event_times') +# self.assertIsInstance(events.columns[1], VectorData) +# self.assertEqual(events.columns[2].name, 'label') +# self.assertIsInstance(events.columns[2], VectorData) +# self.assertEqual(events.columns[3].name, 'event_description') +# self.assertIsInstance(events.columns[3], VectorData) +# self.assertEqual(events.resolution, None) diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 4f8eb7c..69018f6 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -2,7 +2,7 @@ import os.path -from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBGroupSpec, NWBAttributeSpec, NWBDatasetSpec, NWBLinkSpec +from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBGroupSpec, NWBAttributeSpec, NWBDatasetSpec def main(): @@ -10,60 +10,57 @@ def main(): doc="""NWB extension for storing timestamped event and TTL pulse data""", name="""ndx-events""", version="""0.3.0""", - author=list(map(str.strip, """Ryan Ly""".split(','))), - contact=list(map(str.strip, """rly@lbl.gov""".split(','))) + author=["Ryan Ly"], + contact=["rly@lbl.gov"], ) - ns_builder.include_type('NWBDataInterface', namespace='core') - ns_builder.include_type('DynamicTable', namespace='core') - ns_builder.include_type('VectorData', namespace='core') - ns_builder.include_type('VectorIndex', namespace='core') - ns_builder.include_type('DynamicTableRegion', namespace='core') + ns_builder.include_namespace('core') - events = NWBGroupSpec( - neurodata_type_def='Events', - neurodata_type_inc='NWBDataInterface', - doc=("A simple list of timestamps, stored in seconds, of an event type. For example, this neurodata type " - "could be used to store all the times that a nosepoke was detected. The name may be set to " - "'nosepoke_onset'."), - attributes=[ - NWBAttributeSpec( - name='description', - dtype='text', - doc='Description of the event type.', - ), - ], - datasets=[ - NWBDatasetSpec( - name='timestamps', - dtype='float32', - dims=['num_events'], - shape=[None], - doc=('Event timestamps, in seconds, relative to the common experiment master-clock stored in ' - 'NWBFile.timestamps_reference_time.'), - attributes=[ - NWBAttributeSpec( - name='unit', - dtype='text', - value='seconds', - doc="Unit of measurement for timestamps, which is fixed to 'seconds'.", - ), - NWBAttributeSpec( - name='resolution', - dtype='float32', - doc=('The smallest possible difference between two event times. Usually 1 divided by the ' - 'event time sampling rate on the data acquisition system.'), - required=False, - ), - ], - ), - ], - ) + # events = NWBGroupSpec( + # neurodata_type_def='Events', + # neurodata_type_inc='NWBDataInterface', + # doc=("A simple list of timestamps, stored in seconds, of an event type. For example, this neurodata type " + # "could be used to store all the times that a nosepoke was detected. 
The name may be set to " + # "'nosepoke_onset'."), + # attributes=[ + # NWBAttributeSpec( + # name='description', + # dtype='text', + # doc='Description of the event type.', + # ), + # ], + # datasets=[ + # NWBDatasetSpec( + # name='timestamps', + # dtype='float32', + # dims=['num_events'], + # shape=[None], + # doc=('Event timestamps, in seconds, relative to the common experiment master-clock stored in ' + # 'NWBFile.timestamps_reference_time.'), + # attributes=[ + # NWBAttributeSpec( + # name='unit', + # dtype='text', + # value='seconds', + # doc="Unit of measurement for timestamps, which is fixed to 'seconds'.", + # ), + # NWBAttributeSpec( + # name='resolution', + # dtype='float32', + # doc=('The smallest possible difference between two event times. Usually 1 divided by the ' + # 'event time sampling rate on the data acquisition system.'), + # required=False, + # ), + # ], + # ), + # ], + # ) event_types_table = NWBGroupSpec( neurodata_type_def="EventTypesTable", neurodata_type_inc='DynamicTable', doc=("A column-based table to store information about each event type, such as name, one event type per row."), + default_name="EventTypesTable", datasets=[ NWBDatasetSpec( name='event_name', @@ -72,35 +69,48 @@ def main(): doc='Name of each event type.', ), NWBDatasetSpec( - name='hed', + name='event_type_description', neurodata_type_inc='VectorData', dtype='text', + doc='Description of each event type.', + ), + NWBDatasetSpec( + name='hed_tags', + neurodata_type_inc='VectorData', + dtype='text', + dims=['num_tags'], + shape=[None], + doc=("Optional column containing the Hierarchical Event Descriptor (HED) tags for each event type."), + quantity="?", + ), + NWBDatasetSpec( + name='hed_tags_index', + neurodata_type_inc='VectorIndex', dims=['num_events'], shape=[None], - doc=("Optional column containing the Hierarchical Event Descriptor of each event."), + doc=("Index column for `hed_tags` column."), quantity="?", ), ], + attributes=[ # override required description attribute from DynamicTable + NWBAttributeSpec( + name='description', + dtype='text', + doc='Description of the event types table.', + default_value="Metadata about event types.", + required=True, + ), + ], ) events_table = NWBGroupSpec( neurodata_type_def='EventsTable', neurodata_type_inc='DynamicTable', - # doc=("Event timestamps that can have different labels and other metadata. " - # "For example, " - # "this type could represent the times that reward was given, as well as which of three different " - # "types of reward was given. In this case, the 'data' dataset would contain unique values {0, 1, 2}, and " - # "the 'labels' table would contain three rows, one for each unique data value, and two columns: 'label', " - # "which stores a unique name for each event type, and a custom column 'reward_type', which stores " - # "information about the type of reward given. The values in row with index i would represent the reward " - # "associated with a data value of i. For example, 'timestamps' may contain values [0, 0.05, 0.15, 0.2], " - # "'data' may contain values [0, 1, 0, 2], and the 'labels' table may contain three rows, where the row " - # "at index 0 has label='" - # "Users may specify custom columns " - # "in the 'labels' table to store arbitrary metadata associated with each event type. 
" - # "The values in the 'data' dataset do not have to be continuous and start at 0, " - # "but this is recommended so that there are not empty rows in the 'labels' table."), - doc="A column-based table to store information about events, one event per row.", + doc=("A column-based table to store information about events (event instances), one event per row. " + "Each event must have an event_type, which is a row in the EventTypesTable. Additional columns " + "may be added to store metadata about each event, such as the duration of the event, or a " + "text value of the event."), + default_name="EventsTable", datasets=[ NWBDatasetSpec( name='timestamp', @@ -139,8 +149,7 @@ def main(): dtype='text', dims=['num_events'], shape=[None], - doc=("Optional column containing the text value of each event. Values should be described using " - "Hierarchical Event Descriptors in the linked EventTypesTable."), + doc=("Optional column containing the text value of each event."), quantity="?", ), NWBDatasetSpec( @@ -152,6 +161,34 @@ def main(): doc=("Optional column containing the duration of each event, in seconds."), quantity="?", ), + NWBDatasetSpec( + name='hed_tags', + neurodata_type_inc='VectorData', + dtype='text', + dims=['num_tags'], + shape=[None], + doc=("Optional column containing the Hierarchical Event Descriptor (HED) tags for each event. " + "HED tags should be used at the event type level, not at the event instance level, when " + "possible, unless it is important to annotate events individually."), + quantity="?", + ), + NWBDatasetSpec( + name='hed_tags_index', + neurodata_type_inc='VectorIndex', + dims=['num_events'], + shape=[None], + doc=("Index column for `hed_tags` column."), + quantity="?", + ), + ], + attributes=[ # override required description attribute from DynamicTable + NWBAttributeSpec( + name='description', + dtype='text', + doc='Description of the events table.', + default_value="Metadata about events.", + required=True, + ), ], ) @@ -160,11 +197,12 @@ def main(): neurodata_type_inc='EventTypesTable', doc=("A column-based table to store information about each TTL type, such as name and pulse value, " "one TTL type per row."), + default_name="TtlTypesTable", datasets=[ NWBDatasetSpec( name='pulse_value', neurodata_type_inc='VectorData', - dtype='int8', + dtype='uint8', doc='TTL pulse value for each event type.', ), ], @@ -174,6 +212,7 @@ def main(): neurodata_type_def='TtlsTable', neurodata_type_inc='EventsTable', doc=("Data type to hold timestamps of TTL pulses."), + default_name="TtlsTable", datasets=[ NWBDatasetSpec( name='event_type', @@ -186,7 +225,7 @@ def main(): ], ) - new_data_types = [events, event_types_table, events_table, ttl_types_table, ttls_table] + new_data_types = [event_types_table, events_table, ttl_types_table, ttls_table] # export the spec to yaml files in the spec folder output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec')) From 8d763929738a72c15c52d2251387461ebde21a01 Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Tue, 24 Oct 2023 23:39:01 -0700 Subject: [PATCH 05/15] Update from feedback --- spec/ndx-events.extensions.yaml | 111 +++++------ src/pynwb/ndx_events/__init__.py | 11 +- src/pynwb/ndx_events/events.py | 10 - src/pynwb/tests/unit/test_events.py | 278 +++++++++++++++++++++++----- src/spec/create_extension_spec.py | 190 +++++++------------ 5 files changed, 364 insertions(+), 236 deletions(-) diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index 385bc46..27b6f4b 100644 
--- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -1,3 +1,40 @@ +datasets: +- neurodata_type_def: TimestampVectorData + neurodata_type_inc: VectorData + dtype: float64 + dims: + - num_times + shape: + - null + doc: A VectorData that stores timestamps in seconds. + attributes: + - name: unit + dtype: text + value: seconds + doc: The unit of measurement for the timestamps, fixed to 'seconds'. + - name: resolution + dtype: float64 + doc: The smallest possible difference between two timestamps. Usually 1 divided + by the sampling rate for timestamps of the data acquisition system. + required: false +- neurodata_type_def: DurationVectorData + neurodata_type_inc: VectorData + dtype: float64 + dims: + - num_events + shape: + - null + doc: A VectorData that stores durations in seconds. + attributes: + - name: unit + dtype: text + value: seconds + doc: The unit of measurement for the durations, fixed to 'seconds'. + - name: resolution + dtype: float64 + doc: The smallest possible difference between two timestamps. Usually 1 divided + by the sampling rate for timestamps of the data acquisition system. + required: false groups: - neurodata_type_def: EventTypesTable neurodata_type_inc: DynamicTable @@ -19,24 +56,6 @@ groups: neurodata_type_inc: VectorData dtype: text doc: Description of each event type. - - name: hed_tags - neurodata_type_inc: VectorData - dtype: text - dims: - - num_tags - shape: - - null - doc: Optional column containing the Hierarchical Event Descriptor (HED) tags for - each event type. - quantity: '?' - - name: hed_tags_index - neurodata_type_inc: VectorIndex - dims: - - num_events - shape: - - null - doc: Index column for `hed_tags` column. - quantity: '?' - neurodata_type_def: EventsTable neurodata_type_inc: DynamicTable default_name: EventsTable @@ -52,23 +71,8 @@ groups: required: false datasets: - name: timestamp - neurodata_type_inc: VectorData - dtype: float32 - dims: - - num_events - shape: - - null - doc: The time that the event occurred, in seconds, from the session start time. - attributes: - - name: unit - dtype: text - value: seconds - doc: Unit of measurement for timestamps, which is fixed to 'seconds'. - - name: resolution - dtype: float32 - doc: The smallest possible difference between two event times. Usually 1 divided - by the event time sampling rate on the data acquisition system. - required: false + neurodata_type_inc: TimestampVectorData + doc: The time that each event occurred, in seconds, from the session start time. - name: event_type neurodata_type_inc: DynamicTableRegion dims: @@ -87,33 +91,9 @@ groups: doc: Optional column containing the text value of each event. quantity: '?' - name: duration - neurodata_type_inc: VectorData - dtype: float32 - dims: - - num_events - shape: - - null + neurodata_type_inc: DurationVectorData doc: Optional column containing the duration of each event, in seconds. quantity: '?' - - name: hed_tags - neurodata_type_inc: VectorData - dtype: text - dims: - - num_tags - shape: - - null - doc: Optional column containing the Hierarchical Event Descriptor (HED) tags for - each event. HED tags should be used at the event type level, not at the event - instance level, when possible, unless it is important to annotate events individually. - quantity: '?' - - name: hed_tags_index - neurodata_type_inc: VectorIndex - dims: - - num_events - shape: - - null - doc: Index column for `hed_tags` column. - quantity: '?' 
- neurodata_type_def: TtlTypesTable neurodata_type_inc: EventTypesTable default_name: TtlTypesTable @@ -137,3 +117,14 @@ groups: - null doc: The type of TTL that occured. This is represented as a reference to a row of the TtlTypesTable. +- neurodata_type_def: Task + neurodata_type_inc: LabMetaData + name: task + doc: A group to store task-related general metadata. TODO When merged with core, + this will no longer inherit from LabMetaData but from NWBContainer and be placed + optionally in /general. + groups: + - name: event_types + neurodata_type_inc: EventTypesTable + doc: Table to store information about each task event type. + quantity: '?' diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index 3877752..82abd08 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -1,5 +1,5 @@ import os -from pynwb import load_namespaces +from pynwb import load_namespaces, get_class # Set path of the namespace.yaml file to the expected install location ndx_events_specpath = os.path.join( @@ -22,4 +22,11 @@ load_namespaces(ndx_events_specpath) from . import io as __io # noqa: E402,F401 -from .events import EventTypesTable, EventsTable #, TTLs, AnnotatedEventsTable # noqa: E402,F401 + +Task = get_class('Task', 'ndx-events') +TimestampVectorData = get_class('TimestampVectorData', 'ndx-events') +DurationVectorData = get_class('DurationVectorData', 'ndx-events') +EventTypesTable = get_class('EventTypesTable', 'ndx-events') +EventsTable = get_class('EventsTable', 'ndx-events') +TtlTypesTable = get_class('TtlTypesTable', 'ndx-events') +TtlsTable = get_class('TtlsTable', 'ndx-events') diff --git a/src/pynwb/ndx_events/events.py b/src/pynwb/ndx_events/events.py index 1632709..e69de29 100644 --- a/src/pynwb/ndx_events/events.py +++ b/src/pynwb/ndx_events/events.py @@ -1,10 +0,0 @@ -import numpy as np - -from pynwb import register_class, get_class -from pynwb.core import NWBDataInterface, DynamicTable -from hdmf.utils import docval, getargs, popargs, get_docval - - -EventTypesTable = get_class('EventTypesTable', 'ndx-events') -EventsTable = get_class('EventsTable', 'ndx-events') -# TTLTypesTable = get_class('TTLTypesTable', 'ndx-events') diff --git a/src/pynwb/tests/unit/test_events.py b/src/pynwb/tests/unit/test_events.py index 4def398..395cffe 100644 --- a/src/pynwb/tests/unit/test_events.py +++ b/src/pynwb/tests/unit/test_events.py @@ -1,46 +1,232 @@ -import numpy as np +from hdmf.common import DynamicTable +from pynwb import NWBHDF5IO +from pynwb.testing import TestCase, remove_test_file +from pynwb.testing.mock.file import mock_NWBFile -from pynwb.testing import TestCase -from pynwb.core import VectorData, VectorIndex +from ndx_events import EventsTable, EventTypesTable, Task, DurationVectorData, TimestampVectorData -from ndx_events import EventsTable, EventTypesTable + +class TestTimestampVectorData(TestCase): + + def test_init(self): + data = TimestampVectorData(name="test", description="description") + assert data.name == "test" + assert data.description == "description" + assert data.unit == "seconds" + + def test_add_to_dynamic_table(self): + col = TimestampVectorData(name="test", description="description") + table = DynamicTable(name="table", description="test", columns=[col]) + table.add_row(test=0.1) + assert table.test is col + assert table.test[0] == 0.1 + + +class TestTimestampVectorDataSimpleRoundtrip(TestCase): + """Simple roundtrip test for TimestampVectorData.""" + + def setUp(self): + self.path = "test.nwb" + + def 
tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a TimestampVectorData, write it to file, read the file, and test that the read object matches the + original. + """ + col = TimestampVectorData(name="test", description="description") + table = DynamicTable(name="table", description="description", columns=[col]) + table.add_row(test=0.1) + + nwbfile = mock_NWBFile() + nwbfile.add_acquisition(table) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_col = read_nwbfile.acquisition["table"]["test"] + assert isinstance(read_col, TimestampVectorData) + assert read_col.name == "test" + assert read_col.description == "description" + assert read_col.unit == "seconds" + assert read_col[0] == 0.1 + + +class TestDurationVectorData(TestCase): + + def test_init(self): + data = DurationVectorData(name="test", description="description") + assert data.name == "test" + assert data.description == "description" + assert data.unit == "seconds" + + def test_add_to_dynamic_table(self): + col = DurationVectorData(name="test", description="description") + table = DynamicTable(name="table", description="test", columns=[col]) + table.add_row(test=0.1) + assert table.test is col + assert table.test[0] == 0.1 + + +class TestDurationVectorDataSimpleRoundtrip(TestCase): + """Simple roundtrip test for DurationVectorData.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a DurationVectorData, write it to file, read the file, and test that the read object matches the + original. + """ + col = DurationVectorData(name="test", description="description") + table = DynamicTable(name="table", description="description", columns=[col]) + table.add_row(test=0.1) + + nwbfile = mock_NWBFile() + nwbfile.add_acquisition(table) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_col = read_nwbfile.acquisition["table"]["test"] + assert isinstance(read_col, DurationVectorData) + assert read_col.name == "test" + assert read_col.description == "description" + assert read_col.unit == "seconds" + assert read_col[0] == 0.1 + + +class TestTask(TestCase): + + def test_init(self): + task = Task() + assert task.name == "task" + + def test_add_to_nwbfile(self): + nwbfile = mock_NWBFile() + task = Task() + nwbfile.add_lab_meta_data(task) + assert nwbfile.get_lab_meta_data("task") is task + assert nwbfile.lab_meta_data["task"] is task + + +class TestTaskSimpleRoundtrip(TestCase): + """Simple roundtrip test for Task.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a Task, write it to file, read the file, and test that the read object matches the original. 
+ """ + task = Task() + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + assert isinstance(read_nwbfile.get_lab_meta_data("task"), Task) + assert read_nwbfile.get_lab_meta_data("task").name == "task" + assert read_nwbfile.lab_meta_data["task"].name == "task" class TestEventTypesTable(TestCase): def test_init(self): event_types_table = EventTypesTable(description="Metadata about event types") - # TODO description field should not be necessary assert event_types_table.name == "EventTypesTable" assert event_types_table.description == "Metadata about event types" + def test_init_name(self): + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + assert event_types_table.name == "event_types" + assert event_types_table.description == "Metadata about event types" + def test_add_row(self): event_types_table = EventTypesTable(description="Metadata about event types") event_types_table.add_row( event_name="cue on", event_type_description="Times when the cue was on screen.", - hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], ) event_types_table.add_row( event_name="stimulus on", event_type_description="Times when the stimulus was on screen.", - hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], ) assert event_types_table["event_name"].data == ["cue on", "stimulus on"] assert event_types_table["event_type_description"].data == [ "Times when the cue was on screen.", "Times when the stimulus was on screen.", ] - assert event_types_table["hed_tags"][0] == ["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"] - assert event_types_table["hed_tags"][1] == ["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"] +class TestEventTypesTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for EventTypesTable.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create an EventTypesTable, write it to file, read the file, and test that the read table matches the original. 
+ """ + # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable + # must be named "event_types" according to the spec + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + ) + task = Task() + task.event_types = event_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_event_types_table = read_nwbfile.get_lab_meta_data("task").event_types + assert isinstance(read_event_types_table, EventTypesTable) + assert read_event_types_table.name == "event_types" + assert read_event_types_table.description == "Metadata about event types" + assert all(read_event_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) + assert all( + read_event_types_table["event_type_description"].data[:] == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + ) + class TestEventsTable(TestCase): def test_init(self): events_table = EventsTable(description="Metadata about events") - # TODO description field should not be necessary assert events_table.name == "EventsTable" assert events_table.description == "Metadata about events" @@ -49,48 +235,54 @@ def test_init_dtr(self): event_types_table.add_row( event_name="cue on", event_type_description="Times when the cue was on screen.", - hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], + # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], ) event_types_table.add_row( event_name="stimulus on", event_type_description="Times when the stimulus was on screen.", - hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], + # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], ) events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) assert events_table["event_type"].table is event_types_table - # def test_add_row(self): - # event_types_table = EventTypesTable(description="Metadata about event types") - # event_types_table.add_row( - # event_name="cue on", - # event_type_description="Times when the cue was on screen.", - # hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], - # ) - # event_types_table.add_row( - # event_name="stimulus on", - # event_type_description="Times when the stimulus was on screen.", - # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], - # ) - - # events_table = EventsTable(description="Metadata about events", linked_tables={"event_type": event_types_table}) - # events_table.add_row( - # timestamp=0.1, - # event_type_description="Times when the cue was on screen.", - # hed_tags=["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"], - # ) - # event_types_table.add_row( - # event_name="stimulus on", - # event_type_description="Times when the stimulus was on screen.", - # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], - # ) - # assert event_types_table["event_name"].data == ["cue on", "stimulus on"] - # assert 
event_types_table["event_type_description"].data == [ - # "Times when the cue was on screen.", - # "Times when the stimulus was on screen.", - # ] - # assert event_types_table["hed_tags"][0] == ["Sensory-event", "(White, Circle)", "(Intended-effect, Cue)"] - # assert event_types_table["hed_tags"][1] == ["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"] + def test_add_row(self): + event_types_table = EventTypesTable(description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], + ) + + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_row( + timestamp=0.1, + value="white circle", + event_type=0, + duration=0.2, + # hed_tags=["(White, Circle)"], + ) + events_table.add_row( + timestamp=1.1, + value="green square", + event_type=0, + duration=0.15, + # hed_tags=["(Green, Square)"], + ) + assert events_table["timestamp"].data == [0.1, 1.1] + assert events_table["value"].data == ["white circle", "green square"] + assert events_table["duration"].data == [0.2, 0.15] + # assert events_table["hed_tags"][0] == ["(White, Circle)"] + # assert events_table["hed_tags"][1] == ["(Green, Square)"] + + +## TODO: TestEventsTableSimpleRoundtrip + # class TestEventsTable(TestCase): diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 69018f6..b5282a0 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- - import os.path - from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBGroupSpec, NWBAttributeSpec, NWBDatasetSpec @@ -16,45 +14,53 @@ def main(): ns_builder.include_namespace('core') - # events = NWBGroupSpec( - # neurodata_type_def='Events', - # neurodata_type_inc='NWBDataInterface', - # doc=("A simple list of timestamps, stored in seconds, of an event type. For example, this neurodata type " - # "could be used to store all the times that a nosepoke was detected. The name may be set to " - # "'nosepoke_onset'."), - # attributes=[ - # NWBAttributeSpec( - # name='description', - # dtype='text', - # doc='Description of the event type.', - # ), - # ], - # datasets=[ - # NWBDatasetSpec( - # name='timestamps', - # dtype='float32', - # dims=['num_events'], - # shape=[None], - # doc=('Event timestamps, in seconds, relative to the common experiment master-clock stored in ' - # 'NWBFile.timestamps_reference_time.'), - # attributes=[ - # NWBAttributeSpec( - # name='unit', - # dtype='text', - # value='seconds', - # doc="Unit of measurement for timestamps, which is fixed to 'seconds'.", - # ), - # NWBAttributeSpec( - # name='resolution', - # dtype='float32', - # doc=('The smallest possible difference between two event times. 
Usually 1 divided by the ' - # 'event time sampling rate on the data acquisition system.'), - # required=False, - # ), - # ], - # ), - # ], - # ) + timestamp_vector_data = NWBDatasetSpec( + neurodata_type_def="TimestampVectorData", + neurodata_type_inc="VectorData", + doc="A VectorData that stores timestamps in seconds.", + dtype="float64", + dims=['num_times'], + shape=[None], + attributes=[ + NWBAttributeSpec( + name="unit", + dtype="text", + doc="The unit of measurement for the timestamps, fixed to 'seconds'.", + value="seconds", + ), + NWBAttributeSpec( + name="resolution", + dtype="float64", + doc=("The smallest possible difference between two timestamps. Usually 1 divided by the " + "sampling rate for timestamps of the data acquisition system."), + required=False, + ), + ], + ) + + duration_vector_data = NWBDatasetSpec( + neurodata_type_def="DurationVectorData", + neurodata_type_inc="VectorData", + doc="A VectorData that stores durations in seconds.", + dtype="float64", + dims=['num_events'], + shape=[None], + attributes=[ + NWBAttributeSpec( + name="unit", + dtype="text", + doc="The unit of measurement for the durations, fixed to 'seconds'.", + value="seconds", + ), + NWBAttributeSpec( + name="resolution", + dtype="float64", + doc=("The smallest possible difference between two timestamps. Usually 1 divided by the " + "sampling rate for timestamps of the data acquisition system."), + required=False, + ), + ], + ) event_types_table = NWBGroupSpec( neurodata_type_def="EventTypesTable", @@ -74,32 +80,6 @@ def main(): dtype='text', doc='Description of each event type.', ), - NWBDatasetSpec( - name='hed_tags', - neurodata_type_inc='VectorData', - dtype='text', - dims=['num_tags'], - shape=[None], - doc=("Optional column containing the Hierarchical Event Descriptor (HED) tags for each event type."), - quantity="?", - ), - NWBDatasetSpec( - name='hed_tags_index', - neurodata_type_inc='VectorIndex', - dims=['num_events'], - shape=[None], - doc=("Index column for `hed_tags` column."), - quantity="?", - ), - ], - attributes=[ # override required description attribute from DynamicTable - NWBAttributeSpec( - name='description', - dtype='text', - doc='Description of the event types table.', - default_value="Metadata about event types.", - required=True, - ), ], ) @@ -114,26 +94,8 @@ def main(): datasets=[ NWBDatasetSpec( name='timestamp', - neurodata_type_inc='VectorData', - dtype='float32', - dims=['num_events'], - shape=[None], - doc=("The time that the event occurred, in seconds, from the session start time."), - attributes=[ - NWBAttributeSpec( - name='unit', - dtype='text', - value='seconds', - doc="Unit of measurement for timestamps, which is fixed to 'seconds'.", - ), - NWBAttributeSpec( - name='resolution', - dtype='float32', - doc=('The smallest possible difference between two event times. 
Usually 1 divided by the ' - 'event time sampling rate on the data acquisition system.'), - required=False, - ), - ], + neurodata_type_inc='TimestampVectorData', + doc="The time that each event occurred, in seconds, from the session start time.", ), NWBDatasetSpec( name='event_type', @@ -154,41 +116,10 @@ def main(): ), NWBDatasetSpec( name='duration', - neurodata_type_inc='VectorData', - dtype='float32', - dims=['num_events'], - shape=[None], - doc=("Optional column containing the duration of each event, in seconds."), - quantity="?", - ), - NWBDatasetSpec( - name='hed_tags', - neurodata_type_inc='VectorData', - dtype='text', - dims=['num_tags'], - shape=[None], - doc=("Optional column containing the Hierarchical Event Descriptor (HED) tags for each event. " - "HED tags should be used at the event type level, not at the event instance level, when " - "possible, unless it is important to annotate events individually."), + neurodata_type_inc='DurationVectorData', + doc="Optional column containing the duration of each event, in seconds.", quantity="?", ), - NWBDatasetSpec( - name='hed_tags_index', - neurodata_type_inc='VectorIndex', - dims=['num_events'], - shape=[None], - doc=("Index column for `hed_tags` column."), - quantity="?", - ), - ], - attributes=[ # override required description attribute from DynamicTable - NWBAttributeSpec( - name='description', - dtype='text', - doc='Description of the events table.', - default_value="Metadata about events.", - required=True, - ), ], ) @@ -225,7 +156,24 @@ def main(): ], ) - new_data_types = [event_types_table, events_table, ttl_types_table, ttls_table] + task = NWBGroupSpec( + neurodata_type_def='Task', + neurodata_type_inc='LabMetaData', + doc=("A group to store task-related general metadata. TODO When merged with core, " + "this will no longer inherit from LabMetaData but from NWBContainer and be placed " + "optionally in /general."), + name="task", + groups=[ + NWBGroupSpec( + name="event_types", + neurodata_type_inc="EventTypesTable", + doc="Table to store information about each task event type.", + quantity="?", + ), + ], + ) + + new_data_types = [timestamp_vector_data, duration_vector_data, event_types_table, events_table, ttl_types_table, ttls_table, task, ] # export the spec to yaml files in the spec folder output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec')) From 438cff3ff88759fa8d5e755b4bf48814d1863f09 Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Tue, 24 Oct 2023 23:40:53 -0700 Subject: [PATCH 06/15] Fix spelling --- spec/ndx-events.extensions.yaml | 14 +------------- src/spec/create_extension_spec.py | 2 +- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index 27b6f4b..f0684b8 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -41,12 +41,6 @@ groups: default_name: EventTypesTable doc: A column-based table to store information about each event type, such as name, one event type per row. - attributes: - - name: description - dtype: text - default_value: Metadata about event types. - doc: Description of the event types table. - required: false datasets: - name: event_name neurodata_type_inc: VectorData @@ -63,12 +57,6 @@ groups: event per row. Each event must have an event_type, which is a row in the EventTypesTable. Additional columns may be added to store metadata about each event, such as the duration of the event, or a text value of the event. 
- attributes: - - name: description - dtype: text - default_value: Metadata about events. - doc: Description of the events table. - required: false datasets: - name: timestamp neurodata_type_inc: TimestampVectorData @@ -115,7 +103,7 @@ groups: - num_events shape: - null - doc: The type of TTL that occured. This is represented as a reference to a row + doc: The type of TTL that occurred. This is represented as a reference to a row of the TtlTypesTable. - neurodata_type_def: Task neurodata_type_inc: LabMetaData diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index b5282a0..08cfdb1 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -150,7 +150,7 @@ def main(): neurodata_type_inc='DynamicTableRegion', dims=['num_events'], shape=[None], - doc=("The type of TTL that occured. This is represented as a reference " + doc=("The type of TTL that occurred. This is represented as a reference " "to a row of the TtlTypesTable."), ), ], From a9daa4a1a793114bde91dc026d0bf96098da5e74 Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Wed, 25 Oct 2023 09:41:59 -0700 Subject: [PATCH 07/15] Move test_events.py --- src/pynwb/tests/integration/__init__.py | 0 src/pynwb/tests/integration/hdf5/__init__.py | 0 .../tests/integration/hdf5/test_events.py | 169 ------------------ src/pynwb/tests/{unit => }/test_events.py | 73 +++++++- src/pynwb/tests/unit/__init__.py | 0 5 files changed, 70 insertions(+), 172 deletions(-) delete mode 100644 src/pynwb/tests/integration/__init__.py delete mode 100644 src/pynwb/tests/integration/hdf5/__init__.py delete mode 100644 src/pynwb/tests/integration/hdf5/test_events.py rename src/pynwb/tests/{unit => }/test_events.py (87%) delete mode 100644 src/pynwb/tests/unit/__init__.py diff --git a/src/pynwb/tests/integration/__init__.py b/src/pynwb/tests/integration/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/pynwb/tests/integration/hdf5/__init__.py b/src/pynwb/tests/integration/hdf5/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/pynwb/tests/integration/hdf5/test_events.py b/src/pynwb/tests/integration/hdf5/test_events.py deleted file mode 100644 index f9600d9..0000000 --- a/src/pynwb/tests/integration/hdf5/test_events.py +++ /dev/null @@ -1,169 +0,0 @@ -import datetime -import numpy as np -from pynwb import NWBFile, NWBHDF5IO -from pynwb.testing import AcquisitionH5IOMixin, TestCase, remove_test_file - -# from ndx_events import Events, LabeledEvents, TTLs, AnnotatedEventsTable - - -# class TestEventsIOSimple(TestCase): -# """Simple roundtrip test for CSD.""" - -# def setUp(self): -# self.nwbfile = NWBFile( -# session_description='session_description', -# identifier='identifier', -# session_start_time=datetime.datetime.now(datetime.timezone.utc) -# ) -# self.path = 'test.nwb' - -# def tearDown(self): -# remove_test_file(self.path) - -# def test_roundtrip(self): -# """ -# Add a CSD to an "ecephys" processing module in the NWBFile, write it to file, read the file, and test that the -# CSD from the file matches the original CSD. 
-# """ - -# events = Events( -# name='Events', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5 -# ) -# self.nwbfile.add_acquisition(events) - -# labeled_events = LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# labels=['', '', '', 'event1', 'event2'] -# ) -# self.nwbfile.add_acquisition(labeled_events) - -# ttls = TTLs( -# name='TTLs', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# labels=['', '', '', 'event1', 'event2'] -# ) -# self.nwbfile.add_acquisition(ttls) - -# annotated_events = AnnotatedEventsTable( -# name='AnnotatedEventsTable', -# description='annotated events from my experiment', -# resolution=1e-5 -# ) -# annotated_events.add_column( -# name='extra', -# description='extra metadata for each event type' -# ) -# annotated_events.add_event_type( -# label='Reward', -# event_description='Times when the animal received juice reward.', -# event_times=[1., 2., 3.], -# extra='extra', -# id=3 -# ) -# annotated_events.add_event_type( -# label='Nosepoke', -# event_description='Times when the animal poked its noise through the input port.', -# event_times=[1., 2., 3.], -# extra='extra', -# id=5 -# ) - -# events_module = self.nwbfile.create_processing_module( -# name='events', -# description='processed events data' -# ) -# events_module.add(annotated_events) - -# with NWBHDF5IO(self.path, mode='w') as io: -# io.write(self.nwbfile) - -# with NWBHDF5IO(self.path, mode='r', load_namespaces=True) as io: -# read_nwbfile = io.read() -# self.assertContainerEqual(events, read_nwbfile.acquisition['Events']) -# self.assertContainerEqual(labeled_events, read_nwbfile.acquisition['LabeledEvents']) -# self.assertContainerEqual(ttls, read_nwbfile.acquisition['TTLs']) -# self.assertContainerEqual(annotated_events, read_nwbfile.processing['events']['AnnotatedEventsTable']) - - -# class TestEventsIO(AcquisitionH5IOMixin, TestCase): - -# def setUpContainer(self): -# """ Return the test Events to read/write """ -# events = Events( -# name='Events', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5 -# ) -# return events - - -# class TestLabeledEventsIO(AcquisitionH5IOMixin, TestCase): - -# def setUpContainer(self): -# """ Return the test Events to read/write """ -# labeled_events = LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# labels=['', '', '', 'event1', 'event2'] -# ) -# return labeled_events - - -# class TestTTLs(AcquisitionH5IOMixin, TestCase): - -# def setUpContainer(self): -# """ Return the test Events to read/write """ -# ttls = TTLs( -# name='TTLs', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# labels=['', '', '', 'event1', 'event2'] -# ) -# return ttls - - -# class TestAnnotatedEventsTableIO(AcquisitionH5IOMixin, TestCase): -# """ Test adding AnnotatedEventsTable into acquisition and accessing AnnotatedEvents after read """ - -# def setUpContainer(self): -# """ Return the test AnnotatedEventsTable to read/write """ -# annotated_events = AnnotatedEventsTable( -# name='AnnotatedEventsTable', -# description='annotated events from my experiment', -# resolution=1e-5 -# ) -# annotated_events.add_column( -# name='extra', -# description='extra 
metadata for each event type' -# ) -# annotated_events.add_event_type( -# label='Reward', -# event_description='Times when the animal received juice reward.', -# event_times=[1., 2., 3.], -# extra='extra', -# id=3 -# ) -# annotated_events.add_event_type( -# label='Nosepoke', -# event_description='Times when the animal poked its noise through the input port.', -# event_times=[1., 2., 3.], -# extra='extra', -# id=5 -# ) -# return annotated_events diff --git a/src/pynwb/tests/unit/test_events.py b/src/pynwb/tests/test_events.py similarity index 87% rename from src/pynwb/tests/unit/test_events.py rename to src/pynwb/tests/test_events.py index 395cffe..dab7aeb 100644 --- a/src/pynwb/tests/unit/test_events.py +++ b/src/pynwb/tests/test_events.py @@ -235,12 +235,10 @@ def test_init_dtr(self): event_types_table.add_row( event_name="cue on", event_type_description="Times when the cue was on screen.", - # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], ) event_types_table.add_row( event_name="stimulus on", event_type_description="Times when the stimulus was on screen.", - # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], ) events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) @@ -277,11 +275,80 @@ def test_add_row(self): assert events_table["timestamp"].data == [0.1, 1.1] assert events_table["value"].data == ["white circle", "green square"] assert events_table["duration"].data == [0.2, 0.15] + assert events_table["event_type"].data == [0, 0] # assert events_table["hed_tags"][0] == ["(White, Circle)"] # assert events_table["hed_tags"][1] == ["(Green, Square)"] -## TODO: TestEventsTableSimpleRoundtrip +class TestEventsTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for EventsTable.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create an EventsTable, write it to file, read the file, and test that the read table matches the original. 
+ """ + # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable + # must be named "event_types" according to the spec + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], + ) + + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_row( + timestamp=0.1, + value="white circle", + event_type=0, + duration=0.2, + # hed_tags=["(White, Circle)"], + ) + events_table.add_row( + timestamp=1.1, + value="green square", + event_type=0, + duration=0.15, + # hed_tags=["(Green, Square)"], + ) + task = Task() + task.event_types = event_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + nwbfile.add_acquisition(events_table) + breakpoint() + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_event_types_table = read_nwbfile.get_lab_meta_data("task").event_types + read_events_table = read_nwbfile.acquisition["EventsTable"] + assert isinstance(read_events_table, EventsTable) + assert read_events_table.name == "EventsTable" + assert read_events_table.description == "Metadata about events" + assert all(read_events_table["timestamp"].data[:] == [0.1, 1.1]) + assert all( + read_events_table["value"].data[:] == [ + "white circle", + "green square", + ] + ) + assert all(read_events_table["duration"].data[:] == [0.2, 0.15]) + assert all(read_events_table["event_type"].data[:] == [0, 0]) + assert read_events_table["event_type"].table is read_event_types_table diff --git a/src/pynwb/tests/unit/__init__.py b/src/pynwb/tests/unit/__init__.py deleted file mode 100644 index e69de29..0000000 From 5881afb109060f46ca754462dca7f6ae7de58295 Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Wed, 25 Oct 2023 12:12:19 -0700 Subject: [PATCH 08/15] Update spec and tests --- spec/ndx-events.extensions.yaml | 4 + src/pynwb/tests/test_events.py | 360 ++++++++++++++++-------------- src/spec/create_extension_spec.py | 16 +- 3 files changed, 209 insertions(+), 171 deletions(-) diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index f0684b8..259d094 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -116,3 +116,7 @@ groups: neurodata_type_inc: EventTypesTable doc: Table to store information about each task event type. quantity: '?' + - name: ttl_types + neurodata_type_inc: TtlTypesTable + doc: Table to store information about each task TTL type. + quantity: '?' 
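The hunk above wires the "event_types" and "ttl_types" tables into the Task group. As a rough usage sketch of how the pieces added in this patch are expected to fit together — mirroring the constructors and add_row() calls exercised by the unit tests added below, with purely illustrative event names, descriptions, and values — the workflow looks roughly like this:

```python
# Rough usage sketch based on the types defined in this patch; the labels,
# descriptions, and pulse values here are illustrative only.
import numpy as np
from pynwb.testing.mock.file import mock_NWBFile

from ndx_events import EventTypesTable, EventsTable, TtlTypesTable, Task

# Event types are rows in an EventTypesTable; per the spec hunk above, the
# table must be named "event_types" when stored inside the Task group.
event_types = EventTypesTable(name="event_types", description="Metadata about event types")
event_types.add_row(
    event_name="cue on",
    event_type_description="Times when the cue was on screen.",
)

# TTL types additionally carry the integer pulse value used on the DAQ.
ttl_types = TtlTypesTable(name="ttl_types", description="Metadata about TTL types")
ttl_types.add_row(
    event_name="cue on",
    event_type_description="TTL pulse recorded when the cue turned on.",
    pulse_value=np.uint(1),
)

# The Task LabMetaData group holds both type tables.
task = Task()
task.event_types = event_types
task.ttl_types = ttl_types

# Individual events reference their type by row index via a DynamicTableRegion.
events = EventsTable(
    description="Metadata about events",
    target_tables={"event_type": event_types},
)
events.add_row(timestamp=0.1, value="white circle", event_type=0, duration=0.2)

nwbfile = mock_NWBFile()
nwbfile.add_lab_meta_data(task)
nwbfile.add_acquisition(events)
```
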
diff --git a/src/pynwb/tests/test_events.py b/src/pynwb/tests/test_events.py index dab7aeb..cc2e113 100644 --- a/src/pynwb/tests/test_events.py +++ b/src/pynwb/tests/test_events.py @@ -1,9 +1,10 @@ from hdmf.common import DynamicTable +import numpy as np from pynwb import NWBHDF5IO from pynwb.testing import TestCase, remove_test_file from pynwb.testing.mock.file import mock_NWBFile -from ndx_events import EventsTable, EventTypesTable, Task, DurationVectorData, TimestampVectorData +from ndx_events import EventsTable, EventTypesTable, TtlsTable, TtlTypesTable, Task, DurationVectorData, TimestampVectorData class TestTimestampVectorData(TestCase): @@ -327,7 +328,6 @@ def test_roundtrip(self): nwbfile = mock_NWBFile() nwbfile.add_lab_meta_data(task) nwbfile.add_acquisition(events_table) - breakpoint() with NWBHDF5IO(self.path, mode="w") as io: io.write(nwbfile) @@ -351,192 +351,212 @@ def test_roundtrip(self): assert read_events_table["event_type"].table is read_event_types_table +class TestTtlTypesTable(TestCase): -# class TestEventsTable(TestCase): + def test_init(self): + ttl_types_table = TtlTypesTable(description="Metadata about TTL types") + assert ttl_types_table.name == "TtlTypesTable" + assert ttl_types_table.description == "Metadata about TTL types" -# def test_init(self): -# event_types_table = EventTypesTable() -# events = EventsTable( -# name='EventsTable', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5 -# ) -# self.assertEqual(events.name, 'Events') -# self.assertEqual(events.description, 'events from my experiment') -# self.assertEqual(events.timestamps, [0., 1., 2.]) -# self.assertEqual(events.resolution, 1e-5) -# self.assertEqual(events.unit, 'seconds') + def test_init_name(self): + ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types") + assert ttl_types_table.name == "ttl_types" + assert ttl_types_table.description == "Metadata about TTL types" + def test_add_row(self): + ttl_types_table = TtlTypesTable(description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + assert ttl_types_table["event_name"].data == ["cue on", "stimulus on"] + assert ttl_types_table["event_type_description"].data == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + assert all(ttl_types_table["pulse_value"].data == np.uint([1, 2])) -# class TestLabeledEvents(TestCase): -# def test_init(self): -# events = LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# labels=['', '', '', 'event1', 'event2'] -# ) -# self.assertEqual(events.name, 'LabeledEvents') -# self.assertEqual(events.description, 'events from my experiment') -# self.assertEqual(events.timestamps, [0., 1., 2.]) -# self.assertEqual(events.resolution, 1e-5) -# self.assertEqual(events.unit, 'seconds') -# np.testing.assert_array_equal(events.data, np.uint([3, 4, 3])), -# self.assertEqual(events.labels, ['', '', '', 'event1', 'event2']) - -# def test_mismatch_length(self): -# msg = 'Timestamps and data must have the same length: 3 != 4' -# with self.assertRaisesWith(ValueError, msg): -# LabeledEvents( -# name='LabeledEvents', -# description='events from 
my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3, 5]), -# labels=['', '', '', 'event1', 'event2', 'event3'] -# ) +class TestTtlTypesTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for TtlTypesTable.""" -# def test_default_labels(self): -# events = LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# ) -# self.assertEqual(events.labels, ['', '', '', '3', '4']) - -# def test_none_in_labels(self): -# msg = "None values are not allowed in the labels array. Please use '' for undefined labels." -# with self.assertRaisesWith(ValueError, msg): -# LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# labels=[None, None, None, 'event1', 'event2'] -# ) + def setUp(self): + self.path = "test.nwb" -# def test_data_negative(self): -# msg = "Negative values are not allowed in 'data'." -# with self.assertRaisesWith(ValueError, msg): -# LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=[1, -2, 3], -# labels=['', '', '', 'event1', 'event2'] -# ) + def tearDown(self): + remove_test_file(self.path) -# def test_data_int_conversion(self): -# le = LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=[1, 2, 3], -# labels=['', '', '', 'event1', 'event2'] -# ) -# np.testing.assert_array_equal(le.data, np.array([1, 2, 3])) -# self.assertEqual(le.data.dtype, np.uint) - -# def test_data_string(self): -# msg = ("'data' must be an array of numeric values that have type unsigned int or " -# "can be converted to unsigned int, not type <U1") -# with self.assertRaisesWith(ValueError, msg): -# LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=['1', '2', '3'], -# labels=['', '', '', 'event1', 'event2'] -# ) + def test_roundtrip(self): + """ + Create an TtlTypesTable, write it to file, read the file, and test that the read table matches the original. 
+ """ + # NOTE that when adding an TtlTypesTable to a Task, the TtlTypesTable + # must be named "ttl_types" according to the spec + ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + task = Task() + task.ttl_types = ttl_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) -# def test_data_pass_through(self): -# data = [1.0, 2.0, 3.0] -# le = LabeledEvents( -# name='LabeledEvents', -# description='events from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=data, -# labels=['', '', '', 'event1', 'event2'] -# ) -# self.assertIs(le.data, data) + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_ttl_types_table = read_nwbfile.get_lab_meta_data("task").ttl_types + assert isinstance(read_ttl_types_table, EventTypesTable) + assert read_ttl_types_table.name == "ttl_types" + assert read_ttl_types_table.description == "Metadata about TTL types" + assert all(read_ttl_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) + assert all( + read_ttl_types_table["event_type_description"].data[:] == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + ) + assert all(read_ttl_types_table["pulse_value"].data[:] == np.uint([1, 2])) -# class TestTTLs(TestCase): +# class TestTtlsTable(TestCase): # def test_init(self): -# events = TTLs( -# name='TTLs', -# description='ttl pulses from my experiment', -# timestamps=[0., 1., 2.], -# resolution=1e-5, -# data=np.uint([3, 4, 3]), -# labels=['', '', '', 'event1', 'event2'] +# ttls_table = TtlsTable(description="Metadata about TTLs") +# assert events_table.name == "EventsTable" +# assert events_table.description == "Metadata about events" + +# def test_init_dtr(self): +# event_types_table = EventTypesTable(description="Metadata about event types") +# event_types_table.add_row( +# event_name="cue on", +# event_type_description="Times when the cue was on screen.", +# ) +# event_types_table.add_row( +# event_name="stimulus on", +# event_type_description="Times when the stimulus was on screen.", # ) -# self.assertEqual(events.name, 'TTLs') -# self.assertEqual(events.description, 'ttl pulses from my experiment') -# self.assertEqual(events.timestamps, [0., 1., 2.]) -# self.assertEqual(events.resolution, 1e-5) -# self.assertEqual(events.unit, 'seconds') -# np.testing.assert_array_equal(events.data, np.uint([3, 4, 3])), -# self.assertEqual(events.labels, ['', '', '', 'event1', 'event2']) +# events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) +# assert events_table["event_type"].table is event_types_table -# class TestAnnotatedEventsTable(TestCase): +# def test_add_row(self): +# event_types_table = EventTypesTable(description="Metadata about event types") +# event_types_table.add_row( +# event_name="cue on", +# event_type_description="Times when the cue was on screen.", +# # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], +# ) +# event_types_table.add_row( +# event_name="stimulus on", +# event_type_description="Times when the stimulus was on screen.", +# # hed_tags=["Sensory-event", 
"Experimental-stimulus", "Visual-presentation", "Image", "Face"], +# ) -# def test_init(self): -# events = AnnotatedEventsTable( -# name='AnnotatedEventsTable', -# description='annotated events from my experiment', -# resolution=1e-5 +# events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) +# events_table.add_row( +# timestamp=0.1, +# value="white circle", +# event_type=0, +# duration=0.2, +# # hed_tags=["(White, Circle)"], +# ) +# events_table.add_row( +# timestamp=1.1, +# value="green square", +# event_type=0, +# duration=0.15, +# # hed_tags=["(Green, Square)"], # ) -# self.assertEqual(events.name, 'AnnotatedEventsTable') -# self.assertEqual(events.description, 'annotated events from my experiment') -# self.assertEqual(events.resolution, 1e-5) - -# def test_add_event_type(self): -# events = AnnotatedEventsTable( -# name='AnnotatedEventsTable', -# description='annotated events from my experiment' +# assert events_table["timestamp"].data == [0.1, 1.1] +# assert events_table["value"].data == ["white circle", "green square"] +# assert events_table["duration"].data == [0.2, 0.15] +# assert events_table["event_type"].data == [0, 0] +# # assert events_table["hed_tags"][0] == ["(White, Circle)"] +# # assert events_table["hed_tags"][1] == ["(Green, Square)"] + + +# class TestEventsTableSimpleRoundtrip(TestCase): +# """Simple roundtrip test for EventsTable.""" + +# def setUp(self): +# self.path = "test.nwb" + +# def tearDown(self): +# remove_test_file(self.path) + +# def test_roundtrip(self): +# """ +# Create an EventsTable, write it to file, read the file, and test that the read table matches the original. +# """ +# # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable +# # must be named "event_types" according to the spec +# event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") +# event_types_table.add_row( +# event_name="cue on", +# event_type_description="Times when the cue was on screen.", +# # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], # ) -# events.add_event_type( -# label='Reward', -# event_description='Times when the animal received juice reward.', -# event_times=[1., 2., 3.], -# id=3 +# event_types_table.add_row( +# event_name="stimulus on", +# event_type_description="Times when the stimulus was on screen.", +# # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], +# ) + +# events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) +# events_table.add_row( +# timestamp=0.1, +# value="white circle", +# event_type=0, +# duration=0.2, +# # hed_tags=["(White, Circle)"], # ) -# events.add_event_type( -# label='Abort', -# event_description='Times when the animal aborted the trial.', -# event_times=[0.5, 4.5], -# id=4 +# events_table.add_row( +# timestamp=1.1, +# value="green square", +# event_type=0, +# duration=0.15, +# # hed_tags=["(Green, Square)"], # ) -# self.assertEqual(events.id.data, [3, 4]) -# self.assertEqual(events['event_times'][0], [1., 2., 3.]) -# self.assertEqual(events['event_times'][1], [0.5, 4.5]) -# self.assertEqual(events['label'][0], 'Reward') -# self.assertEqual(events['label'][1], 'Abort') -# self.assertListEqual(events['event_description'].data, ['Times when the animal received juice reward.', -# 'Times when the animal aborted the trial.']) -# self.assertEqual(events.colnames, ('event_times', 'label', 'event_description')) -# 
self.assertEqual(len(events.columns), 4) -# self.assertEqual(events.columns[0].name, 'event_times_index') -# self.assertIsInstance(events.columns[0], VectorIndex) -# self.assertIs(events.columns[0].target, events.columns[1]) -# self.assertEqual(events.columns[1].name, 'event_times') -# self.assertIsInstance(events.columns[1], VectorData) -# self.assertEqual(events.columns[2].name, 'label') -# self.assertIsInstance(events.columns[2], VectorData) -# self.assertEqual(events.columns[3].name, 'event_description') -# self.assertIsInstance(events.columns[3], VectorData) -# self.assertEqual(events.resolution, None) +# task = Task() +# task.event_types = event_types_table +# nwbfile = mock_NWBFile() +# nwbfile.add_lab_meta_data(task) +# nwbfile.add_acquisition(events_table) + +# with NWBHDF5IO(self.path, mode="w") as io: +# io.write(nwbfile) + +# with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: +# read_nwbfile = io.read() +# read_event_types_table = read_nwbfile.get_lab_meta_data("task").event_types +# read_events_table = read_nwbfile.acquisition["EventsTable"] +# assert isinstance(read_events_table, EventsTable) +# assert read_events_table.name == "EventsTable" +# assert read_events_table.description == "Metadata about events" +# assert all(read_events_table["timestamp"].data[:] == [0.1, 1.1]) +# assert all( +# read_events_table["value"].data[:] == [ +# "white circle", +# "green square", +# ] +# ) +# assert all(read_events_table["duration"].data[:] == [0.2, 0.15]) +# assert all(read_events_table["event_type"].data[:] == [0, 0]) +# assert read_events_table["event_type"].table is read_event_types_table + + diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 08cfdb1..d5cd08d 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -170,10 +170,24 @@ def main(): doc="Table to store information about each task event type.", quantity="?", ), + NWBGroupSpec( + name="ttl_types", + neurodata_type_inc="TtlTypesTable", + doc="Table to store information about each task TTL type.", + quantity="?", + ), ], ) - new_data_types = [timestamp_vector_data, duration_vector_data, event_types_table, events_table, ttl_types_table, ttls_table, task, ] + new_data_types = [ + timestamp_vector_data, + duration_vector_data, + event_types_table, + events_table, + ttl_types_table, + ttls_table, + task, + ] # export the spec to yaml files in the spec folder output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec')) From 75d5d039d67839808dddd6daac90365bf2ea7d8f Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Wed, 25 Oct 2023 16:00:10 -0700 Subject: [PATCH 09/15] Update tests --- README.md | 89 -------- spec/ndx-events.extensions.yaml | 10 +- src/pynwb/tests/test_events.py | 316 ++++++++++++++------------ src/pynwb/tests/test_example_usage.py | 256 ++++++++++++++------- src/spec/create_extension_spec.py | 10 +- 5 files changed, 348 insertions(+), 333 deletions(-) diff --git a/README.md b/README.md index 7961a04..3aeedb5 100644 --- a/README.md +++ b/README.md @@ -40,94 +40,5 @@ generateExtension('<directory path>/ndx-events/spec/ndx-events.namespace.yaml'); ## Example usage Python: -```python -from datetime import datetime -from ndx_events import Events, EventsTable, EventTypesTable -from pynwb import NWBFile, NWBHDF5IO - -nwb = NWBFile( - session_description="session description", - identifier="cool_experiment_001", - session_start_time=datetime.now().astimezone(), -) - -# create a basic events object 
-basic_tone_event = Events( - name="tone_onset", - timestamps=[0.0, 0.1, 0.3, 0.5, 0.6], - description="Times when a tone was played.", -) - -# add the basic events object to the NWBFile object -nwb.add_acquisition(basic_tone_event) - -# create an event types table -event_types_table = EventTypesTable( - name="EventTypesTable", - description="metadata about event types", -) - -# create a new custom column with additional metadata -event_types_table.add_column( - name="extra_metadata", - description="some additional metadata about each event type", -) - -# add event types one by one -event_types_table.add_row( - id=0, event_name="trial start", extra_metadata="more metadata" -) -event_types_table.add_row( - id=1, event_name="cue onset", extra_metadata="more metadata" -) -event_types_table.add_row( - id=2, event_name="cue offset", extra_metadata="more metadata" -) -event_types_table.add_row( - id=3, event_name="nosepoke left", extra_metadata="more metadata" -) -event_types_table.add_row( - id=4, event_name="nosepoke right", extra_metadata="more metadata" -) -event_types_table.add_row(id=5, event_name="reward", extra_metadata="more metadata") - -# add the event types table to the acquisition group for now -# it should be added to the /general/tasks group when merged with core -nwb.add_acquisition(event_types_table) - -# create a new EventsTable type to hold events recorded from the data acquisition system -events_table = EventsTable( - name="EventsTable", - description="events from my experiment", -) -# set the dynamic table region link -events_table["event_type"].table = event_types_table - -# add events one by one -events_table.add_row(timestamp=0.1, event_type=0, duration=0.0) -events_table.add_row(timestamp=0.3, event_type=1, duration=0.0) -events_table.add_row(timestamp=0.4, event_type=2, duration=0.0) -events_table.add_row(timestamp=0.8, event_type=4, duration=0.1) -events_table.add_row(timestamp=0.85, event_type=5, duration=0.0) - -# add the EventsTable type to the acquisition group of the NWB file -nwb.add_acquisition(events_table) - -# write nwb file -filename = "test.nwb" -with NWBHDF5IO(filename, "w") as io: - io.write(nwb) - -# read nwb file and check its contents -with NWBHDF5IO(filename, "r", load_namespaces=True) as io: - nwb = io.read() - print(nwb) - # access the events table and event types table by name from the NWBFile acquisition group and print it - print(nwb.acquisition["tone_onset"]) - print(nwb.acquisition["EventTypesTable"]) - print(nwb.acquisition["EventsTable"]) - print(nwb.acquisition["EventsTable"].to_dataframe()) - print(nwb.acquisition["EventsTable"][0, "event_type"]) -``` This extension was created using [ndx-template](https://github.com/nwb-extensions/ndx-template). diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index 259d094..d749773 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -69,14 +69,6 @@ groups: - null doc: The type of event that occurred. This is represented as a reference to a row of the EventTypesTable. - - name: value - neurodata_type_inc: VectorData - dtype: text - dims: - - num_events - shape: - - null - doc: Optional column containing the text value of each event. quantity: '?' - name: duration neurodata_type_inc: DurationVectorData @@ -97,7 +89,7 @@ groups: default_name: TtlsTable doc: Data type to hold timestamps of TTL pulses. 
datasets: - - name: event_type + - name: ttl_type neurodata_type_inc: DynamicTableRegion dims: - num_events diff --git a/src/pynwb/tests/test_events.py b/src/pynwb/tests/test_events.py index cc2e113..14e49c6 100644 --- a/src/pynwb/tests/test_events.py +++ b/src/pynwb/tests/test_events.py @@ -14,6 +14,7 @@ def test_init(self): assert data.name == "test" assert data.description == "description" assert data.unit == "seconds" + assert data.resolution == None def test_add_to_dynamic_table(self): col = TimestampVectorData(name="test", description="description") @@ -22,6 +23,15 @@ def test_add_to_dynamic_table(self): assert table.test is col assert table.test[0] == 0.1 + def test_set_resolution_init(self): + data = TimestampVectorData(name="test", description="description", resolution=1/32000.0) + assert data.resolution == 1/32000.0 + + def test_set_resolution_attr(self): + data = TimestampVectorData(name="test", description="description") + data.resolution = 1/32000.0 + assert data.resolution == 1/32000.0 + class TestTimestampVectorDataSimpleRoundtrip(TestCase): """Simple roundtrip test for TimestampVectorData.""" @@ -259,26 +269,45 @@ def test_add_row(self): ) events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_column(name="cue_type", description="The cue type.") + events_table.add_column(name="stimulus_type", description="The stimulus type.") events_table.add_row( timestamp=0.1, - value="white circle", + cue_type="white circle", + stimulus_type="", event_type=0, - duration=0.2, + duration=0.1, # hed_tags=["(White, Circle)"], ) + events_table.add_row( + timestamp=0.3, + cue_type="", + stimulus_type="animal", + event_type=1, + duration=0.15, + ) events_table.add_row( timestamp=1.1, - value="green square", + cue_type="green square", + stimulus_type="", event_type=0, - duration=0.15, + duration=0.1, # hed_tags=["(Green, Square)"], ) - assert events_table["timestamp"].data == [0.1, 1.1] - assert events_table["value"].data == ["white circle", "green square"] - assert events_table["duration"].data == [0.2, 0.15] - assert events_table["event_type"].data == [0, 0] + events_table.add_row( + timestamp=1.3, + cue_type="", + stimulus_type="landscape", + event_type=1, + duration=0.15, + ) + assert events_table["timestamp"].data == [0.1, 0.3, 1.1, 1.3] + assert events_table["cue_type"].data == ["white circle", "", "green square", ""] + assert events_table["stimulus_type"].data == ["", "animal", "", "landscape"] + assert events_table["duration"].data == [0.1, 0.15, 0.1, 0.15] + assert events_table["event_type"].data == [0, 1, 0, 1] # assert events_table["hed_tags"][0] == ["(White, Circle)"] - # assert events_table["hed_tags"][1] == ["(Green, Square)"] + # assert events_table["hed_tags"][2] == ["(Green, Square)"] class TestEventsTableSimpleRoundtrip(TestCase): @@ -309,20 +338,39 @@ def test_roundtrip(self): ) events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_column(name="cue_type", description="The cue type.") + events_table.add_column(name="stimulus_type", description="The stimulus type.") events_table.add_row( timestamp=0.1, - value="white circle", + cue_type="white circle", + stimulus_type="", event_type=0, - duration=0.2, + duration=0.1, # hed_tags=["(White, Circle)"], ) + events_table.add_row( + timestamp=0.3, + cue_type="", + stimulus_type="animal", + event_type=1, + duration=0.15, + ) events_table.add_row( timestamp=1.1, - value="green square", 
+ cue_type="green square", + stimulus_type="", event_type=0, - duration=0.15, + duration=0.1, # hed_tags=["(Green, Square)"], ) + events_table.add_row( + timestamp=1.3, + cue_type="", + stimulus_type="landscape", + event_type=1, + duration=0.15, + ) + task = Task() task.event_types = event_types_table nwbfile = mock_NWBFile() @@ -339,15 +387,11 @@ def test_roundtrip(self): assert isinstance(read_events_table, EventsTable) assert read_events_table.name == "EventsTable" assert read_events_table.description == "Metadata about events" - assert all(read_events_table["timestamp"].data[:] == [0.1, 1.1]) - assert all( - read_events_table["value"].data[:] == [ - "white circle", - "green square", - ] - ) - assert all(read_events_table["duration"].data[:] == [0.2, 0.15]) - assert all(read_events_table["event_type"].data[:] == [0, 0]) + assert all(read_events_table["timestamp"].data[:] == [0.1, 0.3, 1.1, 1.3]) + assert all(read_events_table["cue_type"].data[:] == ["white circle", "", "green square", ""]) + assert all(read_events_table["stimulus_type"].data[:] == ["", "animal", "", "landscape"]) + assert all(read_events_table["duration"].data[:] == [0.1, 0.15, 0.1, 0.15]) + assert all(read_events_table["event_type"].data[:] == [0, 1, 0, 1]) assert read_events_table["event_type"].table is read_event_types_table @@ -433,130 +477,108 @@ def test_roundtrip(self): assert all(read_ttl_types_table["pulse_value"].data[:] == np.uint([1, 2])) -# class TestTtlsTable(TestCase): - -# def test_init(self): -# ttls_table = TtlsTable(description="Metadata about TTLs") -# assert events_table.name == "EventsTable" -# assert events_table.description == "Metadata about events" - -# def test_init_dtr(self): -# event_types_table = EventTypesTable(description="Metadata about event types") -# event_types_table.add_row( -# event_name="cue on", -# event_type_description="Times when the cue was on screen.", -# ) -# event_types_table.add_row( -# event_name="stimulus on", -# event_type_description="Times when the stimulus was on screen.", -# ) - -# events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) -# assert events_table["event_type"].table is event_types_table - -# def test_add_row(self): -# event_types_table = EventTypesTable(description="Metadata about event types") -# event_types_table.add_row( -# event_name="cue on", -# event_type_description="Times when the cue was on screen.", -# # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], -# ) -# event_types_table.add_row( -# event_name="stimulus on", -# event_type_description="Times when the stimulus was on screen.", -# # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], -# ) - -# events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) -# events_table.add_row( -# timestamp=0.1, -# value="white circle", -# event_type=0, -# duration=0.2, -# # hed_tags=["(White, Circle)"], -# ) -# events_table.add_row( -# timestamp=1.1, -# value="green square", -# event_type=0, -# duration=0.15, -# # hed_tags=["(Green, Square)"], -# ) -# assert events_table["timestamp"].data == [0.1, 1.1] -# assert events_table["value"].data == ["white circle", "green square"] -# assert events_table["duration"].data == [0.2, 0.15] -# assert events_table["event_type"].data == [0, 0] -# # assert events_table["hed_tags"][0] == ["(White, Circle)"] -# # assert events_table["hed_tags"][1] == ["(Green, Square)"] - - -# class 
TestEventsTableSimpleRoundtrip(TestCase): -# """Simple roundtrip test for EventsTable.""" - -# def setUp(self): -# self.path = "test.nwb" - -# def tearDown(self): -# remove_test_file(self.path) - -# def test_roundtrip(self): -# """ -# Create an EventsTable, write it to file, read the file, and test that the read table matches the original. -# """ -# # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable -# # must be named "event_types" according to the spec -# event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") -# event_types_table.add_row( -# event_name="cue on", -# event_type_description="Times when the cue was on screen.", -# # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], -# ) -# event_types_table.add_row( -# event_name="stimulus on", -# event_type_description="Times when the stimulus was on screen.", -# # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], -# ) - -# events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) -# events_table.add_row( -# timestamp=0.1, -# value="white circle", -# event_type=0, -# duration=0.2, -# # hed_tags=["(White, Circle)"], -# ) -# events_table.add_row( -# timestamp=1.1, -# value="green square", -# event_type=0, -# duration=0.15, -# # hed_tags=["(Green, Square)"], -# ) -# task = Task() -# task.event_types = event_types_table -# nwbfile = mock_NWBFile() -# nwbfile.add_lab_meta_data(task) -# nwbfile.add_acquisition(events_table) - -# with NWBHDF5IO(self.path, mode="w") as io: -# io.write(nwbfile) - -# with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: -# read_nwbfile = io.read() -# read_event_types_table = read_nwbfile.get_lab_meta_data("task").event_types -# read_events_table = read_nwbfile.acquisition["EventsTable"] -# assert isinstance(read_events_table, EventsTable) -# assert read_events_table.name == "EventsTable" -# assert read_events_table.description == "Metadata about events" -# assert all(read_events_table["timestamp"].data[:] == [0.1, 1.1]) -# assert all( -# read_events_table["value"].data[:] == [ -# "white circle", -# "green square", -# ] -# ) -# assert all(read_events_table["duration"].data[:] == [0.2, 0.15]) -# assert all(read_events_table["event_type"].data[:] == [0, 0]) -# assert read_events_table["event_type"].table is read_event_types_table +class TestTtlsTable(TestCase): + + def test_init(self): + ttls_table = TtlsTable(description="Metadata about TTLs") + assert ttls_table.name == "TtlsTable" + assert ttls_table.description == "Metadata about TTLs" + + def test_init_dtr(self): + ttl_types_table = TtlTypesTable(description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + assert ttls_table["ttl_type"].table is ttl_types_table + def test_add_row(self): + ttl_types_table = TtlTypesTable(description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", 
+ pulse_value=np.uint(2), + ) + + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + ttls_table.add_row( + timestamp=0.1, + ttl_type=0, + ) + ttls_table.add_row( + timestamp=1.1, + ttl_type=0, + ) + assert ttls_table["timestamp"].data == [0.1, 1.1] + assert ttls_table["ttl_type"].data == [0, 0] + + +class TestTtlsTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for TtlsTable.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a TtlsTable, write it to file, read the file, and test that the read table matches the original. + """ + # NOTE that when adding an TtlTypesTable to a Task, the TtlTypesTable + # must be named "ttl_types" according to the spec + ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + ttls_table.add_row( + timestamp=0.1, + ttl_type=0, + ) + ttls_table.add_row( + timestamp=1.1, + ttl_type=0, + ) + + task = Task() + task.ttl_types = ttl_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + nwbfile.add_acquisition(ttls_table) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_ttl_types_table = read_nwbfile.get_lab_meta_data("task").ttl_types + read_ttls_table = read_nwbfile.acquisition["TtlsTable"] + assert isinstance(read_ttls_table, TtlsTable) + assert read_ttls_table.name == "TtlsTable" + assert read_ttls_table.description == "Metadata about TTLs" + assert all(read_ttls_table["timestamp"].data[:] == [0.1, 1.1]) + assert all(read_ttls_table["ttl_type"].data[:] == [0, 0]) + assert read_ttls_table["ttl_type"].table is read_ttl_types_table \ No newline at end of file diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index 536e4a7..3df07fa 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -1,92 +1,190 @@ -# def test_example_usage(): -# from datetime import datetime -# from ndx_events import Events, EventsTable, EventTypesTable -# from pynwb import NWBFile, NWBHDF5IO +def test_example_usage(): + from datetime import datetime + from ndx_events import EventsTable, EventTypesTable, TtlsTable, TtlTypesTable, Task + import numpy as np + from pynwb import NWBFile, NWBHDF5IO -# nwb = NWBFile( -# session_description="session description", -# identifier="cool_experiment_001", -# session_start_time=datetime.now().astimezone(), -# ) + nwbfile = NWBFile( + session_description="session description", + identifier="cool_experiment_001", + session_start_time=datetime.now().astimezone(), + ) -# # create a basic events object -# basic_tone_event = Events( -# name="tone_onset", -# timestamps=[0.0, 0.1, 0.3, 0.5, 0.6], -# description="Times when a tone was played.", -# ) + # in this experiment, TTL pulses were sent by the stimulus computer + # to signal important time markers during the experiment/trial, + # when the stimulus was placed on the screen and removed from the screen, + # 
when the question appeared, and the responses of the subject.
+
+    # ref: https://www.nature.com/articles/s41597-020-0415-9, DANDI:000004
+
+    # NOTE that when adding a TtlTypesTable to a Task, the TtlTypesTable
+    # must be named "ttl_types" according to the spec
+    ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types")
+    ttl_types_table.add_row(
+        event_name="start experiment",
+        event_type_description="Start of experiment",
+        pulse_value=np.uint(55),
+    )
+    ttl_types_table.add_row(
+        event_name="stimulus onset",
+        event_type_description="Stimulus onset",
+        pulse_value=np.uint(1),
+    )
+    ttl_types_table.add_row(
+        event_name="stimulus offset",
+        event_type_description="Stimulus offset",
+        pulse_value=np.uint(2),
+    )
+    ttl_types_table.add_row(
+        event_name="question onset",
+        event_type_description="Question screen onset",
+        pulse_value=np.uint(3),
+    )
+    learning_response_description = (
+        "During the learning phase, subjects are instructed to respond to the following "
+        "question: 'Is this an animal?' in each trial. Responses are encoded as 'Yes, this "
+        "is an animal' (20) and 'No, this is not an animal' (21)."
+    )
+    ttl_types_table.add_row(
+        event_name="yes response during learning",
+        event_type_description=learning_response_description,
+        pulse_value=np.uint(20),
+    )
+    ttl_types_table.add_row(
+        event_name="no response during learning",
+        event_type_description=learning_response_description,
+        pulse_value=np.uint(21),
+    )
+    recognition_response_description = (
+        "During the recognition phase, subjects are instructed to respond to the following "
+        "question: 'Have you seen this image before?' in each trial. Responses are encoded "
+        "as: 31 (new, confident), 32 (new, probably), 33 (new, guess), 34 (old, guess), 35 "
+        "(old, probably), 36 (old, confident)."
+ ) + ttl_types_table.add_row( + event_name="(new, confident) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(31), + ) + ttl_types_table.add_row( + event_name="(new, probably) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(32), + ) + ttl_types_table.add_row( + event_name="(new, guess) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(33), + ) + ttl_types_table.add_row( + event_name="(old, guess) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(34), + ) + ttl_types_table.add_row( + event_name="(old, probably) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(35), + ) + ttl_types_table.add_row( + event_name="(old, confident) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(36), + ) + ttl_types_table.add_row( + event_name="end trial", + event_type_description="End of trial", + pulse_value=np.uint(6), + ) + ttl_types_table.add_row( + event_name="end experiment", + event_type_description="End of experiment", + pulse_value=np.uint(66), + ) -# # create a new custom column with additional metadata -# event_types_table.add_column( -# name="extra_metadata", -# description="some additional metadata about each event type", -# ) + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + ttls_table.add_row( + timestamp=6820.092244, + ttl_type=0, # NOT the pulse value, but a row index into the ttl_types_table + ) + ttls_table.add_row( + timestamp=6821.208244, + ttl_type=1, + ) + ttls_table.add_row( + timestamp=6822.210644, + ttl_type=2, + ) + ttls_table.add_row( + timestamp=6822.711364, + ttl_type=3, + ) + ttls_table.add_row( + timestamp=6825.934244, + ttl_type=6, + ) + ttls_table.timestamp.resolution = 1/50000.0 # specify the resolution of the timestamps (optional) -# # add event types one by one -# event_types_table.add_row( -# id=0, event_name="trial start", extra_metadata="more metadata" -# ) -# event_types_table.add_row( -# id=1, event_name="cue onset", extra_metadata="more metadata" -# ) -# event_types_table.add_row( -# id=2, event_name="cue offset", extra_metadata="more metadata" -# ) -# event_types_table.add_row( -# id=3, event_name="nosepoke left", extra_metadata="more metadata" -# ) -# event_types_table.add_row( -# id=4, event_name="nosepoke right", extra_metadata="more metadata" -# ) -# event_types_table.add_row(id=5, event_name="reward", extra_metadata="more metadata") + # if TTLs are recorded, then the events table should hold any non-TTL events + # recorded by the acquisition system + # OR the events table can hold more processed information than the TTLs table + # e.g., converting stimulus onset and offset into a single stimulus event with metadata. 
+ # this may be redundant with information in the trials table if the task is + # structured into trials -# # add the event types table to the acquisition group for now -# # it should be added to the /general/tasks group when merged with core -# nwb.add_acquisition(event_types_table) + # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable + # must be named "event_types" according to the spec + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen", + ) -# # create a new EventsTable type to hold events recorded from the data acquisition system -# events_table = EventsTable( -# name="EventsTable", -# description="events from my experiment", -# ) -# # set the dynamic table region link -# events_table["event_type"].table = event_types_table + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_column(name="category_name", description="Name of the category of the stimulus") + events_table.add_column( + name="stimulus_image_index", + description="Frame index of the stimulus image in the StimulusPresentation object" + ) + events_table.add_row( + timestamp=6821.208244, + category_name="smallAnimal", + stimulus_image_index=0, + event_type=0, + duration=1.0024, # this comes from the stimulus onset and offset TTLs + ) + events_table.add_row( + timestamp=6821.208244, + category_name="phones", + stimulus_image_index=1, + event_type=0, + duration=0.99484, + ) + events_table.timestamp.resolution = 1/50000.0 # specify the resolution of the timestamps (optional) + events_table.duration.resolution = 1/50000.0 # specify the resolution of the durations (optional) -# # add events one by one -# events_table.add_row(timestamp=0.1, event_type=0, duration=0.0) -# events_table.add_row(timestamp=0.3, event_type=1, duration=0.0) -# events_table.add_row(timestamp=0.4, event_type=2, duration=0.0) -# events_table.add_row(timestamp=0.8, event_type=4, duration=0.1) -# events_table.add_row(timestamp=0.85, event_type=5, duration=0.0) + task = Task() + task.event_types = event_types_table + task.ttl_types = ttl_types_table + nwbfile.add_lab_meta_data(task) + nwbfile.add_acquisition(events_table) + nwbfile.add_acquisition(ttls_table) -# # add the EventsTable type to the acquisition group of the NWB file -# nwb.add_acquisition(events_table) + # write nwb file + filename = "test.nwb" + with NWBHDF5IO(filename, "w") as io: + io.write(nwbfile) -# # write nwb file -# filename = "test.nwb" -# with NWBHDF5IO(filename, "w") as io: -# io.write(nwb) + # read nwb file and check its contents + with NWBHDF5IO(filename, "r", load_namespaces=True) as io: + read_nwbfile = io.read() + print(read_nwbfile) + # access the events table, ttls table, event types table, and ttl types table and print them + print(read_nwbfile.get_lab_meta_data("task").event_types.to_dataframe()) + print(read_nwbfile.acquisition["EventsTable"].to_dataframe()) + print(read_nwbfile.get_lab_meta_data("task").ttl_types.to_dataframe()) + print(read_nwbfile.acquisition["TtlsTable"].to_dataframe()) -# # read nwb file and check its contents -# with NWBHDF5IO(filename, "r", load_namespaces=True) as io: -# nwb = io.read() -# print(nwb) -# # access the events table and event types table by name from the NWBFile acquisition group and print it -# print(nwb.acquisition["tone_onset"]) -# 
print(nwb.acquisition["EventTypesTable"]) -# print(nwb.acquisition["EventsTable"]) -# print(nwb.acquisition["EventsTable"].to_dataframe()) -# print(nwb.acquisition["EventsTable"][0, "event_type"]) - -# if __name__ == "__main__": -# test_example_usage() +if __name__ == "__main__": + test_example_usage() diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index d5cd08d..9576de0 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -104,14 +104,6 @@ def main(): shape=[None], doc=("The type of event that occurred. This is represented as a reference " "to a row of the EventTypesTable."), - ), - NWBDatasetSpec( - name='value', - neurodata_type_inc='VectorData', - dtype='text', - dims=['num_events'], - shape=[None], - doc=("Optional column containing the text value of each event."), quantity="?", ), NWBDatasetSpec( @@ -146,7 +138,7 @@ def main(): default_name="TtlsTable", datasets=[ NWBDatasetSpec( - name='event_type', + name='ttl_type', neurodata_type_inc='DynamicTableRegion', dims=['num_events'], shape=[None], From bc28f4c06d9808b73602448dce78f355642dead9 Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Wed, 25 Oct 2023 16:11:23 -0700 Subject: [PATCH 10/15] Update tests, infrastructure --- .gitignore | 72 ++++++++++++++-- CHANGELOG.md | 5 ++ LICENSE.txt | 29 +++++++ MANIFEST.in | 5 -- README.md | 4 +- pyproject.toml | 112 ++++++++++++++++++++++++ requirements-dev.txt | 15 ++++ requirements-min.txt | 5 ++ requirements.txt | 3 - setup.cfg | 20 ----- setup.py | 64 -------------- src/pynwb/ndx_events/__init__.py | 50 +++++------ src/pynwb/ndx_events/events.py | 0 src/pynwb/ndx_events/io/__init__.py | 1 - src/pynwb/ndx_events/io/events.py | 71 ---------------- src/pynwb/tests/test_events.py | 35 ++++---- src/pynwb/tests/test_example_usage.py | 9 +- src/spec/create_extension_spec.py | 117 ++++++++++++++------------ 18 files changed, 344 insertions(+), 273 deletions(-) create mode 100644 CHANGELOG.md delete mode 100644 MANIFEST.in create mode 100644 pyproject.toml create mode 100644 requirements-dev.txt create mode 100644 requirements-min.txt delete mode 100644 requirements.txt delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 src/pynwb/ndx_events/events.py delete mode 100644 src/pynwb/ndx_events/io/__init__.py delete mode 100644 src/pynwb/ndx_events/io/events.py diff --git a/.gitignore b/.gitignore index 0e5ce43..fac0f30 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,8 @@ +# output NWB files +*.nwb + # generated docs -docs/_build docs/source/_format_auto_docs -docs/source/_static -!docs/source/_static/theme_overrides.css - -# copied spec files -src/pynwb/ndx_events/spec/*.yaml # Byte-compiled / optimized / DLL files __pycache__/ @@ -29,6 +26,7 @@ parts/ sdist/ var/ wheels/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg @@ -47,14 +45,18 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover +*.py,cover .hypothesis/ .pytest_cache/ +cover/ +.ruff_cache/ # Translations *.mo @@ -64,6 +66,7 @@ coverage.xml *.log local_settings.py db.sqlite3 +db.sqlite3-journal # Flask stuff: instance/ @@ -76,16 +79,49 @@ instance/ docs/_build/ # PyBuilder +.pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints -# pyenv -.python-version +# IPython +profile_default/ +ipython_config.py -# celery beat schedule file +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended 
to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff celerybeat-schedule +celerybeat.pid # SageMath parsed files *.sage.py @@ -111,6 +147,24 @@ venv.bak/ # mypy .mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ # Mac finder .DS_Store diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..448372a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,5 @@ +# Changelog for ndx-events + +## 0.3.0 (Upcoming) + + diff --git a/LICENSE.txt b/LICENSE.txt index e69de29..8850436 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2023, Ryan Ly +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index fe511eb..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include LICENSE.txt README.md requirements.txt -include spec/*.yaml - -graft src/pynwb/tests -global-exclude __pycache__ *.py[co] diff --git a/README.md b/README.md index 3aeedb5..52b22ea 100644 --- a/README.md +++ b/README.md @@ -22,8 +22,8 @@ subtype of `DynamicTable`, where each row corresponds to a different event type. Unlike for the other event types, users can add their own custom columns to annotate each event type or event time. This can be useful for storing event metadata related to data preprocessing and analysis, such as marking bad events. -This extension was developed by Ryan Ly, Ben Dichter, Oliver Rübel, and Andrew Tritt. Information about the rationale, -background, and alternative approaches to this extension can be found here: +This extension was developed by Ryan Ly, Oliver Rübel, and the NWB Technical Advisory Board. +Information about the rationale, background, and alternative approaches to this extension can be found here: https://docs.google.com/document/d/1qcsjyFVX9oI_746RdMoDdmQPu940s0YtDjb1en1Xtdw ## Installation diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..cfa56a2 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,112 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "ndx-events" +version = "0.3.0" +authors = [ + { name="Ryan Ly", email="rly@lbl.gov" } +] +description = "NWB extension for storing timestamped event and TTL pulse data" +readme = "README.md" +requires-python = ">=3.8" +license = {text = "BSD-3"} +classifiers = [ + # TODO: add classifiers before release + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", +] +keywords = [ + 'NeurodataWithoutBorders', + 'NWB', + 'nwb-extension', + 'ndx-extension', +] +dependencies = [ + "pynwb>=2.5.0", + "hdmf>=3.10.0", +] + +# TODO: add URLs before release +[project.urls] +"Homepage" = "https://github.com/rly/ndx-events" +# "Documentation" = "https://package.readthedocs.io/" +"Bug Tracker" = "https://github.com/rly/ndx-events/issues" +"Discussions" = "https://github.com/rly/ndx-events/discussions" +"Changelog" = "https://github.com/rly/ndx-events/CHANGELOG.md" + +[tool.hatch.build] +include = [ + "src/pynwb", + "spec/ndx-events.extensions.yaml", + "spec/ndx-events.namespace.yaml", +] +exclude = [ + "src/pynwb/tests", +] + +[tool.hatch.build.targets.wheel] +packages = [ + "src/pynwb/ndx_events", + "spec" +] + +[tool.hatch.build.targets.wheel.sources] +"spec" = "ndx_events/spec" + +[tool.hatch.build.targets.sdist] +include = [ + 
"src/pynwb", + "spec/ndx-events.extensions.yaml", + "spec/ndx-events.namespace.yaml", + "docs", +] +exclude = [] + +[tool.pytest.ini_options] +addopts = "--cov --cov-report html" + +[tool.codespell] +skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,hdmf-common-schema,./docs/_build/*,*.ipynb" + +[tool.coverage.run] +branch = true +source = ["src/pynwb"] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] + +[tool.black] +line-length = 120 +preview = true +exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb|docs/" + +[tool.ruff] +select = ["E", "F", "T100", "T201", "T203"] +exclude = [ + ".git", + ".tox", + "__pycache__", + "build/", + "dist/", +] +line-length = 120 + +[tool.ruff.per-file-ignores] +"src/spec/create_extension_spec.py" = ["T201"] +"src/pynwb/tests/test_example_usage.py" = ["T201"] + +[tool.ruff.mccabe] +max-complexity = 17 \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..7655a0a --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,15 @@ +# pinned dependencies to reproduce an entire development environment to +# run tests, check code style, and generate documentation +black==23.9.1 +codespell==2.2.6 +coverage==7.3.2 +hdmf==3.10.0 +hdmf-docutils==0.4.5 +pre-commit==3.4.0 +pynwb==2.5.0 +pytest==7.4.2 +pytest-cov==4.1.0 +python-dateutil==2.8.2 +pytest-subtests==0.6.0 +ruff==0.0.292 +tox==4.11.3 diff --git a/requirements-min.txt b/requirements-min.txt new file mode 100644 index 0000000..695410a --- /dev/null +++ b/requirements-min.txt @@ -0,0 +1,5 @@ +# minimum versions of package dependencies for installation +# these should match the minimum versions specified in pyproject.toml +# NOTE: it may be possible to relax these minimum requirements +pynwb==2.5.0 +hdmf==3.10.0 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index d68ad89..0000000 --- a/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -pynwb>=1.1.2 -hdmf_docutils -pytest \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index b3e6c0b..0000000 --- a/setup.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[wheel] -universal = 1 - -[flake8] -max-line-length = 120 -max-complexity = 17 -exclude = - .git, - .tox, - __pycache__, - build/, - dist/, - docs/source/conf.py - versioneer.py -per-file-ignores = - src/pynwb/tests/test_example_usage.py:T001 - - -[metadata] -description-file = README.md diff --git a/setup.py b/setup.py deleted file mode 100644 index 29e384d..0000000 --- a/setup.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- - -import os - -from setuptools import setup, find_packages -from shutil import copy2 - -# load README.md/README.rst file -try: - if os.path.exists('README.md'): - with open('README.md', 'r') as fp: - readme = fp.read() - readme_type = 'text/markdown; charset=UTF-8' - elif os.path.exists('README.rst'): - with open('README.rst', 'r') as fp: - readme = fp.read() - readme_type = 'text/x-rst; charset=UTF-8' - else: - readme = "" -except Exception: - readme = "" - -setup_args = { - 'name': 'ndx-events', - 'version': '0.2.0', - 'description': 'NWB extension for storing timestamped event and TTL pulse data', - 'long_description': readme, - 'long_description_content_type': readme_type, - 'author': 'Ryan Ly', - 'author_email': 'rly@lbl.gov', - 'url': 'https://github.com/rly/ndx-events', - 'license': 'BSD 3-Clause', - 'install_requires': [ - 'pynwb>=1.1.2' - ], - 'packages': 
find_packages('src/pynwb'), - 'package_dir': {'': 'src/pynwb'}, - 'package_data': {'ndx_events': [ - 'spec/ndx-events.namespace.yaml', - 'spec/ndx-events.extensions.yaml', - ]}, - 'classifiers': [ - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - ], - 'zip_safe': False -} - - -def _copy_spec_files(project_dir): - ns_path = os.path.join(project_dir, 'spec', 'ndx-events.namespace.yaml') - ext_path = os.path.join(project_dir, 'spec', 'ndx-events.extensions.yaml') - - dst_dir = os.path.join(project_dir, 'src', 'pynwb', 'ndx_events', 'spec') - if not os.path.exists(dst_dir): - os.mkdir(dst_dir) - - copy2(ns_path, dst_dir) - copy2(ext_path, dst_dir) - - -if __name__ == '__main__': - _copy_spec_files(os.path.dirname(__file__)) - setup(**setup_args) diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index 82abd08..1a58541 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -1,32 +1,34 @@ import os from pynwb import load_namespaces, get_class -# Set path of the namespace.yaml file to the expected install location -ndx_events_specpath = os.path.join( - os.path.dirname(__file__), - 'spec', - 'ndx-events.namespace.yaml' -) +try: + from importlib.resources import files +except ImportError: + # TODO: Remove when python 3.9 becomes the new minimum + from importlib_resources import files -# If the extension has not been installed yet but we are running directly from -# the git repo -if not os.path.exists(ndx_events_specpath): - ndx_events_specpath = os.path.abspath(os.path.join( - os.path.dirname(__file__), - '..', '..', '..', - 'spec', - 'ndx-events.namespace.yaml' - )) +# Get path to the namespace.yaml file with the expected location when installed not in editable mode +__location_of_this_file = files(__name__) +__spec_path = __location_of_this_file / "spec" / "ndx-events.namespace.yaml" + +# If that path does not exist, we are likely running in editable mode. Use the local path instead +if not os.path.exists(__spec_path): + __spec_path = __location_of_this_file.parent.parent.parent / "spec" / "ndx-events.namespace.yaml" # Load the namespace -load_namespaces(ndx_events_specpath) +load_namespaces(str(__spec_path)) -from . import io as __io # noqa: E402,F401 +# TODO: Define your classes here to make them accessible at the package level. 
+# Either have PyNWB generate a class from the spec using `get_class` as shown +# below or write a custom class and register it using the class decorator +# `@register_class("TetrodeSeries", "ndx-hed")` +Task = get_class("Task", "ndx-events") +TimestampVectorData = get_class("TimestampVectorData", "ndx-events") +DurationVectorData = get_class("DurationVectorData", "ndx-events") +EventTypesTable = get_class("EventTypesTable", "ndx-events") +EventsTable = get_class("EventsTable", "ndx-events") +TtlTypesTable = get_class("TtlTypesTable", "ndx-events") +TtlsTable = get_class("TtlsTable", "ndx-events") -Task = get_class('Task', 'ndx-events') -TimestampVectorData = get_class('TimestampVectorData', 'ndx-events') -DurationVectorData = get_class('DurationVectorData', 'ndx-events') -EventTypesTable = get_class('EventTypesTable', 'ndx-events') -EventsTable = get_class('EventsTable', 'ndx-events') -TtlTypesTable = get_class('TtlTypesTable', 'ndx-events') -TtlsTable = get_class('TtlsTable', 'ndx-events') +# Remove these functions from the package +del load_namespaces, get_class diff --git a/src/pynwb/ndx_events/events.py b/src/pynwb/ndx_events/events.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/pynwb/ndx_events/io/__init__.py b/src/pynwb/ndx_events/io/__init__.py deleted file mode 100644 index 24af8ee..0000000 --- a/src/pynwb/ndx_events/io/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . import events as __events # noqa: E402,F401 diff --git a/src/pynwb/ndx_events/io/events.py b/src/pynwb/ndx_events/io/events.py deleted file mode 100644 index 29f3089..0000000 --- a/src/pynwb/ndx_events/io/events.py +++ /dev/null @@ -1,71 +0,0 @@ -from pynwb import register_map -from pynwb.io.core import NWBContainerMapper -# from hdmf.common.io.table import DynamicTableMap -# from hdmf.build import ObjectMapper, BuildManager -# from hdmf.common import VectorData -# from hdmf.utils import getargs, docval -# from hdmf.spec import AttributeSpec - -# from ..events import Events - - -# @register_map(Events) -# class EventsMap(NWBContainerMapper): - -# def __init__(self, spec): -# super().__init__(spec) -# # map object attribute Events.unit -> spec Events/timestamps.unit -# # map object attribute Events.resolution -> spec Events/timestamps.resolution -# timestamps_spec = self.spec.get_dataset('timestamps') -# self.map_spec('unit', timestamps_spec.get_attribute('unit')) -# self.map_spec('resolution', timestamps_spec.get_attribute('resolution')) - - -# @register_map(LabeledEvents) -# class LabeledEventsMap(EventsMap): - -# def __init__(self, spec): -# super().__init__(spec) -# # map object attribute LabeledEvents.labels -> spec LabeledEvents/data.labels -# data_spec = self.spec.get_dataset('data') -# self.map_spec('labels', data_spec.get_attribute('labels')) - - -# @register_map(AnnotatedEventsTable) -# class AnnotatedEventsTableMap(DynamicTableMap): - -# def __init__(self, spec): -# super().__init__(spec) -# # map object attribute AnnotatedEventsTable.resolution -> spec AnnotatedEventsTable/event_times.resolution -# event_times_spec = self.spec.get_dataset('event_times') -# self.map_spec('resolution', event_times_spec.get_attribute('resolution')) - -# @DynamicTableMap.constructor_arg('resolution') -# def resolution_carg(self, builder, manager): -# # on construct, map builder for AnnotatedEventsTable.datasets['event_times'].attributes['resolution'] -# # -> AnnotatedEventsTable.__init__ argument 'resolution' -# if 'event_times' in builder: -# return builder['event_times'].attributes.get('resolution') -# 
return None - - -# @register_map(VectorData) -# class VectorDataMap(ObjectMapper): - -# # TODO when merging into NWB core, fold this into pynwb.io.core.VectorDataMap - -# @docval({"name": "spec", "type": AttributeSpec, "doc": "the spec to get the attribute value for"}, -# {"name": "container", "type": VectorData, "doc": "the container to get the attribute value from"}, -# {"name": "manager", "type": BuildManager, "doc": "the BuildManager used for managing this build"}, -# returns='the value of the attribute') -# def get_attr_value(self, **kwargs): -# ''' Get the value of the attribute corresponding to this spec from the given container ''' -# spec, container, manager = getargs('spec', 'container', 'manager', kwargs) - -# # on build of VectorData objects, map object attribute AnnotatedEventsTable.resolution -# # -> spec AnnotatedEventsTable/event_times.resolution -# if isinstance(container.parent, AnnotatedEventsTable): -# if container.name == 'event_times': -# if spec.name == 'resolution': -# return container.parent.resolution -# return super().get_attr_value(**kwargs) diff --git a/src/pynwb/tests/test_events.py b/src/pynwb/tests/test_events.py index 14e49c6..0709041 100644 --- a/src/pynwb/tests/test_events.py +++ b/src/pynwb/tests/test_events.py @@ -4,17 +4,24 @@ from pynwb.testing import TestCase, remove_test_file from pynwb.testing.mock.file import mock_NWBFile -from ndx_events import EventsTable, EventTypesTable, TtlsTable, TtlTypesTable, Task, DurationVectorData, TimestampVectorData +from ndx_events import ( + EventsTable, + EventTypesTable, + TtlsTable, + TtlTypesTable, + Task, + DurationVectorData, + TimestampVectorData, +) class TestTimestampVectorData(TestCase): - def test_init(self): data = TimestampVectorData(name="test", description="description") assert data.name == "test" assert data.description == "description" assert data.unit == "seconds" - assert data.resolution == None + assert data.resolution is None def test_add_to_dynamic_table(self): col = TimestampVectorData(name="test", description="description") @@ -24,13 +31,13 @@ def test_add_to_dynamic_table(self): assert table.test[0] == 0.1 def test_set_resolution_init(self): - data = TimestampVectorData(name="test", description="description", resolution=1/32000.0) - assert data.resolution == 1/32000.0 + data = TimestampVectorData(name="test", description="description", resolution=1 / 32000.0) + assert data.resolution == 1 / 32000.0 def test_set_resolution_attr(self): data = TimestampVectorData(name="test", description="description") - data.resolution = 1/32000.0 - assert data.resolution == 1/32000.0 + data.resolution = 1 / 32000.0 + assert data.resolution == 1 / 32000.0 class TestTimestampVectorDataSimpleRoundtrip(TestCase): @@ -68,7 +75,6 @@ def test_roundtrip(self): class TestDurationVectorData(TestCase): - def test_init(self): data = DurationVectorData(name="test", description="description") assert data.name == "test" @@ -118,7 +124,6 @@ def test_roundtrip(self): class TestTask(TestCase): - def test_init(self): task = Task() assert task.name == "task" @@ -159,7 +164,6 @@ def test_roundtrip(self): class TestEventTypesTable(TestCase): - def test_init(self): event_types_table = EventTypesTable(description="Metadata about event types") assert event_types_table.name == "EventTypesTable" @@ -227,7 +231,8 @@ def test_roundtrip(self): assert read_event_types_table.description == "Metadata about event types" assert all(read_event_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) assert all( - 
read_event_types_table["event_type_description"].data[:] == [ + read_event_types_table["event_type_description"].data[:] + == [ "Times when the cue was on screen.", "Times when the stimulus was on screen.", ] @@ -235,7 +240,6 @@ def test_roundtrip(self): class TestEventsTable(TestCase): - def test_init(self): events_table = EventsTable(description="Metadata about events") assert events_table.name == "EventsTable" @@ -396,7 +400,6 @@ def test_roundtrip(self): class TestTtlTypesTable(TestCase): - def test_init(self): ttl_types_table = TtlTypesTable(description="Metadata about TTL types") assert ttl_types_table.name == "TtlTypesTable" @@ -469,7 +472,8 @@ def test_roundtrip(self): assert read_ttl_types_table.description == "Metadata about TTL types" assert all(read_ttl_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) assert all( - read_ttl_types_table["event_type_description"].data[:] == [ + read_ttl_types_table["event_type_description"].data[:] + == [ "Times when the cue was on screen.", "Times when the stimulus was on screen.", ] @@ -478,7 +482,6 @@ def test_roundtrip(self): class TestTtlsTable(TestCase): - def test_init(self): ttls_table = TtlsTable(description="Metadata about TTLs") assert ttls_table.name == "TtlsTable" @@ -581,4 +584,4 @@ def test_roundtrip(self): assert read_ttls_table.description == "Metadata about TTLs" assert all(read_ttls_table["timestamp"].data[:] == [0.1, 1.1]) assert all(read_ttls_table["ttl_type"].data[:] == [0, 0]) - assert read_ttls_table["ttl_type"].table is read_ttl_types_table \ No newline at end of file + assert read_ttls_table["ttl_type"].table is read_ttl_types_table diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index 3df07fa..ba32e10 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -123,7 +123,7 @@ def test_example_usage(): timestamp=6825.934244, ttl_type=6, ) - ttls_table.timestamp.resolution = 1/50000.0 # specify the resolution of the timestamps (optional) + ttls_table.timestamp.resolution = 1 / 50000.0 # specify the resolution of the timestamps (optional) # if TTLs are recorded, then the events table should hold any non-TTL events # recorded by the acquisition system @@ -143,8 +143,7 @@ def test_example_usage(): events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) events_table.add_column(name="category_name", description="Name of the category of the stimulus") events_table.add_column( - name="stimulus_image_index", - description="Frame index of the stimulus image in the StimulusPresentation object" + name="stimulus_image_index", description="Frame index of the stimulus image in the StimulusPresentation object" ) events_table.add_row( timestamp=6821.208244, @@ -160,8 +159,8 @@ def test_example_usage(): event_type=0, duration=0.99484, ) - events_table.timestamp.resolution = 1/50000.0 # specify the resolution of the timestamps (optional) - events_table.duration.resolution = 1/50000.0 # specify the resolution of the durations (optional) + events_table.timestamp.resolution = 1 / 50000.0 # specify the resolution of the timestamps (optional) + events_table.duration.resolution = 1 / 50000.0 # specify the resolution of the durations (optional) task = Task() task.event_types = event_types_table diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 9576de0..75d47f9 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -12,14 
+12,14 @@ def main(): contact=["rly@lbl.gov"], ) - ns_builder.include_namespace('core') + ns_builder.include_namespace("core") timestamp_vector_data = NWBDatasetSpec( neurodata_type_def="TimestampVectorData", neurodata_type_inc="VectorData", doc="A VectorData that stores timestamps in seconds.", dtype="float64", - dims=['num_times'], + dims=["num_times"], shape=[None], attributes=[ NWBAttributeSpec( @@ -31,8 +31,10 @@ def main(): NWBAttributeSpec( name="resolution", dtype="float64", - doc=("The smallest possible difference between two timestamps. Usually 1 divided by the " - "sampling rate for timestamps of the data acquisition system."), + doc=( + "The smallest possible difference between two timestamps. Usually 1 divided by the " + "sampling rate for timestamps of the data acquisition system." + ), required=False, ), ], @@ -43,7 +45,7 @@ def main(): neurodata_type_inc="VectorData", doc="A VectorData that stores durations in seconds.", dtype="float64", - dims=['num_events'], + dims=["num_events"], shape=[None], attributes=[ NWBAttributeSpec( @@ -55,8 +57,10 @@ def main(): NWBAttributeSpec( name="resolution", dtype="float64", - doc=("The smallest possible difference between two timestamps. Usually 1 divided by the " - "sampling rate for timestamps of the data acquisition system."), + doc=( + "The smallest possible difference between two timestamps. Usually 1 divided by the " + "sampling rate for timestamps of the data acquisition system." + ), required=False, ), ], @@ -64,51 +68,55 @@ def main(): event_types_table = NWBGroupSpec( neurodata_type_def="EventTypesTable", - neurodata_type_inc='DynamicTable', - doc=("A column-based table to store information about each event type, such as name, one event type per row."), + neurodata_type_inc="DynamicTable", + doc="A column-based table to store information about each event type, such as name, one event type per row.", default_name="EventTypesTable", datasets=[ NWBDatasetSpec( - name='event_name', - neurodata_type_inc='VectorData', - dtype='text', - doc='Name of each event type.', + name="event_name", + neurodata_type_inc="VectorData", + dtype="text", + doc="Name of each event type.", ), NWBDatasetSpec( - name='event_type_description', - neurodata_type_inc='VectorData', - dtype='text', - doc='Description of each event type.', + name="event_type_description", + neurodata_type_inc="VectorData", + dtype="text", + doc="Description of each event type.", ), ], ) events_table = NWBGroupSpec( - neurodata_type_def='EventsTable', - neurodata_type_inc='DynamicTable', - doc=("A column-based table to store information about events (event instances), one event per row. " - "Each event must have an event_type, which is a row in the EventTypesTable. Additional columns " - "may be added to store metadata about each event, such as the duration of the event, or a " - "text value of the event."), + neurodata_type_def="EventsTable", + neurodata_type_inc="DynamicTable", + doc=( + "A column-based table to store information about events (event instances), one event per row. " + "Each event must have an event_type, which is a row in the EventTypesTable. Additional columns " + "may be added to store metadata about each event, such as the duration of the event, or a " + "text value of the event." 
+ ), default_name="EventsTable", datasets=[ NWBDatasetSpec( - name='timestamp', - neurodata_type_inc='TimestampVectorData', + name="timestamp", + neurodata_type_inc="TimestampVectorData", doc="The time that each event occurred, in seconds, from the session start time.", ), NWBDatasetSpec( - name='event_type', - neurodata_type_inc='DynamicTableRegion', - dims=['num_events'], + name="event_type", + neurodata_type_inc="DynamicTableRegion", + dims=["num_events"], shape=[None], - doc=("The type of event that occurred. This is represented as a reference " - "to a row of the EventTypesTable."), + doc=( + "The type of event that occurred. This is represented as a reference " + "to a row of the EventTypesTable." + ), quantity="?", ), NWBDatasetSpec( - name='duration', - neurodata_type_inc='DurationVectorData', + name="duration", + neurodata_type_inc="DurationVectorData", doc="Optional column containing the duration of each event, in seconds.", quantity="?", ), @@ -117,43 +125,46 @@ def main(): ttl_types_table = NWBGroupSpec( neurodata_type_def="TtlTypesTable", - neurodata_type_inc='EventTypesTable', - doc=("A column-based table to store information about each TTL type, such as name and pulse value, " - "one TTL type per row."), + neurodata_type_inc="EventTypesTable", + doc=( + "A column-based table to store information about each TTL type, such as name and pulse value, " + "one TTL type per row." + ), default_name="TtlTypesTable", datasets=[ NWBDatasetSpec( - name='pulse_value', - neurodata_type_inc='VectorData', - dtype='uint8', - doc='TTL pulse value for each event type.', + name="pulse_value", + neurodata_type_inc="VectorData", + dtype="uint8", + doc="TTL pulse value for each event type.", ), ], ) ttls_table = NWBGroupSpec( - neurodata_type_def='TtlsTable', - neurodata_type_inc='EventsTable', - doc=("Data type to hold timestamps of TTL pulses."), + neurodata_type_def="TtlsTable", + neurodata_type_inc="EventsTable", + doc="Data type to hold timestamps of TTL pulses.", default_name="TtlsTable", datasets=[ NWBDatasetSpec( - name='ttl_type', - neurodata_type_inc='DynamicTableRegion', - dims=['num_events'], + name="ttl_type", + neurodata_type_inc="DynamicTableRegion", + dims=["num_events"], shape=[None], - doc=("The type of TTL that occurred. This is represented as a reference " - "to a row of the TtlTypesTable."), + doc="The type of TTL that occurred. This is represented as a reference to a row of the TtlTypesTable.", ), ], ) task = NWBGroupSpec( - neurodata_type_def='Task', - neurodata_type_inc='LabMetaData', - doc=("A group to store task-related general metadata. TODO When merged with core, " - "this will no longer inherit from LabMetaData but from NWBContainer and be placed " - "optionally in /general."), + neurodata_type_def="Task", + neurodata_type_inc="LabMetaData", + doc=( + "A group to store task-related general metadata. TODO When merged with core, " + "this will no longer inherit from LabMetaData but from NWBContainer and be placed " + "optionally in /general." 
+ ), name="task", groups=[ NWBGroupSpec( @@ -182,7 +193,7 @@ def main(): ] # export the spec to yaml files in the spec folder - output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec')) + output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "spec")) export_spec(ns_builder, new_data_types, output_dir) From e75803d3115c0461c33ccea9eb778f14db535a8d Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Thu, 26 Oct 2023 23:19:02 -0700 Subject: [PATCH 11/15] Use updated hdmf-docutils to fix build bug --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7655a0a..8026bee 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,7 +4,7 @@ black==23.9.1 codespell==2.2.6 coverage==7.3.2 hdmf==3.10.0 -hdmf-docutils==0.4.5 +hdmf-docutils==0.4.6 pre-commit==3.4.0 pynwb==2.5.0 pytest==7.4.2 From 7badf10f5a8337ff181a42c614f115b1502625d9 Mon Sep 17 00:00:00 2001 From: Ryan Ly <rly@lbl.gov> Date: Mon, 30 Oct 2023 12:03:48 -0700 Subject: [PATCH 12/15] Update pyproject.toml --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index cfa56a2..6ef711f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ keywords = [ ] dependencies = [ "pynwb>=2.5.0", - "hdmf>=3.10.0", + "hdmf>=3.11.0", ] # TODO: add URLs before release @@ -109,4 +109,4 @@ line-length = 120 "src/pynwb/tests/test_example_usage.py" = ["T201"] [tool.ruff.mccabe] -max-complexity = 17 \ No newline at end of file +max-complexity = 17 From 715e895339229d41042b860899763b5b30ea0c7f Mon Sep 17 00:00:00 2001 From: Ryan Ly <rly@lbl.gov> Date: Mon, 30 Oct 2023 12:04:04 -0700 Subject: [PATCH 13/15] Update requirements-dev.txt --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 8026bee..38eb48c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,7 +3,7 @@ black==23.9.1 codespell==2.2.6 coverage==7.3.2 -hdmf==3.10.0 +hdmf==3.11.0 hdmf-docutils==0.4.6 pre-commit==3.4.0 pynwb==2.5.0 From 38e594a02015f0a0d3b5c3e9ffedc7a554ad0629 Mon Sep 17 00:00:00 2001 From: Ryan Ly <rly@lbl.gov> Date: Mon, 30 Oct 2023 12:04:31 -0700 Subject: [PATCH 14/15] Update requirements-min.txt --- requirements-min.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements-min.txt b/requirements-min.txt index 695410a..12925ce 100644 --- a/requirements-min.txt +++ b/requirements-min.txt @@ -1,5 +1,4 @@ # minimum versions of package dependencies for installation # these should match the minimum versions specified in pyproject.toml -# NOTE: it may be possible to relax these minimum requirements pynwb==2.5.0 -hdmf==3.10.0 +hdmf==3.11.0 # required for bug fixes for generating some classes From b0c87a0abfb673868b5def42f1433d91bbd98c0a Mon Sep 17 00:00:00 2001 From: rly <rly@lbl.gov> Date: Mon, 22 Jan 2024 08:58:30 -0800 Subject: [PATCH 15/15] Add notes and infra changes --- .codespellrc | 3 -- .pre-commit-config.yaml | 28 ++++++++++++++ README.md | 24 ++++++++++++ src/pynwb/tests/test_example_usage.py | 56 +++++++++++++++++++++++++-- src/spec/create_extension_spec.py | 17 +++++--- 5 files changed, 116 insertions(+), 12 deletions(-) delete mode 100644 .codespellrc create mode 100644 .pre-commit-config.yaml diff --git a/.codespellrc b/.codespellrc deleted file mode 100644 index 5aa4b5e..0000000 --- a/.codespellrc +++ /dev/null @@ -1,3 
+0,0 @@ -[codespell] -skip = .git,*.pdf,*.svg -# ignore-words-list = diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..902b3b2 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +# NOTE: run `pre-commit autoupdate` to update hooks to latest version +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-added-large-files + - id: check-json + - id: check-toml + - id: name-tests-test + args: [--pytest-test-first] + - id: check-docstring-first +- repo: https://github.com/psf/black + rev: 23.12.0 + hooks: + - id: black +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.8 + hooks: + - id: ruff +- repo: https://github.com/codespell-project/codespell + rev: v2.2.6 + hooks: + - id: codespell + additional_dependencies: + - tomli diff --git a/README.md b/README.md index 52b22ea..410ca5b 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,30 @@ Matlab: generateExtension('<directory path>/ndx-events/spec/ndx-events.namespace.yaml'); ``` +## Developer installation +In a Python 3.8-3.12 environment: +```bash +pip install -r requirements-dev.txt +pip install -e . +``` + +Run tests: +```bash +pytest +``` + +Install pre-commit hooks: +```bash +pre-commit install +``` + +Style and other checks: +```bash +black . +ruff . +codespell . +``` + ## Example usage Python: diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index ba32e10..1518a19 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -1,4 +1,4 @@ -def test_example_usage(): +def test_example_usage1(): from datetime import datetime from ndx_events import EventsTable, EventTypesTable, TtlsTable, TtlTypesTable, Task import numpy as np @@ -42,7 +42,7 @@ def test_example_usage(): ) learning_response_description = ( "During the learning phase, subjects are instructed to respond to the following " - "question: 'Is this an animal?' in each trial. Response are encoded as 'Yes, this " + "question: 'Is this an animal?' in each trial. Responses are encoded as 'Yes, this " "is an animal' (20) and 'No, this is not an animal' (21)." 
     )
     ttl_types_table.add_row(
@@ -185,5 +185,55 @@ def test_example_usage():
         print(read_nwbfile.acquisition["TtlsTable"].to_dataframe())
 
 
+def test_example_usage2():
+    """Example storing lick times"""
+    from datetime import datetime
+    from ndx_events import EventsTable, EventTypesTable, Task
+    import numpy as np
+    from pynwb import NWBFile, NWBHDF5IO
+
+    nwbfile = NWBFile(
+        session_description="session description",
+        identifier="cool_experiment_001",
+        session_start_time=datetime.now().astimezone(),
+    )
+
+    # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable
+    # must be named "event_types" according to the spec
+    event_types_table = EventTypesTable(name="event_types", description="Metadata about event types")
+    event_types_table.add_row(
+        event_name="lick",
+        event_type_description="Times when the subject licked the port",
+    )
+
+    # create a random sorted array of 1000 lick timestamps (dtype=float) from 0 to 3600 seconds
+    lick_times = sorted(np.random.uniform(0, 3600, 1000))
+
+    events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table})
+    for t in lick_times:
+        # event_type=0 corresponds to the first row in the event_types_table
+        events_table.add_row(timestamp=t, event_type=0)
+    events_table.timestamp.resolution = 1 / 30000.0  # licks were detected at 30 kHz
+
+    task = Task()
+    task.event_types = event_types_table
+    nwbfile.add_lab_meta_data(task)
+    nwbfile.add_acquisition(events_table)
+
+    # write nwb file
+    filename = "test.nwb"
+    with NWBHDF5IO(filename, "w") as io:
+        io.write(nwbfile)
+
+    # read nwb file and check its contents
+    with NWBHDF5IO(filename, "r", load_namespaces=True) as io:
+        read_nwbfile = io.read()
+        print(read_nwbfile)
+        # access the events table and event types table and print them
+        print(read_nwbfile.get_lab_meta_data("task").event_types.to_dataframe())
+        print(read_nwbfile.acquisition["EventsTable"].to_dataframe())
+
+
 if __name__ == "__main__":
-    test_example_usage()
+    test_example_usage1()
+    test_example_usage2()
diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py
index 75d47f9..32a5f60 100644
--- a/src/spec/create_extension_spec.py
+++ b/src/spec/create_extension_spec.py
@@ -17,7 +17,7 @@ def main():
     timestamp_vector_data = NWBDatasetSpec(
         neurodata_type_def="TimestampVectorData",
         neurodata_type_inc="VectorData",
-        doc="A VectorData that stores timestamps in seconds.",
+        doc="A 1-dimensional VectorData that stores timestamps in seconds.",
         dtype="float64",
         dims=["num_times"],
         shape=[None],
@@ -26,8 +26,11 @@ def main():
                 name="unit",
                 dtype="text",
                 doc="The unit of measurement for the timestamps, fixed to 'seconds'.",
                 value="seconds",
             ),
+            # NOTE: this requires all timestamps to have the same resolution which may not be true
+            # if they come from different acquisition systems or processing pipelines...
+            # maybe this should be a column of the event type table instead?
NWBAttributeSpec( name="resolution", dtype="float64", @@ -43,7 +46,7 @@ def main(): duration_vector_data = NWBDatasetSpec( neurodata_type_def="DurationVectorData", neurodata_type_inc="VectorData", - doc="A VectorData that stores durations in seconds.", + doc="A 1-dimensional VectorData that stores durations in seconds.", dtype="float64", dims=["num_events"], shape=[None], @@ -54,6 +57,7 @@ def main(): doc="The unit of measurement for the durations, fixed to 'seconds'.", value="seconds", ), + # NOTE: this is usually the same as the timestamp resolution NWBAttributeSpec( name="resolution", dtype="float64", @@ -92,10 +96,11 @@ def main(): neurodata_type_inc="DynamicTable", doc=( "A column-based table to store information about events (event instances), one event per row. " - "Each event must have an event_type, which is a row in the EventTypesTable. Additional columns " - "may be added to store metadata about each event, such as the duration of the event, or a " - "text value of the event." + "Each event must have an event_type, which is a reference to a row in the EventTypesTable. " + "Additional columns may be added to store metadata about each event, such as the duration " + "of the event, or a text value of the event." ), + # NOTE: custom columns should apply to every event in the table which may not be the case default_name="EventsTable", datasets=[ NWBDatasetSpec(