diff --git a/.codespellrc b/.codespellrc deleted file mode 100644 index 5aa4b5e..0000000 --- a/.codespellrc +++ /dev/null @@ -1,3 +0,0 @@ -[codespell] -skip = .git,*.pdf,*.svg -# ignore-words-list = diff --git a/.gitignore b/.gitignore index 0e5ce43..fac0f30 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,8 @@ +# output NWB files +*.nwb + # generated docs -docs/_build docs/source/_format_auto_docs -docs/source/_static -!docs/source/_static/theme_overrides.css - -# copied spec files -src/pynwb/ndx_events/spec/*.yaml # Byte-compiled / optimized / DLL files __pycache__/ @@ -29,6 +26,7 @@ parts/ sdist/ var/ wheels/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg @@ -47,14 +45,18 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover +*.py,cover .hypothesis/ .pytest_cache/ +cover/ +.ruff_cache/ # Translations *.mo @@ -64,6 +66,7 @@ coverage.xml *.log local_settings.py db.sqlite3 +db.sqlite3-journal # Flask stuff: instance/ @@ -76,16 +79,49 @@ instance/ docs/_build/ # PyBuilder +.pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints -# pyenv -.python-version +# IPython +profile_default/ +ipython_config.py -# celery beat schedule file +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
+# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff celerybeat-schedule +celerybeat.pid # SageMath parsed files *.sage.py @@ -111,6 +147,24 @@ venv.bak/ # mypy .mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ # Mac finder .DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..902b3b2 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +# NOTE: run `pre-commit autoupdate` to update hooks to latest version +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-added-large-files + - id: check-json + - id: check-toml + - id: name-tests-test + args: [--pytest-test-first] + - id: check-docstring-first +- repo: https://github.com/psf/black + rev: 23.12.0 + hooks: + - id: black +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.8 + hooks: + - id: ruff +- repo: https://github.com/codespell-project/codespell + rev: v2.2.6 + hooks: + - id: codespell + additional_dependencies: + - tomli diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..448372a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,5 @@ +# Changelog for ndx-events + +## 0.3.0 (Upcoming) + + diff --git a/LICENSE.txt b/LICENSE.txt index e69de29..8850436 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2023, Ryan Ly +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index fe511eb..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include LICENSE.txt README.md requirements.txt -include spec/*.yaml - -graft src/pynwb/tests -global-exclude __pycache__ *.py[co] diff --git a/README.md b/README.md index 56c1845..410ca5b 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ This is an NWB extension for storing timestamped event data and TTL pulses. +The latest version is 0.3.0. This is a major change from previous versions. + Events can be: 1. **Simple events**. These are stored in the `Events` type. The `Events` type consists of only a name, a description, and a 1D array of timestamps. This should be used instead of a `TimeSeries` when the time series has no data. @@ -20,124 +22,47 @@ subtype of `DynamicTable`, where each row corresponds to a different event type. Unlike for the other event types, users can add their own custom columns to annotate each event type or event time. This can be useful for storing event metadata related to data preprocessing and analysis, such as marking bad events. -This extension was developed by Ryan Ly, Ben Dichter, Oliver Rübel, and Andrew Tritt. 
Information about the rationale, -background, and alternative approaches to this extension can be found here: +This extension was developed by Ryan Ly, Oliver Rübel, and the NWB Technical Advisory Board. +Information about the rationale, background, and alternative approaches to this extension can be found here: https://docs.google.com/document/d/1qcsjyFVX9oI_746RdMoDdmQPu940s0YtDjb1en1Xtdw ## Installation -Python: -``` -pip install ndx-events -``` -Matlab: +Python: +```bash +pip install -U ndx-events ``` + +Matlab: +```matlab generateExtension('/ndx-events/spec/ndx-events.namespace.yaml'); ``` -## Example usage -Python: -```python -from datetime import datetime - -from pynwb import NWBFile, NWBHDF5IO -from ndx_events import LabeledEvents, AnnotatedEventsTable - - -nwb = NWBFile( - session_description='session description', - identifier='cool_experiment_001', - session_start_time=datetime.now().astimezone() -) -# create a new LabeledEvents type to hold events recorded from the data acquisition system -events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 0.5, 0.6, 2., 2.05, 3., 3.5, 3.6, 4.], - resolution=1e-5, # resolution of the timestamps, i.e., smallest possible difference between timestamps - data=[0, 1, 2, 3, 5, 0, 1, 2, 4], - labels=['trial_start', 'cue_onset', 'cue_offset', 'response_left', 'response_right', 'reward'] -) - -# add the LabeledEvents type to the acquisition group of the NWB file -nwb.add_acquisition(events) - -# create a new AnnotatedEventsTable type to hold annotated events -# each row of the table represents a single event type -annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 # resolution of the timestamps, i.e., smallest possible difference between timestamps -) -# add a custom indexed (ragged) column to represent whether each event time was a bad event -annotated_events.add_column( - 
name='bad_event', - description='whether each event time should be excluded', - index=True -) -# add an event type (row) to the AnnotatedEventsTable instance -annotated_events.add_event_type( - label='Reward', - event_description='Times when the subject received juice reward.', - event_times=[1., 2., 3.], - bad_event=[False, False, True], - id=3 -) -# convert the AnnotatedEventsTable to a pandas.DataFrame and print it -print(annotated_events.to_dataframe()) - -# create a processing module in the NWB file to hold processed events data -events_module = nwb.create_processing_module( - name='events', - description='processed event data' -) +## Developer installation +In a Python 3.8-3.12 environment: +```bash +pip install -r requirements-dev.txt +pip install -e . +``` -# add the AnnotatedEventsTable instance to the processing module -events_module.add(annotated_events) +Run tests: +```bash +pytest +``` -# write nwb file -filename = 'test.nwb' -with NWBHDF5IO(filename, 'w') as io: - io.write(nwb) +Install pre-commit hooks: +```bash +pre-commit install +``` -# read nwb file and check its contents -with NWBHDF5IO(filename, 'r', load_namespaces=True) as io: - nwb = io.read() - print(nwb) - # access the LabeledEvents container by name from the NWBFile acquisition group and print it - print(nwb.acquisition['LabeledEvents']) - # access the AnnotatedEventsTable by name from the 'events' processing module, convert it to - # a pandas.DataFrame, and print that - print(nwb.processing['events']['AnnotatedEventsTable'].to_dataframe()) +Style and other checks: +```bash +black . +ruff . +codespell . ``` -Matlab (see discussion [here](https://github.com/NeurodataWithoutBorders/helpdesk/discussions/27#discussioncomment-2612231)): -```matlab -bad_event_col = types.hdmf_common.VectorData( ... - 'description', 'whether each event time should be excluded', ... - 'data', [false, false, true, false, true] ... -); -bad_event_col_index = types.hdmf_common.VectorIndex( ... 
- 'description', 'bad_event column index', ... - 'target', types.untyped.ObjectView(bad_event_col), ... - 'data', [3; 5] ... -); -annotated_events = types.ndx_events.AnnotatedEventsTable( ... - 'description', 'annotated events from my experiment', ... - 'colnames', {'bad_event'}, ... - 'bad_event', bad_event_col, ... - 'bad_event_index', bad_event_col_index, ... - 'id', types.hdmf_common.ElementIdentifiers('data', [0; 1]) ... % 0-indexed, for compatibility with Python -); -% place the annotated events table in a "behavior" processing module in the NWB file -behavior_mod = types.core.ProcessingModule('description', 'processed behavioral data'); -behavior_mod.dynamictable.set('AnnotatedEvents', annotated_events); +## Example usage +Python: -nwb = NwbFile( ... - 'session_description', 'mouse in open exploration', ... - 'identifier', 'Mouse5_Day3', ... - 'session_start_time', datetime(2018, 4, 25, 2, 30, 3) ... -); -nwb.processing.set('behavior', behavior_mod); -``` This extension was created using [ndx-template](https://github.com/nwb-extensions/ndx-template). 
diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..6ef711f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,112 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "ndx-events" +version = "0.3.0" +authors = [ + { name="Ryan Ly", email="rly@lbl.gov" } +] +description = "NWB extension for storing timestamped event and TTL pulse data" +readme = "README.md" +requires-python = ">=3.8" +license = {text = "BSD-3"} +classifiers = [ + # TODO: add classifiers before release + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", +] +keywords = [ + 'NeurodataWithoutBorders', + 'NWB', + 'nwb-extension', + 'ndx-extension', +] +dependencies = [ + "pynwb>=2.5.0", + "hdmf>=3.11.0", +] + +# TODO: add URLs before release +[project.urls] +"Homepage" = "https://github.com/rly/ndx-events" +# "Documentation" = "https://package.readthedocs.io/" +"Bug Tracker" = "https://github.com/rly/ndx-events/issues" +"Discussions" = "https://github.com/rly/ndx-events/discussions" +"Changelog" = "https://github.com/rly/ndx-events/CHANGELOG.md" + +[tool.hatch.build] +include = [ + "src/pynwb", + "spec/ndx-events.extensions.yaml", + "spec/ndx-events.namespace.yaml", +] +exclude = [ + "src/pynwb/tests", +] + +[tool.hatch.build.targets.wheel] +packages = [ + "src/pynwb/ndx_events", + "spec" +] + +[tool.hatch.build.targets.wheel.sources] +"spec" = "ndx_events/spec" + +[tool.hatch.build.targets.sdist] +include = [ + "src/pynwb", + "spec/ndx-events.extensions.yaml", + "spec/ndx-events.namespace.yaml", + "docs", +] +exclude = [] + +[tool.pytest.ini_options] +addopts = "--cov 
--cov-report html" + +[tool.codespell] +skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,hdmf-common-schema,./docs/_build/*,*.ipynb" + +[tool.coverage.run] +branch = true +source = ["src/pynwb"] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] + +[tool.black] +line-length = 120 +preview = true +exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb|docs/" + +[tool.ruff] +select = ["E", "F", "T100", "T201", "T203"] +exclude = [ + ".git", + ".tox", + "__pycache__", + "build/", + "dist/", +] +line-length = 120 + +[tool.ruff.per-file-ignores] +"src/spec/create_extension_spec.py" = ["T201"] +"src/pynwb/tests/test_example_usage.py" = ["T201"] + +[tool.ruff.mccabe] +max-complexity = 17 diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..38eb48c --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,15 @@ +# pinned dependencies to reproduce an entire development environment to +# run tests, check code style, and generate documentation +black==23.9.1 +codespell==2.2.6 +coverage==7.3.2 +hdmf==3.11.0 +hdmf-docutils==0.4.6 +pre-commit==3.4.0 +pynwb==2.5.0 +pytest==7.4.2 +pytest-cov==4.1.0 +python-dateutil==2.8.2 +pytest-subtests==0.6.0 +ruff==0.0.292 +tox==4.11.3 diff --git a/requirements-min.txt b/requirements-min.txt new file mode 100644 index 0000000..12925ce --- /dev/null +++ b/requirements-min.txt @@ -0,0 +1,4 @@ +# minimum versions of package dependencies for installation +# these should match the minimum versions specified in pyproject.toml +pynwb==2.5.0 +hdmf==3.11.0 # required for bug fixes for generating some classes diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index d68ad89..0000000 --- a/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -pynwb>=1.1.2 -hdmf_docutils -pytest \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index b3e6c0b..0000000 --- 
a/setup.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[wheel] -universal = 1 - -[flake8] -max-line-length = 120 -max-complexity = 17 -exclude = - .git, - .tox, - __pycache__, - build/, - dist/, - docs/source/conf.py - versioneer.py -per-file-ignores = - src/pynwb/tests/test_example_usage.py:T001 - - -[metadata] -description-file = README.md diff --git a/setup.py b/setup.py deleted file mode 100644 index 29e384d..0000000 --- a/setup.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- - -import os - -from setuptools import setup, find_packages -from shutil import copy2 - -# load README.md/README.rst file -try: - if os.path.exists('README.md'): - with open('README.md', 'r') as fp: - readme = fp.read() - readme_type = 'text/markdown; charset=UTF-8' - elif os.path.exists('README.rst'): - with open('README.rst', 'r') as fp: - readme = fp.read() - readme_type = 'text/x-rst; charset=UTF-8' - else: - readme = "" -except Exception: - readme = "" - -setup_args = { - 'name': 'ndx-events', - 'version': '0.2.0', - 'description': 'NWB extension for storing timestamped event and TTL pulse data', - 'long_description': readme, - 'long_description_content_type': readme_type, - 'author': 'Ryan Ly', - 'author_email': 'rly@lbl.gov', - 'url': 'https://github.com/rly/ndx-events', - 'license': 'BSD 3-Clause', - 'install_requires': [ - 'pynwb>=1.1.2' - ], - 'packages': find_packages('src/pynwb'), - 'package_dir': {'': 'src/pynwb'}, - 'package_data': {'ndx_events': [ - 'spec/ndx-events.namespace.yaml', - 'spec/ndx-events.extensions.yaml', - ]}, - 'classifiers': [ - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - ], - 'zip_safe': False -} - - -def _copy_spec_files(project_dir): - ns_path = os.path.join(project_dir, 'spec', 'ndx-events.namespace.yaml') - ext_path = os.path.join(project_dir, 'spec', 'ndx-events.extensions.yaml') - - dst_dir = os.path.join(project_dir, 'src', 'pynwb', 'ndx_events', 'spec') - if not os.path.exists(dst_dir): - os.mkdir(dst_dir) - - 
copy2(ns_path, dst_dir) - copy2(ext_path, dst_dir) - - -if __name__ == '__main__': - _copy_spec_files(os.path.dirname(__file__)) - setup(**setup_args) diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index d52ba98..d749773 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -1,96 +1,114 @@ -groups: -- neurodata_type_def: Events - neurodata_type_inc: NWBDataInterface - doc: A list of timestamps, stored in seconds, of an event. +datasets: +- neurodata_type_def: TimestampVectorData + neurodata_type_inc: VectorData + dtype: float64 + dims: + - num_times + shape: + - null + doc: A VectorData that stores timestamps in seconds. + attributes: + - name: unit + dtype: text + value: seconds + doc: The unit of measurement for the timestamps, fixed to 'seconds'. + - name: resolution + dtype: float64 + doc: The smallest possible difference between two timestamps. Usually 1 divided + by the sampling rate for timestamps of the data acquisition system. + required: false +- neurodata_type_def: DurationVectorData + neurodata_type_inc: VectorData + dtype: float64 + dims: + - num_events + shape: + - null + doc: A VectorData that stores durations in seconds. attributes: - - name: description + - name: unit + dtype: text + value: seconds + doc: The unit of measurement for the durations, fixed to 'seconds'. + - name: resolution + dtype: float64 + doc: The smallest possible difference between two timestamps. Usually 1 divided + by the sampling rate for timestamps of the data acquisition system. + required: false +groups: +- neurodata_type_def: EventTypesTable + neurodata_type_inc: DynamicTable + default_name: EventTypesTable + doc: A column-based table to store information about each event type, such as name, + one event type per row. + datasets: + - name: event_name + neurodata_type_inc: VectorData + dtype: text + doc: Name of each event type. 
+ - name: event_type_description + neurodata_type_inc: VectorData dtype: text - doc: Description of the event. + doc: Description of each event type. +- neurodata_type_def: EventsTable + neurodata_type_inc: DynamicTable + default_name: EventsTable + doc: A column-based table to store information about events (event instances), one + event per row. Each event must have an event_type, which is a row in the EventTypesTable. + Additional columns may be added to store metadata about each event, such as the + duration of the event, or a text value of the event. datasets: - - name: timestamps - dtype: float32 + - name: timestamp + neurodata_type_inc: TimestampVectorData + doc: The time that each event occurred, in seconds, from the session start time. + - name: event_type + neurodata_type_inc: DynamicTableRegion dims: - num_events shape: - null - doc: Event timestamps, in seconds, relative to the common experiment master-clock - stored in NWBFile.timestamps_reference_time. - attributes: - - name: unit - dtype: text - value: seconds - doc: Unit of measurement for timestamps, which is fixed to 'seconds'. - - name: resolution - dtype: float32 - doc: The smallest possible difference between two event times. Usually 1 divided - by the event time sampling rate on the data acquisition system. - required: false -- neurodata_type_def: LabeledEvents - neurodata_type_inc: Events - doc: A list of timestamps, stored in seconds, of an event that can have different - labels. For example, this type could represent the times that reward was given, - as well as which of three different types of reward was given. In this case, the - 'data' dataset would contain values {0, 1, 2}, its 'labels' attribute would contain - three text elements, where the first (index 0) specifies the name of the reward - associated with data = 0, the second (index 1) specifies the name of the reward - associated with data = 1, etc. The labels do not have to start at 0 and do not - need to be continuous, e.g. 
the 'data' dataset could contain values {0, 10, 100}, - and the 'labels' attribute could contain 101 values, where labels[0] is 'No reward', - labels[10] is '10% reward', labels[100] is 'Full reward', and all other entries - in 'labels' are the empty string. + doc: The type of event that occurred. This is represented as a reference to a + row of the EventTypesTable. + quantity: '?' + - name: duration + neurodata_type_inc: DurationVectorData + doc: Optional column containing the duration of each event, in seconds. + quantity: '?' +- neurodata_type_def: TtlTypesTable + neurodata_type_inc: EventTypesTable + default_name: TtlTypesTable + doc: A column-based table to store information about each TTL type, such as name + and pulse value, one TTL type per row. datasets: - - name: data + - name: pulse_value + neurodata_type_inc: VectorData dtype: uint8 + doc: TTL pulse value for each event type. +- neurodata_type_def: TtlsTable + neurodata_type_inc: EventsTable + default_name: TtlsTable + doc: Data type to hold timestamps of TTL pulses. + datasets: + - name: ttl_type + neurodata_type_inc: DynamicTableRegion dims: - num_events shape: - null - doc: Unsigned integer labels that map onto strings using the mapping in the 'labels' - array attribute. This dataset should have the same number of elements as the - 'timestamps' dataset. - attributes: - - name: labels - dtype: text - dims: - - num_labels - shape: - - null - doc: Mapping from an unsigned integer (the zero-based index) to a string, used - to understand the values in the 'data' dataset. Use an empty string to represent - a label value that is not mapped to any text. -- neurodata_type_def: TTLs - neurodata_type_inc: LabeledEvents - doc: Data type to hold timestamps of TTL pulses. The 'data' dataset contains the - integer pulse values (or channel IDs), and the 'labels' dataset contains user-defined - labels associated with each pulse value (or channel ID). 
The value at index i - of the 'labels' dataset corresponds to a pulse value (or channel ID) of i in the - 'data' dataset. For example, the first value (index 0) of the 'labels' dataset - corresponds to a pulse value of 0. See the LabeledEvents type for more details. -- neurodata_type_def: AnnotatedEventsTable - neurodata_type_inc: DynamicTable - doc: Table to hold event timestamps and event metadata relevant to data preprocessing - and analysis. Each row corresponds to a different event type. Use the 'event_times' - dataset to store timestamps for each event type. Add user-defined columns to add - metadata for each event type or event time. - datasets: - - name: event_times_index - neurodata_type_inc: VectorIndex - doc: Index into the event_times dataset. - - name: event_times - neurodata_type_inc: VectorData - dtype: float32 - doc: Event times for each event type. - attributes: - - name: resolution - dtype: float32 - doc: The smallest possible difference between two event times. Usually 1 divided - by the event time sampling rate on the data acquisition system. - required: false - - name: label - neurodata_type_inc: VectorData - dtype: text - doc: Label for each event type. - - name: event_description - neurodata_type_inc: VectorData - dtype: text - doc: Description for each event type. + doc: The type of TTL that occurred. This is represented as a reference to a row + of the TtlTypesTable. +- neurodata_type_def: Task + neurodata_type_inc: LabMetaData + name: task + doc: A group to store task-related general metadata. TODO When merged with core, + this will no longer inherit from LabMetaData but from NWBContainer and be placed + optionally in /general. + groups: + - name: event_types + neurodata_type_inc: EventTypesTable + doc: Table to store information about each task event type. + quantity: '?' + - name: ttl_types + neurodata_type_inc: TtlTypesTable + doc: Table to store information about each task TTL type. + quantity: '?' 
diff --git a/spec/ndx-events.namespace.yaml b/spec/ndx-events.namespace.yaml index 157abc0..ee73a13 100644 --- a/spec/ndx-events.namespace.yaml +++ b/spec/ndx-events.namespace.yaml @@ -7,10 +7,5 @@ namespaces: name: ndx-events schema: - namespace: core - neurodata_types: - - NWBDataInterface - - DynamicTable - - VectorData - - VectorIndex - source: ndx-events.extensions.yaml - version: 0.2.0 + version: 0.3.0 diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index 689c59c..1a58541 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -1,25 +1,34 @@ import os -from pynwb import load_namespaces +from pynwb import load_namespaces, get_class -# Set path of the namespace.yaml file to the expected install location -ndx_events_specpath = os.path.join( - os.path.dirname(__file__), - 'spec', - 'ndx-events.namespace.yaml' -) +try: + from importlib.resources import files +except ImportError: + # TODO: Remove when python 3.9 becomes the new minimum + from importlib_resources import files -# If the extension has not been installed yet but we are running directly from -# the git repo -if not os.path.exists(ndx_events_specpath): - ndx_events_specpath = os.path.abspath(os.path.join( - os.path.dirname(__file__), - '..', '..', '..', - 'spec', - 'ndx-events.namespace.yaml' - )) +# Get path to the namespace.yaml file with the expected location when installed not in editable mode +__location_of_this_file = files(__name__) +__spec_path = __location_of_this_file / "spec" / "ndx-events.namespace.yaml" + +# If that path does not exist, we are likely running in editable mode. Use the local path instead +if not os.path.exists(__spec_path): + __spec_path = __location_of_this_file.parent.parent.parent / "spec" / "ndx-events.namespace.yaml" # Load the namespace -load_namespaces(ndx_events_specpath) +load_namespaces(str(__spec_path)) + +# TODO: Define your classes here to make them accessible at the package level. 
+# Either have PyNWB generate a class from the spec using `get_class` as shown +# below or write a custom class and register it using the class decorator +# `@register_class("TetrodeSeries", "ndx-hed")` +Task = get_class("Task", "ndx-events") +TimestampVectorData = get_class("TimestampVectorData", "ndx-events") +DurationVectorData = get_class("DurationVectorData", "ndx-events") +EventTypesTable = get_class("EventTypesTable", "ndx-events") +EventsTable = get_class("EventsTable", "ndx-events") +TtlTypesTable = get_class("TtlTypesTable", "ndx-events") +TtlsTable = get_class("TtlsTable", "ndx-events") -from . import io as __io # noqa: E402,F401 -from .events import Events, LabeledEvents, TTLs, AnnotatedEventsTable # noqa: E402,F401 +# Remove these functions from the package +del load_namespaces, get_class diff --git a/src/pynwb/ndx_events/events.py b/src/pynwb/ndx_events/events.py deleted file mode 100644 index 50c0985..0000000 --- a/src/pynwb/ndx_events/events.py +++ /dev/null @@ -1,165 +0,0 @@ -import numpy as np - -from pynwb import register_class -from pynwb.core import NWBDataInterface, DynamicTable -from hdmf.utils import docval, getargs, popargs, get_docval - - -@register_class('Events', 'ndx-events') -class Events(NWBDataInterface): - """ - A list of timestamps, stored in seconds, of an event. - """ - - __nwbfields__ = ('description', - 'timestamps', - 'resolution', - {'name': 'unit', 'settable': False}) - - @docval({'name': 'name', 'type': str, 'doc': 'The name of this Events object'}, # required - {'name': 'description', 'type': str, 'doc': 'The name of this Events object'}, # required - {'name': 'timestamps', 'type': ('array_data', 'data'), # required - 'doc': ('Event timestamps, in seconds, relative to the common experiment master-clock ' - 'stored in NWBFile.timestamps_reference_time.'), - 'shape': (None,)}, - {'name': 'resolution', 'type': float, - 'doc': ('The smallest possible difference between two event times. 
Usually 1 divided ' - 'by the event time sampling rate on the data acquisition system.'), - 'default': None}) - def __init__(self, **kwargs): - description, timestamps, resolution = popargs('description', 'timestamps', 'resolution', kwargs) - super().__init__(**kwargs) - self.description = description - self.timestamps = timestamps - self.resolution = resolution - self.fields['unit'] = 'seconds' - - -@register_class('LabeledEvents', 'ndx-events') -class LabeledEvents(Events): - """ - A list of timestamps, stored in seconds, of an event that can have different - labels. For example, this type could represent the times that reward was given, - as well as which of three different types of reward was given. In this case, the - 'data' dataset would contain values {0, 1, 2}, and the 'labels' dataset - would contain three text elements, where the first (index 0) specifies the name - of the reward associated with data = 0, the second (index 1) specifies - the name of the reward associated with data = 1, etc. The labels do not - have to start at 0 and do not need to be sequential, e.g. the 'data' dataset - could contain values {0, 10, 100}, and the 'labels' dataset could contain 101 - values, where labels[0] is 'No reward', labels[10] is '10% reward', labels[100] - is 'Full reward', and all other entries in 'labels' are the empty string. - """ - - __nwbfields__ = ('data', - 'labels') - - @docval(*get_docval(Events.__init__, 'name', 'description', 'timestamps'), # required - {'name': 'data', 'type': ('array_data', 'data'), # required - 'doc': ("Unsigned integer labels that map onto strings using the mapping in the 'labels' dataset. " - "Values must be 0 or greater and need not be sequential. If a list/tuple/array of integer values " - "is passed, it will be converted to a numpy array of unsigned integer values. 
This dataset should " - "have the same number of elements as the 'timestamps' dataset."), - 'shape': (None,)}, - {'name': 'labels', 'type': ('array_data', 'data'), - 'doc': ("Mapping from an integer (the zero-based index) to a string, used to understand " - "the integer values in the 'data' dataset. Use an empty string to represent " - "a label value that is not mapped to any text. Use '' to represent any values " - "that are None or empty. If the argument is not specified, the label " - "will be set to the string representation of the data value and '' for other values."), - 'shape': (None,), 'default': None}, - *get_docval(Events.__init__, 'resolution')) - def __init__(self, **kwargs): - timestamps = getargs('timestamps', kwargs) - data, labels = popargs('data', 'labels', kwargs) - super().__init__(**kwargs) - if len(timestamps) != len(data): - raise ValueError('Timestamps and data must have the same length: %d != %d' - % (len(timestamps), len(data))) - data = self.__check_label_indices_uint(data) - self.data = data - if labels is None: - unique_indices = np.unique(data) - self.labels = [''] * int(max(unique_indices) + 1) - for k in unique_indices: - self.labels[k] = str(k) - else: - if None in labels: - raise ValueError("None values are not allowed in the labels array. Please use '' for undefined labels.") - self.labels = labels - - def __check_label_indices_uint(self, data): - """Convert a list/tuple of integer label indices to a numpy array of unsigned integers. Raise error if negative - or non-numeric values are found. If something other than a list/tuple/np.ndarray of ints or unsigned ints - is provided, return the original array. 
- """ - new_data = data - if isinstance(new_data, (list, tuple)): - new_data = np.array(new_data) - if isinstance(new_data, np.ndarray): - if not np.issubdtype(new_data.dtype, np.number): - raise ValueError("'data' must be an array of numeric values that have type unsigned int or " - "can be converted to unsigned int, not type %s" % new_data.dtype) - if np.issubdtype(new_data.dtype, np.unsignedinteger): - return new_data - if (new_data < 0).any(): - raise ValueError("Negative values are not allowed in 'data'.") - if np.issubdtype(new_data.dtype, np.integer): - return new_data.astype(np.uint) - # all other array dtypes will not be handled. the objectmapper will attempt to convert the data - return data - - -@register_class('TTLs', 'ndx-events') -class TTLs(LabeledEvents): - """ - Data type to hold timestamps of TTL pulses. The 'data' dataset contains the integer pulse values - (or channel IDs), and the 'labels' dataset contains user-defined labels associated with each pulse - value (or channel ID). The value at index i of the 'labels' dataset corresponds to a pulse value (or - channel ID) of i in the 'data' dataset. For example, the first value (index 0) of the 'labels' dataset - corresponds to a pulse value of 0. See the LabeledEvents type for more details. - """ - pass - - -@register_class('AnnotatedEventsTable', 'ndx-events') -class AnnotatedEventsTable(DynamicTable): - """ - Table to hold event timestamps and event metadata relevant to data preprocessing - and analysis. Each row corresponds to a different event type. Use the 'event_time' - dataset to store timestamps for each event type. Add user-defined columns to add - metadata for each event type or event time. 
- """ - - __fields__ = ( - 'resolution', - ) - - __columns__ = ( - {'name': 'event_times', 'description': 'Event times for each event type.', 'index': True}, - {'name': 'label', 'description': 'Label for each event type.'}, - {'name': 'event_description', 'description': 'Description for each event type.'} - # note that the name 'description' cannot be used because it is already an attribute on VectorData - ) - - @docval({'name': 'description', 'type': str, 'doc': 'Description of what is in this table'}, - {'name': 'name', 'type': str, 'doc': 'Name of this AnnotatedEventsTable', - 'default': 'AnnotatedEventsTable'}, - {'name': 'resolution', 'type': float, - 'doc': ('The smallest possible difference between two event times. Usually 1 divided ' - 'by the event time sampling rate on the data acquisition system.'), - 'default': None}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) - def __init__(self, **kwargs): - resolution = popargs('resolution', kwargs) - super().__init__(**kwargs) - self.resolution = resolution - - @docval({'name': 'label', 'type': str, 'doc': 'Label for each event type.'}, - {'name': 'event_description', 'type': str, 'doc': 'Description for each event type.'}, - {'name': 'event_times', 'type': 'array_data', 'doc': 'Event times for each event type.', 'shape': (None,)}, - {'name': 'id', 'type': int, 'doc': 'ID for each unit', 'default': None}, - allow_extra=True) - def add_event_type(self, **kwargs): - """Add an event type as a row to this table.""" - super().add_row(**kwargs) diff --git a/src/pynwb/ndx_events/io/__init__.py b/src/pynwb/ndx_events/io/__init__.py deleted file mode 100644 index 24af8ee..0000000 --- a/src/pynwb/ndx_events/io/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . 
import events as __events # noqa: E402,F401 diff --git a/src/pynwb/ndx_events/io/events.py b/src/pynwb/ndx_events/io/events.py deleted file mode 100644 index 59f7906..0000000 --- a/src/pynwb/ndx_events/io/events.py +++ /dev/null @@ -1,71 +0,0 @@ -from pynwb import register_map -from pynwb.io.core import NWBContainerMapper -from hdmf.common.io.table import DynamicTableMap -from hdmf.build import ObjectMapper, BuildManager -from hdmf.common import VectorData -from hdmf.utils import getargs, docval -from hdmf.spec import AttributeSpec - -from ..events import Events, LabeledEvents, AnnotatedEventsTable - - -@register_map(Events) -class EventsMap(NWBContainerMapper): - - def __init__(self, spec): - super().__init__(spec) - # map object attribute Events.unit -> spec Events/timestamps.unit - # map object attribute Events.resolution -> spec Events/timestamps.resolution - timestamps_spec = self.spec.get_dataset('timestamps') - self.map_spec('unit', timestamps_spec.get_attribute('unit')) - self.map_spec('resolution', timestamps_spec.get_attribute('resolution')) - - -@register_map(LabeledEvents) -class LabeledEventsMap(EventsMap): - - def __init__(self, spec): - super().__init__(spec) - # map object attribute LabeledEvents.labels -> spec LabeledEvents/data.labels - data_spec = self.spec.get_dataset('data') - self.map_spec('labels', data_spec.get_attribute('labels')) - - -@register_map(AnnotatedEventsTable) -class AnnotatedEventsTableMap(DynamicTableMap): - - def __init__(self, spec): - super().__init__(spec) - # map object attribute AnnotatedEventsTable.resolution -> spec AnnotatedEventsTable/event_times.resolution - event_times_spec = self.spec.get_dataset('event_times') - self.map_spec('resolution', event_times_spec.get_attribute('resolution')) - - @DynamicTableMap.constructor_arg('resolution') - def resolution_carg(self, builder, manager): - # on construct, map builder for AnnotatedEventsTable.datasets['event_times'].attributes['resolution'] - # -> 
AnnotatedEventsTable.__init__ argument 'resolution' - if 'event_times' in builder: - return builder['event_times'].attributes.get('resolution') - return None - - -@register_map(VectorData) -class VectorDataMap(ObjectMapper): - - # TODO when merging into NWB core, fold this into pynwb.io.core.VectorDataMap - - @docval({"name": "spec", "type": AttributeSpec, "doc": "the spec to get the attribute value for"}, - {"name": "container", "type": VectorData, "doc": "the container to get the attribute value from"}, - {"name": "manager", "type": BuildManager, "doc": "the BuildManager used for managing this build"}, - returns='the value of the attribute') - def get_attr_value(self, **kwargs): - ''' Get the value of the attribute corresponding to this spec from the given container ''' - spec, container, manager = getargs('spec', 'container', 'manager', kwargs) - - # on build of VectorData objects, map object attribute AnnotatedEventsTable.resolution - # -> spec AnnotatedEventsTable/event_times.resolution - if isinstance(container.parent, AnnotatedEventsTable): - if container.name == 'event_times': - if spec.name == 'resolution': - return container.parent.resolution - return super().get_attr_value(**kwargs) diff --git a/src/pynwb/tests/integration/__init__.py b/src/pynwb/tests/integration/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/pynwb/tests/integration/hdf5/__init__.py b/src/pynwb/tests/integration/hdf5/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/pynwb/tests/integration/hdf5/test_events.py b/src/pynwb/tests/integration/hdf5/test_events.py deleted file mode 100644 index 12ca9d6..0000000 --- a/src/pynwb/tests/integration/hdf5/test_events.py +++ /dev/null @@ -1,169 +0,0 @@ -import datetime -import numpy as np -from pynwb import NWBFile, NWBHDF5IO -from pynwb.testing import AcquisitionH5IOMixin, TestCase, remove_test_file - -from ndx_events import Events, LabeledEvents, TTLs, AnnotatedEventsTable - - -class 
TestEventsIOSimple(TestCase): - """Simple roundtrip test for CSD.""" - - def setUp(self): - self.nwbfile = NWBFile( - session_description='session_description', - identifier='identifier', - session_start_time=datetime.datetime.now(datetime.timezone.utc) - ) - self.path = 'test.nwb' - - def tearDown(self): - remove_test_file(self.path) - - def test_roundtrip(self): - """ - Add a CSD to an "ecephys" processing module in the NWBFile, write it to file, read the file, and test that the - CSD from the file matches the original CSD. - """ - - events = Events( - name='Events', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5 - ) - self.nwbfile.add_acquisition(events) - - labeled_events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - self.nwbfile.add_acquisition(labeled_events) - - ttls = TTLs( - name='TTLs', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - self.nwbfile.add_acquisition(ttls) - - annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 - ) - annotated_events.add_column( - name='extra', - description='extra metadata for each event type' - ) - annotated_events.add_event_type( - label='Reward', - event_description='Times when the animal received juice reward.', - event_times=[1., 2., 3.], - extra='extra', - id=3 - ) - annotated_events.add_event_type( - label='Nosepoke', - event_description='Times when the animal poked its noise through the input port.', - event_times=[1., 2., 3.], - extra='extra', - id=5 - ) - - events_module = self.nwbfile.create_processing_module( - name='events', - description='processed events data' - ) - events_module.add(annotated_events) - - with 
NWBHDF5IO(self.path, mode='w') as io: - io.write(self.nwbfile) - - with NWBHDF5IO(self.path, mode='r', load_namespaces=True) as io: - read_nwbfile = io.read() - self.assertContainerEqual(events, read_nwbfile.acquisition['Events']) - self.assertContainerEqual(labeled_events, read_nwbfile.acquisition['LabeledEvents']) - self.assertContainerEqual(ttls, read_nwbfile.acquisition['TTLs']) - self.assertContainerEqual(annotated_events, read_nwbfile.processing['events']['AnnotatedEventsTable']) - - -class TestEventsIO(AcquisitionH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Events to read/write """ - events = Events( - name='Events', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5 - ) - return events - - -class TestLabeledEventsIO(AcquisitionH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Events to read/write """ - labeled_events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - return labeled_events - - -class TestTTLs(AcquisitionH5IOMixin, TestCase): - - def setUpContainer(self): - """ Return the test Events to read/write """ - ttls = TTLs( - name='TTLs', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - return ttls - - -class TestAnnotatedEventsTableIO(AcquisitionH5IOMixin, TestCase): - """ Test adding AnnotatedEventsTable into acquisition and accessing AnnotatedEvents after read """ - - def setUpContainer(self): - """ Return the test AnnotatedEventsTable to read/write """ - annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 - ) - annotated_events.add_column( - name='extra', - description='extra metadata for each event type' 
- ) - annotated_events.add_event_type( - label='Reward', - event_description='Times when the animal received juice reward.', - event_times=[1., 2., 3.], - extra='extra', - id=3 - ) - annotated_events.add_event_type( - label='Nosepoke', - event_description='Times when the animal poked its noise through the input port.', - event_times=[1., 2., 3.], - extra='extra', - id=5 - ) - return annotated_events diff --git a/src/pynwb/tests/test_events.py b/src/pynwb/tests/test_events.py new file mode 100644 index 0000000..0709041 --- /dev/null +++ b/src/pynwb/tests/test_events.py @@ -0,0 +1,587 @@ +from hdmf.common import DynamicTable +import numpy as np +from pynwb import NWBHDF5IO +from pynwb.testing import TestCase, remove_test_file +from pynwb.testing.mock.file import mock_NWBFile + +from ndx_events import ( + EventsTable, + EventTypesTable, + TtlsTable, + TtlTypesTable, + Task, + DurationVectorData, + TimestampVectorData, +) + + +class TestTimestampVectorData(TestCase): + def test_init(self): + data = TimestampVectorData(name="test", description="description") + assert data.name == "test" + assert data.description == "description" + assert data.unit == "seconds" + assert data.resolution is None + + def test_add_to_dynamic_table(self): + col = TimestampVectorData(name="test", description="description") + table = DynamicTable(name="table", description="test", columns=[col]) + table.add_row(test=0.1) + assert table.test is col + assert table.test[0] == 0.1 + + def test_set_resolution_init(self): + data = TimestampVectorData(name="test", description="description", resolution=1 / 32000.0) + assert data.resolution == 1 / 32000.0 + + def test_set_resolution_attr(self): + data = TimestampVectorData(name="test", description="description") + data.resolution = 1 / 32000.0 + assert data.resolution == 1 / 32000.0 + + +class TestTimestampVectorDataSimpleRoundtrip(TestCase): + """Simple roundtrip test for TimestampVectorData.""" + + def setUp(self): + self.path = "test.nwb" + + def 
tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a TimestampVectorData, write it to file, read the file, and test that the read object matches the + original. + """ + col = TimestampVectorData(name="test", description="description") + table = DynamicTable(name="table", description="description", columns=[col]) + table.add_row(test=0.1) + + nwbfile = mock_NWBFile() + nwbfile.add_acquisition(table) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_col = read_nwbfile.acquisition["table"]["test"] + assert isinstance(read_col, TimestampVectorData) + assert read_col.name == "test" + assert read_col.description == "description" + assert read_col.unit == "seconds" + assert read_col[0] == 0.1 + + +class TestDurationVectorData(TestCase): + def test_init(self): + data = DurationVectorData(name="test", description="description") + assert data.name == "test" + assert data.description == "description" + assert data.unit == "seconds" + + def test_add_to_dynamic_table(self): + col = DurationVectorData(name="test", description="description") + table = DynamicTable(name="table", description="test", columns=[col]) + table.add_row(test=0.1) + assert table.test is col + assert table.test[0] == 0.1 + + +class TestDurationVectorDataSimpleRoundtrip(TestCase): + """Simple roundtrip test for DurationVectorData.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a DurationVectorData, write it to file, read the file, and test that the read object matches the + original. 
+ """ + col = DurationVectorData(name="test", description="description") + table = DynamicTable(name="table", description="description", columns=[col]) + table.add_row(test=0.1) + + nwbfile = mock_NWBFile() + nwbfile.add_acquisition(table) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_col = read_nwbfile.acquisition["table"]["test"] + assert isinstance(read_col, DurationVectorData) + assert read_col.name == "test" + assert read_col.description == "description" + assert read_col.unit == "seconds" + assert read_col[0] == 0.1 + + +class TestTask(TestCase): + def test_init(self): + task = Task() + assert task.name == "task" + + def test_add_to_nwbfile(self): + nwbfile = mock_NWBFile() + task = Task() + nwbfile.add_lab_meta_data(task) + assert nwbfile.get_lab_meta_data("task") is task + assert nwbfile.lab_meta_data["task"] is task + + +class TestTaskSimpleRoundtrip(TestCase): + """Simple roundtrip test for Task.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a Task, write it to file, read the file, and test that the read object matches the original. 
+ """ + task = Task() + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + assert isinstance(read_nwbfile.get_lab_meta_data("task"), Task) + assert read_nwbfile.get_lab_meta_data("task").name == "task" + assert read_nwbfile.lab_meta_data["task"].name == "task" + + +class TestEventTypesTable(TestCase): + def test_init(self): + event_types_table = EventTypesTable(description="Metadata about event types") + assert event_types_table.name == "EventTypesTable" + assert event_types_table.description == "Metadata about event types" + + def test_init_name(self): + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + assert event_types_table.name == "event_types" + assert event_types_table.description == "Metadata about event types" + + def test_add_row(self): + event_types_table = EventTypesTable(description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + ) + assert event_types_table["event_name"].data == ["cue on", "stimulus on"] + assert event_types_table["event_type_description"].data == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + + +class TestEventTypesTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for EventTypesTable.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create an EventTypesTable, write it to file, read the file, and test that the read table matches the original. 
+ """ + # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable + # must be named "event_types" according to the spec + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + ) + task = Task() + task.event_types = event_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_event_types_table = read_nwbfile.get_lab_meta_data("task").event_types + assert isinstance(read_event_types_table, EventTypesTable) + assert read_event_types_table.name == "event_types" + assert read_event_types_table.description == "Metadata about event types" + assert all(read_event_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) + assert all( + read_event_types_table["event_type_description"].data[:] + == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + ) + + +class TestEventsTable(TestCase): + def test_init(self): + events_table = EventsTable(description="Metadata about events") + assert events_table.name == "EventsTable" + assert events_table.description == "Metadata about events" + + def test_init_dtr(self): + event_types_table = EventTypesTable(description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + ) + + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + 
assert events_table["event_type"].table is event_types_table + + def test_add_row(self): + event_types_table = EventTypesTable(description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], + ) + + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_column(name="cue_type", description="The cue type.") + events_table.add_column(name="stimulus_type", description="The stimulus type.") + events_table.add_row( + timestamp=0.1, + cue_type="white circle", + stimulus_type="", + event_type=0, + duration=0.1, + # hed_tags=["(White, Circle)"], + ) + events_table.add_row( + timestamp=0.3, + cue_type="", + stimulus_type="animal", + event_type=1, + duration=0.15, + ) + events_table.add_row( + timestamp=1.1, + cue_type="green square", + stimulus_type="", + event_type=0, + duration=0.1, + # hed_tags=["(Green, Square)"], + ) + events_table.add_row( + timestamp=1.3, + cue_type="", + stimulus_type="landscape", + event_type=1, + duration=0.15, + ) + assert events_table["timestamp"].data == [0.1, 0.3, 1.1, 1.3] + assert events_table["cue_type"].data == ["white circle", "", "green square", ""] + assert events_table["stimulus_type"].data == ["", "animal", "", "landscape"] + assert events_table["duration"].data == [0.1, 0.15, 0.1, 0.15] + assert events_table["event_type"].data == [0, 1, 0, 1] + # assert events_table["hed_tags"][0] == ["(White, Circle)"] + # assert events_table["hed_tags"][2] == ["(Green, Square)"] + + +class TestEventsTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for EventsTable.""" + + def setUp(self): + 
self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create an EventsTable, write it to file, read the file, and test that the read table matches the original. + """ + # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable + # must be named "event_types" according to the spec + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + event_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + # hed_tags=["Sensory-event", "(Intended-effect, Cue)"], + ) + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + # hed_tags=["Sensory-event", "Experimental-stimulus", "Visual-presentation", "Image", "Face"], + ) + + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_column(name="cue_type", description="The cue type.") + events_table.add_column(name="stimulus_type", description="The stimulus type.") + events_table.add_row( + timestamp=0.1, + cue_type="white circle", + stimulus_type="", + event_type=0, + duration=0.1, + # hed_tags=["(White, Circle)"], + ) + events_table.add_row( + timestamp=0.3, + cue_type="", + stimulus_type="animal", + event_type=1, + duration=0.15, + ) + events_table.add_row( + timestamp=1.1, + cue_type="green square", + stimulus_type="", + event_type=0, + duration=0.1, + # hed_tags=["(Green, Square)"], + ) + events_table.add_row( + timestamp=1.3, + cue_type="", + stimulus_type="landscape", + event_type=1, + duration=0.15, + ) + + task = Task() + task.event_types = event_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + nwbfile.add_acquisition(events_table) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = 
io.read() + read_event_types_table = read_nwbfile.get_lab_meta_data("task").event_types + read_events_table = read_nwbfile.acquisition["EventsTable"] + assert isinstance(read_events_table, EventsTable) + assert read_events_table.name == "EventsTable" + assert read_events_table.description == "Metadata about events" + assert all(read_events_table["timestamp"].data[:] == [0.1, 0.3, 1.1, 1.3]) + assert all(read_events_table["cue_type"].data[:] == ["white circle", "", "green square", ""]) + assert all(read_events_table["stimulus_type"].data[:] == ["", "animal", "", "landscape"]) + assert all(read_events_table["duration"].data[:] == [0.1, 0.15, 0.1, 0.15]) + assert all(read_events_table["event_type"].data[:] == [0, 1, 0, 1]) + assert read_events_table["event_type"].table is read_event_types_table + + +class TestTtlTypesTable(TestCase): + def test_init(self): + ttl_types_table = TtlTypesTable(description="Metadata about TTL types") + assert ttl_types_table.name == "TtlTypesTable" + assert ttl_types_table.description == "Metadata about TTL types" + + def test_init_name(self): + ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types") + assert ttl_types_table.name == "ttl_types" + assert ttl_types_table.description == "Metadata about TTL types" + + def test_add_row(self): + ttl_types_table = TtlTypesTable(description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + assert ttl_types_table["event_name"].data == ["cue on", "stimulus on"] + assert ttl_types_table["event_type_description"].data == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + assert all(ttl_types_table["pulse_value"].data == np.uint([1, 2])) + + +class 
TestTtlTypesTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for TtlTypesTable.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create an TtlTypesTable, write it to file, read the file, and test that the read table matches the original. + """ + # NOTE that when adding an TtlTypesTable to a Task, the TtlTypesTable + # must be named "ttl_types" according to the spec + ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + task = Task() + task.ttl_types = ttl_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_ttl_types_table = read_nwbfile.get_lab_meta_data("task").ttl_types + assert isinstance(read_ttl_types_table, EventTypesTable) + assert read_ttl_types_table.name == "ttl_types" + assert read_ttl_types_table.description == "Metadata about TTL types" + assert all(read_ttl_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) + assert all( + read_ttl_types_table["event_type_description"].data[:] + == [ + "Times when the cue was on screen.", + "Times when the stimulus was on screen.", + ] + ) + assert all(read_ttl_types_table["pulse_value"].data[:] == np.uint([1, 2])) + + +class TestTtlsTable(TestCase): + def test_init(self): + ttls_table = TtlsTable(description="Metadata about TTLs") + assert ttls_table.name == "TtlsTable" + assert ttls_table.description == "Metadata about TTLs" + + def test_init_dtr(self): + ttl_types_table = 
TtlTypesTable(description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + assert ttls_table["ttl_type"].table is ttl_types_table + + def test_add_row(self): + ttl_types_table = TtlTypesTable(description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + ttls_table.add_row( + timestamp=0.1, + ttl_type=0, + ) + ttls_table.add_row( + timestamp=1.1, + ttl_type=0, + ) + assert ttls_table["timestamp"].data == [0.1, 1.1] + assert ttls_table["ttl_type"].data == [0, 0] + + +class TestTtlsTableSimpleRoundtrip(TestCase): + """Simple roundtrip test for TtlsTable.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a TtlsTable, write it to file, read the file, and test that the read table matches the original. 
+ """ + # NOTE that when adding an TtlTypesTable to a Task, the TtlTypesTable + # must be named "ttl_types" according to the spec + ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="cue on", + event_type_description="Times when the cue was on screen.", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen.", + pulse_value=np.uint(2), + ) + + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + ttls_table.add_row( + timestamp=0.1, + ttl_type=0, + ) + ttls_table.add_row( + timestamp=1.1, + ttl_type=0, + ) + + task = Task() + task.ttl_types = ttl_types_table + nwbfile = mock_NWBFile() + nwbfile.add_lab_meta_data(task) + nwbfile.add_acquisition(ttls_table) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_ttl_types_table = read_nwbfile.get_lab_meta_data("task").ttl_types + read_ttls_table = read_nwbfile.acquisition["TtlsTable"] + assert isinstance(read_ttls_table, TtlsTable) + assert read_ttls_table.name == "TtlsTable" + assert read_ttls_table.description == "Metadata about TTLs" + assert all(read_ttls_table["timestamp"].data[:] == [0.1, 1.1]) + assert all(read_ttls_table["ttl_type"].data[:] == [0, 0]) + assert read_ttls_table["ttl_type"].table is read_ttl_types_table diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index 4be8ffe..1518a19 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -1,72 +1,239 @@ -def test_example_usage(): +def test_example_usage1(): from datetime import datetime - + from ndx_events import EventsTable, EventTypesTable, TtlsTable, TtlTypesTable, Task + import numpy as np from pynwb import NWBFile, NWBHDF5IO - from 
ndx_events import LabeledEvents, AnnotatedEventsTable - nwb = NWBFile( - session_description='session description', - identifier='cool_experiment_001', - session_start_time=datetime.now().astimezone() + nwbfile = NWBFile( + session_description="session description", + identifier="cool_experiment_001", + session_start_time=datetime.now().astimezone(), ) - # create a new LabeledEvents type to hold events recorded from the data acquisition system - events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 0.5, 0.6, 2., 2.05, 3., 3.5, 3.6, 4.], - resolution=1e-5, # resolution of the timestamps, i.e., smallest possible difference between timestamps - data=[0, 1, 2, 3, 5, 0, 1, 2, 4], - labels=['trial_start', 'cue_onset', 'cue_offset', 'response_left', 'response_right', 'reward'] + # in this experiment, TTL pulses were sent by the stimulus computer + # to signal important time markers during the experiment/trial, + # when the stimulus was placed on the screen and removed from the screen, + # when the question appeared, and the responses of the subject. 
+ + # ref: https://www.nature.com/articles/s41597-020-0415-9, DANDI:000004 + + # NOTE that when adding a TtlTypesTable to a Task, the TtlTypesTable + # must be named "ttl_types" according to the spec + ttl_types_table = TtlTypesTable(name="ttl_types", description="Metadata about TTL types") + ttl_types_table.add_row( + event_name="start experiment", + event_type_description="Start of experiment", + pulse_value=np.uint(55), + ) + ttl_types_table.add_row( + event_name="stimulus onset", + event_type_description="Stimulus onset", + pulse_value=np.uint(1), + ) + ttl_types_table.add_row( + event_name="stimulus offset", + event_type_description="Stimulus offset", + pulse_value=np.uint(2), + ) + ttl_types_table.add_row( + event_name="question onset", + event_type_description="Question screen onset", + pulse_value=np.uint(3), + ) + learning_response_description = ( + "During the learning phase, subjects are instructed to respond to the following " + "question: 'Is this an animal?' in each trial. Responses are encoded as 'Yes, this " + "is an animal' (20) and 'No, this is not an animal' (21)." + ) + ttl_types_table.add_row( + event_name="yes response during learning", + event_type_description=learning_response_description, + pulse_value=np.uint(20), + ) + ttl_types_table.add_row( + event_name="no response during learning", + event_type_description=learning_response_description, + pulse_value=np.uint(21), + ) + recognition_response_description = ( + "During the recognition phase, subjects are instructed to respond to the following " + "question: 'Have you seen this image before?' in each trial. Responses are encoded " + "as: 31 (new, confident), 32 (new, probably), 33 (new, guess), 34 (old, guess), 35 " + "(old, probably), 36 (old, confident)." 
+ ) + ttl_types_table.add_row( + event_name="(new, confident) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(31), + ) + ttl_types_table.add_row( + event_name="(new, probably) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(32), + ) + ttl_types_table.add_row( + event_name="(new, guess) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(33), + ) + ttl_types_table.add_row( + event_name="(old, guess) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(34), + ) + ttl_types_table.add_row( + event_name="(old, probably) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(35), + ) + ttl_types_table.add_row( + event_name="(old, confident) response during recognition", + event_type_description=recognition_response_description, + pulse_value=np.uint(36), + ) + ttl_types_table.add_row( + event_name="end trial", + event_type_description="End of trial", + pulse_value=np.uint(6), + ) + ttl_types_table.add_row( + event_name="end experiment", + event_type_description="End of experiment", + pulse_value=np.uint(66), ) - # add the LabeledEvents type to the acquisition group of the NWB file - nwb.add_acquisition(events) + ttls_table = TtlsTable(description="Metadata about TTLs", target_tables={"ttl_type": ttl_types_table}) + ttls_table.add_row( + timestamp=6820.092244, + ttl_type=0, # NOT the pulse value, but a row index into the ttl_types_table + ) + ttls_table.add_row( + timestamp=6821.208244, + ttl_type=1, + ) + ttls_table.add_row( + timestamp=6822.210644, + ttl_type=2, + ) + ttls_table.add_row( + timestamp=6822.711364, + ttl_type=3, + ) + ttls_table.add_row( + timestamp=6825.934244, + ttl_type=6, + ) + ttls_table.timestamp.resolution = 1 / 50000.0 # specify the resolution of the 
timestamps (optional) + + # if TTLs are recorded, then the events table should hold any non-TTL events + # recorded by the acquisition system + # OR the events table can hold more processed information than the TTLs table + # e.g., converting stimulus onset and offset into a single stimulus event with metadata. + # this may be redundant with information in the trials table if the task is + # structured into trials + + # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable + # must be named "event_types" according to the spec + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + event_types_table.add_row( + event_name="stimulus on", + event_type_description="Times when the stimulus was on screen", + ) - # create a new AnnotatedEventsTable type to hold annotated events - # each row of the table represents a single event type - annotated_events = AnnotatedEventsTable( - name='AnnotatedEventsTable', - description='annotated events from my experiment', - resolution=1e-5 # resolution of the timestamps, i.e., smallest possible difference between timestamps + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + events_table.add_column(name="category_name", description="Name of the category of the stimulus") + events_table.add_column( + name="stimulus_image_index", description="Frame index of the stimulus image in the StimulusPresentation object" ) - # add a custom indexed (ragged) column to represent whether each event time was a bad event - annotated_events.add_column( - name='bad_event', - description='whether each event time should be excluded', - index=True + events_table.add_row( + timestamp=6821.208244, + category_name="smallAnimal", + stimulus_image_index=0, + event_type=0, + duration=1.0024, # this comes from the stimulus onset and offset TTLs ) - # add an event type (row) to the AnnotatedEventsTable instance - 
annotated_events.add_event_type( - label='Reward', - event_description='Times when the subject received juice reward.', - event_times=[1., 2., 3.], - bad_event=[False, False, True], - id=3 + events_table.add_row( + timestamp=6821.208244, + category_name="phones", + stimulus_image_index=1, + event_type=0, + duration=0.99484, + ) + events_table.timestamp.resolution = 1 / 50000.0 # specify the resolution of the timestamps (optional) + events_table.duration.resolution = 1 / 50000.0 # specify the resolution of the durations (optional) + + task = Task() + task.event_types = event_types_table + task.ttl_types = ttl_types_table + nwbfile.add_lab_meta_data(task) + nwbfile.add_acquisition(events_table) + nwbfile.add_acquisition(ttls_table) + + # write nwb file + filename = "test.nwb" + with NWBHDF5IO(filename, "w") as io: + io.write(nwbfile) + + # read nwb file and check its contents + with NWBHDF5IO(filename, "r", load_namespaces=True) as io: + read_nwbfile = io.read() + print(read_nwbfile) + # access the events table, ttls table, event types table, and ttl types table and print them + print(read_nwbfile.get_lab_meta_data("task").event_types.to_dataframe()) + print(read_nwbfile.acquisition["EventsTable"].to_dataframe()) + print(read_nwbfile.get_lab_meta_data("task").ttl_types.to_dataframe()) + print(read_nwbfile.acquisition["TtlsTable"].to_dataframe()) + + +def test_example_usage2(): + """Example storing lick times""" + from datetime import datetime + from ndx_events import EventsTable, EventTypesTable, Task + import numpy as np + from pynwb import NWBFile, NWBHDF5IO + + nwbfile = NWBFile( + session_description="session description", + identifier="cool_experiment_001", + session_start_time=datetime.now().astimezone(), ) - # convert the AnnotatedEventsTable to a pandas.DataFrame and print it - print(annotated_events.to_dataframe()) - # create a processing module in the NWB file to hold processed events data - events_module = nwb.create_processing_module( - name='events', - 
description='processed event data' + # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable + # must be named "event_types" according to the spec + event_types_table = EventTypesTable(name="event_types", description="Metadata about event types") + event_types_table.add_row( + event_name="lick", + event_type_description="Times when the subject licked the port", ) - # add the AnnotatedEventsTable instance to the processing module - events_module.add(annotated_events) + # create a random sorted array of 1000 lick timestamps (dtype=float) from 0 to 3600 seconds + lick_times = sorted(np.random.uniform(0, 3600, 1000)) + + events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) + for t in lick_times: + # event_type=0 corresponds to the first row in the event_types_table + events_table.add_row(timestamp=t, event_type=0) + events_table.timestamp.resolution = 1 / 30000.0 # licks were detected at 30 kHz + + task = Task() + task.event_types = event_types_table + nwbfile.add_lab_meta_data(task) + nwbfile.add_acquisition(events_table) # write nwb file - filename = 'test.nwb' - with NWBHDF5IO(filename, 'w') as io: - io.write(nwb) + filename = "test.nwb" + with NWBHDF5IO(filename, "w") as io: + io.write(nwbfile) # read nwb file and check its contents - with NWBHDF5IO(filename, 'r', load_namespaces=True) as io: - nwb = io.read() - print(nwb) - # access the LabeledEvents container by name from the NWBFile acquisition group and print it - print(nwb.acquisition['LabeledEvents']) - # access the AnnotatedEventsTable by name from the 'events' processing module, convert it to - # a pandas.DataFrame, and print that - print(nwb.processing['events']['AnnotatedEventsTable'].to_dataframe()) + with NWBHDF5IO(filename, "r", load_namespaces=True) as io: + read_nwbfile = io.read() + print(read_nwbfile) + # access the events table and event types table and print them + 
print(read_nwbfile.get_lab_meta_data("task").event_types.to_dataframe()) + print(read_nwbfile.acquisition["EventsTable"].to_dataframe()) + + +if __name__ == "__main__": + test_example_usage1() + test_example_usage2() diff --git a/src/pynwb/tests/unit/__init__.py b/src/pynwb/tests/unit/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/pynwb/tests/unit/test_events.py b/src/pynwb/tests/unit/test_events.py deleted file mode 100644 index 7938d99..0000000 --- a/src/pynwb/tests/unit/test_events.py +++ /dev/null @@ -1,195 +0,0 @@ -import numpy as np - -from pynwb.testing import TestCase -from pynwb.core import VectorData, VectorIndex - -from ndx_events import Events, LabeledEvents, TTLs, AnnotatedEventsTable - - -class TestEvents(TestCase): - - def test_init(self): - events = Events( - name='Events', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5 - ) - self.assertEqual(events.name, 'Events') - self.assertEqual(events.description, 'events from my experiment') - self.assertEqual(events.timestamps, [0., 1., 2.]) - self.assertEqual(events.resolution, 1e-5) - self.assertEqual(events.unit, 'seconds') - - -class TestLabeledEvents(TestCase): - - def test_init(self): - events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=['', '', '', 'event1', 'event2'] - ) - self.assertEqual(events.name, 'LabeledEvents') - self.assertEqual(events.description, 'events from my experiment') - self.assertEqual(events.timestamps, [0., 1., 2.]) - self.assertEqual(events.resolution, 1e-5) - self.assertEqual(events.unit, 'seconds') - np.testing.assert_array_equal(events.data, np.uint([3, 4, 3])), - self.assertEqual(events.labels, ['', '', '', 'event1', 'event2']) - - def test_mismatch_length(self): - msg = 'Timestamps and data must have the same length: 3 != 4' - with self.assertRaisesWith(ValueError, msg): - 
LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3, 5]), - labels=['', '', '', 'event1', 'event2', 'event3'] - ) - - def test_default_labels(self): - events = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - ) - self.assertEqual(events.labels, ['', '', '', '3', '4']) - - def test_none_in_labels(self): - msg = "None values are not allowed in the labels array. Please use '' for undefined labels." - with self.assertRaisesWith(ValueError, msg): - LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=np.uint([3, 4, 3]), - labels=[None, None, None, 'event1', 'event2'] - ) - - def test_data_negative(self): - msg = "Negative values are not allowed in 'data'." - with self.assertRaisesWith(ValueError, msg): - LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=[1, -2, 3], - labels=['', '', '', 'event1', 'event2'] - ) - - def test_data_int_conversion(self): - le = LabeledEvents( - name='LabeledEvents', - description='events from my experiment', - timestamps=[0., 1., 2.], - resolution=1e-5, - data=[1, 2, 3], - labels=['', '', '', 'event1', 'event2'] - ) - np.testing.assert_array_equal(le.data, np.array([1, 2, 3])) - self.assertEqual(le.data.dtype, np.uint) - - def test_data_string(self): - msg = ("'data' must be an array of numeric values that have type unsigned int or " - "can be converted to unsigned int, not type