diff --git a/.gitignore b/.gitignore index 0e5ce43..fac0f30 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,8 @@ +# output NWB files +*.nwb + # generated docs -docs/_build docs/source/_format_auto_docs -docs/source/_static -!docs/source/_static/theme_overrides.css - -# copied spec files -src/pynwb/ndx_events/spec/*.yaml # Byte-compiled / optimized / DLL files __pycache__/ @@ -29,6 +26,7 @@ parts/ sdist/ var/ wheels/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg @@ -47,14 +45,18 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover +*.py,cover .hypothesis/ .pytest_cache/ +cover/ +.ruff_cache/ # Translations *.mo @@ -64,6 +66,7 @@ coverage.xml *.log local_settings.py db.sqlite3 +db.sqlite3-journal # Flask stuff: instance/ @@ -76,16 +79,49 @@ instance/ docs/_build/ # PyBuilder +.pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints -# pyenv -.python-version +# IPython +profile_default/ +ipython_config.py -# celery beat schedule file +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. 
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff celerybeat-schedule +celerybeat.pid # SageMath parsed files *.sage.py @@ -111,6 +147,24 @@ venv.bak/ # mypy .mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ # Mac finder .DS_Store diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..448372a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,5 @@ +# Changelog for ndx-events + +## 0.3.0 (Upcoming) + + diff --git a/LICENSE.txt b/LICENSE.txt index e69de29..8850436 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2023, Ryan Ly +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index fe511eb..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include LICENSE.txt README.md requirements.txt -include spec/*.yaml - -graft src/pynwb/tests -global-exclude __pycache__ *.py[co] diff --git a/README.md b/README.md index 3aeedb5..52b22ea 100644 --- a/README.md +++ b/README.md @@ -22,8 +22,8 @@ subtype of `DynamicTable`, where each row corresponds to a different event type. Unlike for the other event types, users can add their own custom columns to annotate each event type or event time. This can be useful for storing event metadata related to data preprocessing and analysis, such as marking bad events. -This extension was developed by Ryan Ly, Ben Dichter, Oliver Rübel, and Andrew Tritt. 
Information about the rationale, -background, and alternative approaches to this extension can be found here: +This extension was developed by Ryan Ly, Oliver Rübel, and the NWB Technical Advisory Board. +Information about the rationale, background, and alternative approaches to this extension can be found here: https://docs.google.com/document/d/1qcsjyFVX9oI_746RdMoDdmQPu940s0YtDjb1en1Xtdw ## Installation diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..cfa56a2 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,112 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "ndx-events" +version = "0.3.0" +authors = [ + { name="Ryan Ly", email="rly@lbl.gov" } ] +description = "NWB extension for storing timestamped event and TTL pulse data" +readme = "README.md" +requires-python = ">=3.8" +license = {text = "BSD-3"} +classifiers = [ + # TODO: add classifiers before release + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", +] +keywords = [ + 'NeurodataWithoutBorders', + 'NWB', + 'nwb-extension', + 'ndx-extension', +] +dependencies = [ + "pynwb>=2.5.0", + "hdmf>=3.10.0", +] + +# TODO: add URLs before release +[project.urls] +"Homepage" = "https://github.com/rly/ndx-events" +# "Documentation" = "https://package.readthedocs.io/" +"Bug Tracker" = "https://github.com/rly/ndx-events/issues" +"Discussions" = "https://github.com/rly/ndx-events/discussions" +"Changelog" = "https://github.com/rly/ndx-events/blob/main/CHANGELOG.md" + +[tool.hatch.build] +include = [ + "src/pynwb", + "spec/ndx-events.extensions.yaml", + "spec/ndx-events.namespace.yaml", +] 
+exclude = [ + "src/pynwb/tests", +] + +[tool.hatch.build.targets.wheel] +packages = [ + "src/pynwb/ndx_events", + "spec" +] + +[tool.hatch.build.targets.wheel.sources] +"spec" = "ndx_events/spec" + +[tool.hatch.build.targets.sdist] +include = [ + "src/pynwb", + "spec/ndx-events.extensions.yaml", + "spec/ndx-events.namespace.yaml", + "docs", +] +exclude = [] + +[tool.pytest.ini_options] +addopts = "--cov --cov-report html" + +[tool.codespell] +skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,hdmf-common-schema,./docs/_build/*,*.ipynb" + +[tool.coverage.run] +branch = true +source = ["src/pynwb"] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] + +[tool.black] +line-length = 120 +preview = true +exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb|docs/" + +[tool.ruff] +select = ["E", "F", "T100", "T201", "T203"] +exclude = [ + ".git", + ".tox", + "__pycache__", + "build/", + "dist/", +] +line-length = 120 + +[tool.ruff.per-file-ignores] +"src/spec/create_extension_spec.py" = ["T201"] +"src/pynwb/tests/test_example_usage.py" = ["T201"] + +[tool.ruff.mccabe] +max-complexity = 17 \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..7655a0a --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,15 @@ +# pinned dependencies to reproduce an entire development environment to +# run tests, check code style, and generate documentation +black==23.9.1 +codespell==2.2.6 +coverage==7.3.2 +hdmf==3.10.0 +hdmf-docutils==0.4.5 +pre-commit==3.4.0 +pynwb==2.5.0 +pytest==7.4.2 +pytest-cov==4.1.0 +python-dateutil==2.8.2 +pytest-subtests==0.6.0 +ruff==0.0.292 +tox==4.11.3 diff --git a/requirements-min.txt b/requirements-min.txt new file mode 100644 index 0000000..695410a --- /dev/null +++ b/requirements-min.txt @@ -0,0 +1,5 @@ +# minimum versions of package dependencies for installation +# these should match the 
minimum versions specified in pyproject.toml +# NOTE: it may be possible to relax these minimum requirements +pynwb==2.5.0 +hdmf==3.10.0 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index d68ad89..0000000 --- a/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -pynwb>=1.1.2 -hdmf_docutils -pytest \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index b3e6c0b..0000000 --- a/setup.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[wheel] -universal = 1 - -[flake8] -max-line-length = 120 -max-complexity = 17 -exclude = - .git, - .tox, - __pycache__, - build/, - dist/, - docs/source/conf.py - versioneer.py -per-file-ignores = - src/pynwb/tests/test_example_usage.py:T001 - - -[metadata] -description-file = README.md diff --git a/setup.py b/setup.py deleted file mode 100644 index 29e384d..0000000 --- a/setup.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- - -import os - -from setuptools import setup, find_packages -from shutil import copy2 - -# load README.md/README.rst file -try: - if os.path.exists('README.md'): - with open('README.md', 'r') as fp: - readme = fp.read() - readme_type = 'text/markdown; charset=UTF-8' - elif os.path.exists('README.rst'): - with open('README.rst', 'r') as fp: - readme = fp.read() - readme_type = 'text/x-rst; charset=UTF-8' - else: - readme = "" -except Exception: - readme = "" - -setup_args = { - 'name': 'ndx-events', - 'version': '0.2.0', - 'description': 'NWB extension for storing timestamped event and TTL pulse data', - 'long_description': readme, - 'long_description_content_type': readme_type, - 'author': 'Ryan Ly', - 'author_email': 'rly@lbl.gov', - 'url': 'https://github.com/rly/ndx-events', - 'license': 'BSD 3-Clause', - 'install_requires': [ - 'pynwb>=1.1.2' - ], - 'packages': find_packages('src/pynwb'), - 'package_dir': {'': 'src/pynwb'}, - 'package_data': {'ndx_events': [ - 'spec/ndx-events.namespace.yaml', - 'spec/ndx-events.extensions.yaml', - ]}, - 'classifiers': [ 
- "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - ], - 'zip_safe': False -} - - -def _copy_spec_files(project_dir): - ns_path = os.path.join(project_dir, 'spec', 'ndx-events.namespace.yaml') - ext_path = os.path.join(project_dir, 'spec', 'ndx-events.extensions.yaml') - - dst_dir = os.path.join(project_dir, 'src', 'pynwb', 'ndx_events', 'spec') - if not os.path.exists(dst_dir): - os.mkdir(dst_dir) - - copy2(ns_path, dst_dir) - copy2(ext_path, dst_dir) - - -if __name__ == '__main__': - _copy_spec_files(os.path.dirname(__file__)) - setup(**setup_args) diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index 82abd08..1a58541 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -1,32 +1,34 @@ import os from pynwb import load_namespaces, get_class -# Set path of the namespace.yaml file to the expected install location -ndx_events_specpath = os.path.join( - os.path.dirname(__file__), - 'spec', - 'ndx-events.namespace.yaml' -) +try: + from importlib.resources import files +except ImportError: + # TODO: Remove when python 3.9 becomes the new minimum + from importlib_resources import files -# If the extension has not been installed yet but we are running directly from -# the git repo -if not os.path.exists(ndx_events_specpath): - ndx_events_specpath = os.path.abspath(os.path.join( - os.path.dirname(__file__), - '..', '..', '..', - 'spec', - 'ndx-events.namespace.yaml' - )) +# Get path to the namespace.yaml file with the expected location when installed not in editable mode +__location_of_this_file = files(__name__) +__spec_path = __location_of_this_file / "spec" / "ndx-events.namespace.yaml" + +# If that path does not exist, we are likely running in editable mode. 
Use the local path instead +if not os.path.exists(__spec_path): + __spec_path = __location_of_this_file.parent.parent.parent / "spec" / "ndx-events.namespace.yaml" # Load the namespace -load_namespaces(ndx_events_specpath) +load_namespaces(str(__spec_path)) -from . import io as __io # noqa: E402,F401 +# TODO: Define your classes here to make them accessible at the package level. +# Either have PyNWB generate a class from the spec using `get_class` as shown +# below or write a custom class and register it using the class decorator +# `@register_class("EventsTable", "ndx-events")` +Task = get_class("Task", "ndx-events") +TimestampVectorData = get_class("TimestampVectorData", "ndx-events") +DurationVectorData = get_class("DurationVectorData", "ndx-events") +EventTypesTable = get_class("EventTypesTable", "ndx-events") +EventsTable = get_class("EventsTable", "ndx-events") +TtlTypesTable = get_class("TtlTypesTable", "ndx-events") +TtlsTable = get_class("TtlsTable", "ndx-events") -Task = get_class('Task', 'ndx-events') -TimestampVectorData = get_class('TimestampVectorData', 'ndx-events') -DurationVectorData = get_class('DurationVectorData', 'ndx-events') -EventTypesTable = get_class('EventTypesTable', 'ndx-events') -EventsTable = get_class('EventsTable', 'ndx-events') -TtlTypesTable = get_class('TtlTypesTable', 'ndx-events') -TtlsTable = get_class('TtlsTable', 'ndx-events') +# Remove these functions from the package +del load_namespaces, get_class diff --git a/src/pynwb/ndx_events/events.py b/src/pynwb/ndx_events/events.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/pynwb/ndx_events/io/__init__.py b/src/pynwb/ndx_events/io/__init__.py deleted file mode 100644 index 24af8ee..0000000 --- a/src/pynwb/ndx_events/io/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . 
import events as __events # noqa: E402,F401 diff --git a/src/pynwb/ndx_events/io/events.py b/src/pynwb/ndx_events/io/events.py deleted file mode 100644 index 29f3089..0000000 --- a/src/pynwb/ndx_events/io/events.py +++ /dev/null @@ -1,71 +0,0 @@ -from pynwb import register_map -from pynwb.io.core import NWBContainerMapper -# from hdmf.common.io.table import DynamicTableMap -# from hdmf.build import ObjectMapper, BuildManager -# from hdmf.common import VectorData -# from hdmf.utils import getargs, docval -# from hdmf.spec import AttributeSpec - -# from ..events import Events - - -# @register_map(Events) -# class EventsMap(NWBContainerMapper): - -# def __init__(self, spec): -# super().__init__(spec) -# # map object attribute Events.unit -> spec Events/timestamps.unit -# # map object attribute Events.resolution -> spec Events/timestamps.resolution -# timestamps_spec = self.spec.get_dataset('timestamps') -# self.map_spec('unit', timestamps_spec.get_attribute('unit')) -# self.map_spec('resolution', timestamps_spec.get_attribute('resolution')) - - -# @register_map(LabeledEvents) -# class LabeledEventsMap(EventsMap): - -# def __init__(self, spec): -# super().__init__(spec) -# # map object attribute LabeledEvents.labels -> spec LabeledEvents/data.labels -# data_spec = self.spec.get_dataset('data') -# self.map_spec('labels', data_spec.get_attribute('labels')) - - -# @register_map(AnnotatedEventsTable) -# class AnnotatedEventsTableMap(DynamicTableMap): - -# def __init__(self, spec): -# super().__init__(spec) -# # map object attribute AnnotatedEventsTable.resolution -> spec AnnotatedEventsTable/event_times.resolution -# event_times_spec = self.spec.get_dataset('event_times') -# self.map_spec('resolution', event_times_spec.get_attribute('resolution')) - -# @DynamicTableMap.constructor_arg('resolution') -# def resolution_carg(self, builder, manager): -# # on construct, map builder for AnnotatedEventsTable.datasets['event_times'].attributes['resolution'] -# # -> 
AnnotatedEventsTable.__init__ argument 'resolution' -# if 'event_times' in builder: -# return builder['event_times'].attributes.get('resolution') -# return None - - -# @register_map(VectorData) -# class VectorDataMap(ObjectMapper): - -# # TODO when merging into NWB core, fold this into pynwb.io.core.VectorDataMap - -# @docval({"name": "spec", "type": AttributeSpec, "doc": "the spec to get the attribute value for"}, -# {"name": "container", "type": VectorData, "doc": "the container to get the attribute value from"}, -# {"name": "manager", "type": BuildManager, "doc": "the BuildManager used for managing this build"}, -# returns='the value of the attribute') -# def get_attr_value(self, **kwargs): -# ''' Get the value of the attribute corresponding to this spec from the given container ''' -# spec, container, manager = getargs('spec', 'container', 'manager', kwargs) - -# # on build of VectorData objects, map object attribute AnnotatedEventsTable.resolution -# # -> spec AnnotatedEventsTable/event_times.resolution -# if isinstance(container.parent, AnnotatedEventsTable): -# if container.name == 'event_times': -# if spec.name == 'resolution': -# return container.parent.resolution -# return super().get_attr_value(**kwargs) diff --git a/src/pynwb/tests/test_events.py b/src/pynwb/tests/test_events.py index 14e49c6..0709041 100644 --- a/src/pynwb/tests/test_events.py +++ b/src/pynwb/tests/test_events.py @@ -4,17 +4,24 @@ from pynwb.testing import TestCase, remove_test_file from pynwb.testing.mock.file import mock_NWBFile -from ndx_events import EventsTable, EventTypesTable, TtlsTable, TtlTypesTable, Task, DurationVectorData, TimestampVectorData +from ndx_events import ( + EventsTable, + EventTypesTable, + TtlsTable, + TtlTypesTable, + Task, + DurationVectorData, + TimestampVectorData, +) class TestTimestampVectorData(TestCase): - def test_init(self): data = TimestampVectorData(name="test", description="description") assert data.name == "test" assert data.description == 
"description" assert data.unit == "seconds" - assert data.resolution == None + assert data.resolution is None def test_add_to_dynamic_table(self): col = TimestampVectorData(name="test", description="description") @@ -24,13 +31,13 @@ def test_add_to_dynamic_table(self): assert table.test[0] == 0.1 def test_set_resolution_init(self): - data = TimestampVectorData(name="test", description="description", resolution=1/32000.0) - assert data.resolution == 1/32000.0 + data = TimestampVectorData(name="test", description="description", resolution=1 / 32000.0) + assert data.resolution == 1 / 32000.0 def test_set_resolution_attr(self): data = TimestampVectorData(name="test", description="description") - data.resolution = 1/32000.0 - assert data.resolution == 1/32000.0 + data.resolution = 1 / 32000.0 + assert data.resolution == 1 / 32000.0 class TestTimestampVectorDataSimpleRoundtrip(TestCase): @@ -68,7 +75,6 @@ def test_roundtrip(self): class TestDurationVectorData(TestCase): - def test_init(self): data = DurationVectorData(name="test", description="description") assert data.name == "test" @@ -118,7 +124,6 @@ def test_roundtrip(self): class TestTask(TestCase): - def test_init(self): task = Task() assert task.name == "task" @@ -159,7 +164,6 @@ def test_roundtrip(self): class TestEventTypesTable(TestCase): - def test_init(self): event_types_table = EventTypesTable(description="Metadata about event types") assert event_types_table.name == "EventTypesTable" @@ -227,7 +231,8 @@ def test_roundtrip(self): assert read_event_types_table.description == "Metadata about event types" assert all(read_event_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) assert all( - read_event_types_table["event_type_description"].data[:] == [ + read_event_types_table["event_type_description"].data[:] + == [ "Times when the cue was on screen.", "Times when the stimulus was on screen.", ] @@ -235,7 +240,6 @@ def test_roundtrip(self): class TestEventsTable(TestCase): - def test_init(self): 
events_table = EventsTable(description="Metadata about events") assert events_table.name == "EventsTable" @@ -396,7 +400,6 @@ def test_roundtrip(self): class TestTtlTypesTable(TestCase): - def test_init(self): ttl_types_table = TtlTypesTable(description="Metadata about TTL types") assert ttl_types_table.name == "TtlTypesTable" @@ -469,7 +472,8 @@ def test_roundtrip(self): assert read_ttl_types_table.description == "Metadata about TTL types" assert all(read_ttl_types_table["event_name"].data[:] == ["cue on", "stimulus on"]) assert all( - read_ttl_types_table["event_type_description"].data[:] == [ + read_ttl_types_table["event_type_description"].data[:] + == [ "Times when the cue was on screen.", "Times when the stimulus was on screen.", ] @@ -478,7 +482,6 @@ def test_roundtrip(self): class TestTtlsTable(TestCase): - def test_init(self): ttls_table = TtlsTable(description="Metadata about TTLs") assert ttls_table.name == "TtlsTable" @@ -581,4 +584,4 @@ def test_roundtrip(self): assert read_ttls_table.description == "Metadata about TTLs" assert all(read_ttls_table["timestamp"].data[:] == [0.1, 1.1]) assert all(read_ttls_table["ttl_type"].data[:] == [0, 0]) - assert read_ttls_table["ttl_type"].table is read_ttl_types_table \ No newline at end of file + assert read_ttls_table["ttl_type"].table is read_ttl_types_table diff --git a/src/pynwb/tests/test_example_usage.py b/src/pynwb/tests/test_example_usage.py index 3df07fa..ba32e10 100644 --- a/src/pynwb/tests/test_example_usage.py +++ b/src/pynwb/tests/test_example_usage.py @@ -123,7 +123,7 @@ def test_example_usage(): timestamp=6825.934244, ttl_type=6, ) - ttls_table.timestamp.resolution = 1/50000.0 # specify the resolution of the timestamps (optional) + ttls_table.timestamp.resolution = 1 / 50000.0 # specify the resolution of the timestamps (optional) # if TTLs are recorded, then the events table should hold any non-TTL events # recorded by the acquisition system @@ -143,8 +143,7 @@ def test_example_usage(): events_table 
= EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table}) events_table.add_column(name="category_name", description="Name of the category of the stimulus") events_table.add_column( - name="stimulus_image_index", - description="Frame index of the stimulus image in the StimulusPresentation object" + name="stimulus_image_index", description="Frame index of the stimulus image in the StimulusPresentation object" ) events_table.add_row( timestamp=6821.208244, @@ -160,8 +159,8 @@ def test_example_usage(): event_type=0, duration=0.99484, ) - events_table.timestamp.resolution = 1/50000.0 # specify the resolution of the timestamps (optional) - events_table.duration.resolution = 1/50000.0 # specify the resolution of the durations (optional) + events_table.timestamp.resolution = 1 / 50000.0 # specify the resolution of the timestamps (optional) + events_table.duration.resolution = 1 / 50000.0 # specify the resolution of the durations (optional) task = Task() task.event_types = event_types_table diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 9576de0..75d47f9 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -12,14 +12,14 @@ def main(): contact=["rly@lbl.gov"], ) - ns_builder.include_namespace('core') + ns_builder.include_namespace("core") timestamp_vector_data = NWBDatasetSpec( neurodata_type_def="TimestampVectorData", neurodata_type_inc="VectorData", doc="A VectorData that stores timestamps in seconds.", dtype="float64", - dims=['num_times'], + dims=["num_times"], shape=[None], attributes=[ NWBAttributeSpec( @@ -31,8 +31,10 @@ def main(): NWBAttributeSpec( name="resolution", dtype="float64", - doc=("The smallest possible difference between two timestamps. Usually 1 divided by the " - "sampling rate for timestamps of the data acquisition system."), + doc=( + "The smallest possible difference between two timestamps. 
Usually 1 divided by the " + "sampling rate for timestamps of the data acquisition system." + ), required=False, ), ], @@ -43,7 +45,7 @@ def main(): neurodata_type_inc="VectorData", doc="A VectorData that stores durations in seconds.", dtype="float64", - dims=['num_events'], + dims=["num_events"], shape=[None], attributes=[ NWBAttributeSpec( @@ -55,8 +57,10 @@ def main(): NWBAttributeSpec( name="resolution", dtype="float64", - doc=("The smallest possible difference between two timestamps. Usually 1 divided by the " - "sampling rate for timestamps of the data acquisition system."), + doc=( + "The smallest possible difference between two timestamps. Usually 1 divided by the " + "sampling rate for timestamps of the data acquisition system." + ), required=False, ), ], @@ -64,51 +68,55 @@ def main(): event_types_table = NWBGroupSpec( neurodata_type_def="EventTypesTable", - neurodata_type_inc='DynamicTable', - doc=("A column-based table to store information about each event type, such as name, one event type per row."), + neurodata_type_inc="DynamicTable", + doc="A column-based table to store information about each event type, such as name, one event type per row.", default_name="EventTypesTable", datasets=[ NWBDatasetSpec( - name='event_name', - neurodata_type_inc='VectorData', - dtype='text', - doc='Name of each event type.', + name="event_name", + neurodata_type_inc="VectorData", + dtype="text", + doc="Name of each event type.", ), NWBDatasetSpec( - name='event_type_description', - neurodata_type_inc='VectorData', - dtype='text', - doc='Description of each event type.', + name="event_type_description", + neurodata_type_inc="VectorData", + dtype="text", + doc="Description of each event type.", ), ], ) events_table = NWBGroupSpec( - neurodata_type_def='EventsTable', - neurodata_type_inc='DynamicTable', - doc=("A column-based table to store information about events (event instances), one event per row. 
" - "Each event must have an event_type, which is a row in the EventTypesTable. Additional columns " - "may be added to store metadata about each event, such as the duration of the event, or a " - "text value of the event."), + neurodata_type_def="EventsTable", + neurodata_type_inc="DynamicTable", + doc=( + "A column-based table to store information about events (event instances), one event per row. " + "Each event must have an event_type, which is a row in the EventTypesTable. Additional columns " + "may be added to store metadata about each event, such as the duration of the event, or a " + "text value of the event." + ), default_name="EventsTable", datasets=[ NWBDatasetSpec( - name='timestamp', - neurodata_type_inc='TimestampVectorData', + name="timestamp", + neurodata_type_inc="TimestampVectorData", doc="The time that each event occurred, in seconds, from the session start time.", ), NWBDatasetSpec( - name='event_type', - neurodata_type_inc='DynamicTableRegion', - dims=['num_events'], + name="event_type", + neurodata_type_inc="DynamicTableRegion", + dims=["num_events"], shape=[None], - doc=("The type of event that occurred. This is represented as a reference " - "to a row of the EventTypesTable."), + doc=( + "The type of event that occurred. This is represented as a reference " + "to a row of the EventTypesTable." 
+ ), quantity="?", ), NWBDatasetSpec( - name='duration', - neurodata_type_inc='DurationVectorData', + name="duration", + neurodata_type_inc="DurationVectorData", doc="Optional column containing the duration of each event, in seconds.", quantity="?", ), @@ -117,43 +125,46 @@ def main(): ttl_types_table = NWBGroupSpec( neurodata_type_def="TtlTypesTable", - neurodata_type_inc='EventTypesTable', - doc=("A column-based table to store information about each TTL type, such as name and pulse value, " - "one TTL type per row."), + neurodata_type_inc="EventTypesTable", + doc=( + "A column-based table to store information about each TTL type, such as name and pulse value, " + "one TTL type per row." + ), default_name="TtlTypesTable", datasets=[ NWBDatasetSpec( - name='pulse_value', - neurodata_type_inc='VectorData', - dtype='uint8', - doc='TTL pulse value for each event type.', + name="pulse_value", + neurodata_type_inc="VectorData", + dtype="uint8", + doc="TTL pulse value for each event type.", ), ], ) ttls_table = NWBGroupSpec( - neurodata_type_def='TtlsTable', - neurodata_type_inc='EventsTable', - doc=("Data type to hold timestamps of TTL pulses."), + neurodata_type_def="TtlsTable", + neurodata_type_inc="EventsTable", + doc="Data type to hold timestamps of TTL pulses.", default_name="TtlsTable", datasets=[ NWBDatasetSpec( - name='ttl_type', - neurodata_type_inc='DynamicTableRegion', - dims=['num_events'], + name="ttl_type", + neurodata_type_inc="DynamicTableRegion", + dims=["num_events"], shape=[None], - doc=("The type of TTL that occurred. This is represented as a reference " - "to a row of the TtlTypesTable."), + doc="The type of TTL that occurred. This is represented as a reference to a row of the TtlTypesTable.", ), ], ) task = NWBGroupSpec( - neurodata_type_def='Task', - neurodata_type_inc='LabMetaData', - doc=("A group to store task-related general metadata. 
TODO When merged with core, " - "this will no longer inherit from LabMetaData but from NWBContainer and be placed " - "optionally in /general."), + neurodata_type_def="Task", + neurodata_type_inc="LabMetaData", + doc=( + "A group to store task-related general metadata. TODO When merged with core, " + "this will no longer inherit from LabMetaData but from NWBContainer and be placed " + "optionally in /general." + ), name="task", groups=[ NWBGroupSpec( @@ -182,7 +193,7 @@ def main(): ] # export the spec to yaml files in the spec folder - output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec')) + output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "spec")) export_spec(ns_builder, new_data_types, output_dir)