Add notes and infra changes
rly committed Jan 22, 2024
1 parent 38e594a commit b0c87a0
Showing 5 changed files with 116 additions and 12 deletions.
3 changes: 0 additions & 3 deletions .codespellrc

This file was deleted.

28 changes: 28 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,28 @@
# NOTE: run `pre-commit autoupdate` to update hooks to latest version
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: check-added-large-files
      - id: check-json
      - id: check-toml
      - id: name-tests-test
        args: [--pytest-test-first]
      - id: check-docstring-first
  - repo: https://github.com/psf/black
    rev: 23.12.0
    hooks:
      - id: black
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.1.8
    hooks:
      - id: ruff
  - repo: https://github.com/codespell-project/codespell
    rev: v2.2.6
    hooks:
      - id: codespell
        additional_dependencies:
          - tomli
24 changes: 24 additions & 0 deletions README.md
@@ -37,6 +37,30 @@ Matlab:
generateExtension('<directory path>/ndx-events/spec/ndx-events.namespace.yaml');
```

## Developer installation
In a Python 3.8-3.12 environment:
```bash
pip install -r requirements-dev.txt
pip install -e .
```

Run tests:
```bash
pytest
```

Install pre-commit hooks:
```bash
pre-commit install
```

Style and other checks:
```bash
black .
ruff .
codespell .
```

## Example usage
Python:

56 changes: 53 additions & 3 deletions src/pynwb/tests/test_example_usage.py
@@ -1,4 +1,4 @@
def test_example_usage():
def test_example_usage1():
    from datetime import datetime
    from ndx_events import EventsTable, EventTypesTable, TtlsTable, TtlTypesTable, Task
    import numpy as np
@@ -42,7 +42,7 @@ def test_example_usage():
    )
    learning_response_description = (
        "During the learning phase, subjects are instructed to respond to the following "
        "question: 'Is this an animal?' in each trial. Response are encoded as 'Yes, this "
        "question: 'Is this an animal?' in each trial. Responses are encoded as 'Yes, this "
        "is an animal' (20) and 'No, this is not an animal' (21)."
    )
    ttl_types_table.add_row(
@@ -185,5 +185,55 @@ def test_example_usage():
    print(read_nwbfile.acquisition["TtlsTable"].to_dataframe())


def test_example_usage2():
    """Example storing lick times"""
    from datetime import datetime
    from ndx_events import EventsTable, EventTypesTable, Task
    import numpy as np
    from pynwb import NWBFile, NWBHDF5IO

    nwbfile = NWBFile(
        session_description="session description",
        identifier="cool_experiment_001",
        session_start_time=datetime.now().astimezone(),
    )

    # NOTE that when adding an EventTypesTable to a Task, the EventTypesTable
    # must be named "event_types" according to the spec
    event_types_table = EventTypesTable(name="event_types", description="Metadata about event types")
    event_types_table.add_row(
        event_name="lick",
        event_type_description="Times when the subject licked the port",
    )

    # create a random sorted array of 1000 lick timestamps (dtype=float) from 0 to 3600 seconds
    lick_times = sorted(np.random.uniform(0, 3600, 1000))

    events_table = EventsTable(description="Metadata about events", target_tables={"event_type": event_types_table})
    for t in lick_times:
        # event_type=0 corresponds to the first row in the event_types_table
        events_table.add_row(timestamp=t, event_type=0)
    events_table.timestamp.resolution = 1 / 30000.0  # licks were detected at 30 kHz

    task = Task()
    task.event_types = event_types_table
    nwbfile.add_lab_meta_data(task)
    nwbfile.add_acquisition(events_table)

    # write nwb file
    filename = "test.nwb"
    with NWBHDF5IO(filename, "w") as io:
        io.write(nwbfile)

    # read nwb file and check its contents
    with NWBHDF5IO(filename, "r", load_namespaces=True) as io:
        read_nwbfile = io.read()
        print(read_nwbfile)
        # access the events table and event types table and print them
        print(read_nwbfile.get_lab_meta_data("task").event_types.to_dataframe())
        print(read_nwbfile.acquisition["EventsTable"].to_dataframe())


if __name__ == "__main__":
    test_example_usage()
    test_example_usage1()
    test_example_usage2()
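
The `target_tables={"event_type": event_types_table}` argument in the example above is what ties each event row to a row of the event types table. As a minimal sketch of how that link can be used after reading the file back (not part of this commit; the helper name `get_event_times` is made up for illustration and assumes the column names used above), the timestamps of a single event type can be pulled out like this:

```python
# Minimal sketch (not part of this commit): recover the timestamps of one event type
# from an EventsTable, assuming the "timestamp" and "event_type" column names used above.
import numpy as np
from ndx_events import EventsTable


def get_event_times(events_table: EventsTable, event_type_row: int) -> np.ndarray:
    """Return timestamps of all events whose event_type points at the given row index."""
    timestamps = np.asarray(events_table["timestamp"].data)
    # the event_type column is a DynamicTableRegion; its .data holds raw row indices
    # into the event types table
    type_indices = np.asarray(events_table["event_type"].data)
    return timestamps[type_indices == event_type_row]


# e.g., lick times from the table written in test_example_usage2():
# lick_times = get_event_times(read_nwbfile.acquisition["EventsTable"], event_type_row=0)
```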
17 changes: 11 additions & 6 deletions src/spec/create_extension_spec.py
@@ -17,7 +17,7 @@ def main():
    timestamp_vector_data = NWBDatasetSpec(
        neurodata_type_def="TimestampVectorData",
        neurodata_type_inc="VectorData",
        doc="A VectorData that stores timestamps in seconds.",
        doc="A 1-dimensional VectorData that stores timestamps in seconds.",
        dtype="float64",
        dims=["num_times"],
        shape=[None],
@@ -26,8 +26,11 @@
                name="unit",
                dtype="text",
                doc="The unit of measurement for the timestamps, fixed to 'seconds'.",
                value="seconds",
            ),
            # NOTE: this requires all timestamps to have the same resolution which may not be true
            # if they come from different acquisition systems or processing pipelines...
            # maybe this should be a column of the event type table instead?
            NWBAttributeSpec(
                name="resolution",
                dtype="float64",
@@ -43,7 +46,7 @@
    duration_vector_data = NWBDatasetSpec(
        neurodata_type_def="DurationVectorData",
        neurodata_type_inc="VectorData",
        doc="A VectorData that stores durations in seconds.",
        doc="A 1-dimensional VectorData that stores durations in seconds.",
        dtype="float64",
        dims=["num_events"],
        shape=[None],
@@ -54,6 +57,7 @@
                doc="The unit of measurement for the durations, fixed to 'seconds'.",
                value="seconds",
            ),
            # NOTE: this is usually the same as the timestamp resolution
            NWBAttributeSpec(
                name="resolution",
                dtype="float64",
@@ -92,10 +96,11 @@
        neurodata_type_inc="DynamicTable",
        doc=(
            "A column-based table to store information about events (event instances), one event per row. "
            "Each event must have an event_type, which is a row in the EventTypesTable. Additional columns "
            "may be added to store metadata about each event, such as the duration of the event, or a "
            "text value of the event."
            "Each event must have an event_type, which is a reference to a row in the EventTypesTable. "
            "Additional columns may be added to store metadata about each event, such as the duration "
            "of the event, or a text value of the event."
        ),
        # NOTE: custom columns should apply to every event in the table which may not be the case
        default_name="EventsTable",
        datasets=[
            NWBDatasetSpec(
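
The NOTE added above questions whether `resolution` belongs on `TimestampVectorData` at all, since events of different types may come from systems with different clocks. A hypothetical sketch of the alternative it floats, storing resolution as an optional column of the `EventTypesTable`, might look like the following; this is not what the commit implements, and the column name `timestamp_resolution` is made up for illustration:

```python
# Hypothetical alternative (not implemented by this commit): declare resolution as an
# optional per-event-type column instead of an attribute on TimestampVectorData.
from pynwb.spec import NWBDatasetSpec

timestamp_resolution_column = NWBDatasetSpec(
    name="timestamp_resolution",
    neurodata_type_inc="VectorData",
    dtype="float64",
    dims=["num_event_types"],
    shape=[None],
    doc=(
        "Smallest possible difference between two timestamps of this event type, in seconds. "
        "Usually 1 divided by the sampling rate of the system that detected the events."
    ),
    quantity="?",  # optional column
)
# This spec could then be appended to the `datasets` list of the EventTypesTable group spec
# in place of the per-dataset resolution attribute.
```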
