diff --git a/.github/workflows/check_external_links.yml b/.github/workflows/check_external_links.yml new file mode 100644 index 0000000..d574174 --- /dev/null +++ b/.github/workflows/check_external_links.yml @@ -0,0 +1,38 @@ +name: Check Sphinx external links +on: + push: + schedule: + - cron: '0 5 * * *' # once per day at midnight ET + workflow_dispatch: + +jobs: + check-external-links: + name: Check for broken Sphinx external links + runs-on: ubuntu-latest + steps: + - name: Cancel any previous incomplete runs + uses: styfle/cancel-workflow-action@0.12.0 + with: + all_but_latest: true + access_token: ${{ github.token }} + + - uses: actions/checkout@v3 + with: + submodules: 'recursive' + fetch-depth: 0 # tags are required for versioneer to determine the version + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install Sphinx dependencies and package + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements-dev.txt + python -m pip install . 
+ + - name: Check Sphinx external links + run: | + cd docs # run_doc_autogen assumes spec is found in ../spec/ + sphinx-build -b linkcheck ./source ./test_build diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 0000000..e1f3e6f --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,14 @@ +name: Codespell +on: + push: + workflow_dispatch: + +jobs: + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Codespell + uses: codespell-project/actions-codespell@v2 \ No newline at end of file diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml new file mode 100644 index 0000000..db6b5c2 --- /dev/null +++ b/.github/workflows/ruff.yml @@ -0,0 +1,14 @@ +name: Ruff +on: + push: + workflow_dispatch: + +jobs: + ruff: + name: Check for style errors and common problems + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Ruff + uses: chartboost/ruff-action@v1 \ No newline at end of file diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml new file mode 100644 index 0000000..f442fa5 --- /dev/null +++ b/.github/workflows/run_all_tests.yml @@ -0,0 +1,183 @@ +name: Run all tests +on: + push: + schedule: + - cron: '0 5 * * *' # once per day at midnight ET + workflow_dispatch: + +jobs: + run-all-tests: + name: ${{ matrix.name }} + runs-on: ${{ matrix.os }} + defaults: + run: + shell: bash + strategy: + fail-fast: false + matrix: + include: + - { name: linux-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: ubuntu-latest } + - { name: linux-python3.8 , requirements: pinned , python-ver: "3.8" , os: ubuntu-latest } + - { name: linux-python3.9 , requirements: pinned , python-ver: "3.9" , os: ubuntu-latest } + - { name: linux-python3.10 , requirements: pinned , python-ver: "3.10", os: ubuntu-latest } + - { name: linux-python3.11 , 
requirements: pinned , python-ver: "3.11", os: ubuntu-latest } + - { name: linux-python3.12 , requirements: pinned , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: windows-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: windows-latest } + - { name: windows-python3.8 , requirements: pinned , python-ver: "3.8" , os: windows-latest } + - { name: windows-python3.9 , requirements: pinned , python-ver: "3.9" , os: windows-latest } + - { name: windows-python3.10 , requirements: pinned , python-ver: "3.10", os: windows-latest } + - { name: windows-python3.11 , requirements: pinned , python-ver: "3.11", os: windows-latest } + - { name: windows-python3.12 , requirements: pinned , python-ver: "3.12", os: windows-latest } + - { name: windows-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: windows-latest } + - { name: macos-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: macos-latest } + - { name: macos-python3.8 , requirements: pinned , python-ver: "3.8" , os: macos-latest } + - { name: macos-python3.9 , requirements: pinned , python-ver: "3.9" , os: macos-latest } + - { name: macos-python3.10 , requirements: pinned , python-ver: "3.10", os: macos-latest } + - { name: macos-python3.11 , requirements: pinned , python-ver: "3.11", os: macos-latest } + - { name: macos-python3.12 , requirements: pinned , python-ver: "3.12", os: macos-latest } + - { name: macos-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: macos-latest } + steps: + - name: Cancel non-latest runs + uses: styfle/cancel-workflow-action@0.11.0 + with: + all_but_latest: true + access_token: ${{ github.token }} + + - uses: actions/checkout@v3 + with: + submodules: 'recursive' + fetch-depth: 0 # fetch tags + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-ver }} + + - name: 
Install build dependencies + run: | + python -m pip install --upgrade pip + python -m pip list + python -m pip check + + - name: Install run requirements (minimum) + if: ${{ matrix.requirements == 'minimum' }} + run: | + python -m pip install -r requirements-min.txt -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (pinned) + if: ${{ matrix.requirements == 'pinned' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (upgraded) + if: ${{ matrix.requirements == 'upgraded' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -U -e . + + - name: Run tests + run: | + pytest -v + + - name: Build wheel and source distribution + run: | + python -m pip install --upgrade build + python -m build + ls -1 dist + + - name: Test installation from a wheel (POSIX) + if: ${{ matrix.os != 'windows-latest' }} + run: | + python -m venv test-wheel-env + source test-wheel-env/bin/activate + python -m pip install dist/*-none-any.whl + python -c "import ndx_hed" + + - name: Test installation from a wheel (windows) + if: ${{ matrix.os == 'windows-latest' }} + run: | + python -m venv test-wheel-env + test-wheel-env/Scripts/activate.bat + python -m pip install dist/*-none-any.whl + python -c "import ndx_hed" + + run-all-tests-on-conda: + name: ${{ matrix.name }} + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} # needed for conda environment to work + strategy: + fail-fast: false + matrix: + include: + - { name: conda-linux-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: ubuntu-latest } + - { name: conda-linux-python3.8 , requirements: pinned , python-ver: "3.8" , os: ubuntu-latest } + - { name: conda-linux-python3.9 , requirements: pinned , python-ver: "3.9" , os: ubuntu-latest } + - { name: conda-linux-python3.10 , requirements: pinned , python-ver: "3.10", os: ubuntu-latest } + - { name: conda-linux-python3.11 , 
requirements: pinned , python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-python3.12 , requirements: pinned , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-linux-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: ubuntu-latest } + steps: + - name: Cancel any previous incomplete runs + uses: styfle/cancel-workflow-action@0.11.0 + with: + access_token: ${{ github.token }} + + - uses: actions/checkout@v3 + with: + submodules: 'recursive' + fetch-depth: 0 # fetch tags + + - name: Set up Conda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + auto-activate-base: true + activate-environment: true + python-version: ${{ matrix.python-ver }} + + - name: Install build dependencies + run: | + conda config --set always_yes yes --set changeps1 no + conda info + conda config --show-sources + conda list --show-channel-urls + + - name: Install run requirements (minimum) + if: ${{ matrix.requirements == 'minimum' }} + run: | + python -m pip install -r requirements-min.txt -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (pinned) + if: ${{ matrix.requirements == 'pinned' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -e . + + - name: Install run requirements (upgraded) + if: ${{ matrix.requirements == 'upgraded' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install -U -e . 
+ + - name: Run tests + run: | + pytest -v + + - name: Build wheel and source distribution + run: | + python -m pip install --upgrade build + python -m build + ls -1 dist + + - name: Test installation from a wheel (POSIX) + run: | + python -m venv test-wheel-env + source test-wheel-env/bin/activate + python -m pip install dist/*-none-any.whl + python -c "import ndx_hed" diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml new file mode 100644 index 0000000..ee19717 --- /dev/null +++ b/.github/workflows/run_coverage.yml @@ -0,0 +1,59 @@ +name: Run code coverage +on: + push: + workflow_dispatch: + +jobs: + run-coverage: + name: ${{ matrix.os }} + runs-on: ${{ matrix.os }} + # TODO handle forks + # run pipeline on either a push event or a PR event on a fork + # if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name + defaults: + run: + shell: bash + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + env: # used by codecov-action + OS: ${{ matrix.os }} + PYTHON: '3.11' + steps: + - name: Cancel any previous incomplete runs + uses: styfle/cancel-workflow-action@0.11.0 + with: + all_but_latest: true + access_token: ${{ github.token }} + + - uses: actions/checkout@v3 + with: + submodules: 'recursive' + fetch-depth: 0 # fetch tags + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements-dev.txt + + - name: Install package + run: | + python -m pip install -e . 
# must install in editable mode for coverage to find sources + python -m pip list + + - name: Run tests and generate coverage report + run: | + pytest --cov + python -m coverage xml # codecov uploader requires xml format + python -m coverage report -m + + # TODO uncomment after setting up repo on codecov.io + # - name: Upload coverage to Codecov + # uses: codecov/codecov-action@v3 + # with: + # fail_ci_if_error: true diff --git a/.gitignore b/.gitignore index 68bc17f..fac0f30 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,9 @@ +# output NWB files +*.nwb + +# generated docs +docs/source/_format_auto_docs + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -50,6 +56,7 @@ coverage.xml .hypothesis/ .pytest_cache/ cover/ +.ruff_cache/ # Translations *.mo @@ -158,3 +165,6 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ + +# Mac finder +.DS_Store diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..49c68b4 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog for ndx-hed diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..642dd74 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2023, Ryan Ly, Oliver Ruebel, Kay Robbins +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/NEXTSTEPS.md b/NEXTSTEPS.md new file mode 100644 index 0000000..6c6b873 --- /dev/null +++ b/NEXTSTEPS.md @@ -0,0 +1,165 @@ + + +# Next Steps for ndx-hed Extension for NWB + +## Creating Your Extension + +1. In a terminal, change directory into the new ndx-hed directory: `cd ndx-hed` + +2. Add any packages required by your extension to the `dependencies` key in `pyproject.toml`. + +3. Run `python -m pip install -e .` to install your new extension Python package +and any other packages required to develop, document, and run your extension. + +4. Modify `src/spec/create_extension_spec.py` to define your extension. + +5. Run `python src/spec/create_extension_spec.py` to generate the +`spec/ndx-hed.namespace.yaml` and +`spec/ndx-hed.extensions.yaml` files. + +6. Define API classes for your new extension data types. + + - As a starting point, `src/pynwb/__init__.py` includes an example for how to use + the `pynwb.get_class` to generate a basic Python class for your new extension data + type. 
This class contains a constructor and properties for the new data type. + - Instead of using `pynwb.get_class`, you can define your own custom class for the + new type, which will allow you to customize the class methods, customize the + object mapping, and create convenience functions. See the + [Extending NWB tutorial](https://pynwb.readthedocs.io/en/stable/tutorials/general/extensions.html) + for more details. + +7. Define tests for your new extension data types in `src/pynwb/tests` or `src/matnwb/tests`. +A test for the example `TetrodeSeries` data type is provided as a reference and should be +replaced or removed. + + - Python tests should be runnable by executing [`pytest`](https://docs.pytest.org/en/latest/) + from the root of the extension directory. Use of PyNWB testing infrastructure from + `pynwb.testing` is encouraged (see + [documentation](https://pynwb.readthedocs.io/en/stable/pynwb.testing.html)). + - Creating both **unit tests** (e.g., testing initialization of new data type classes and + new functions) and **integration tests** (e.g., write the new data types to file, read + the file, and confirm the read data types are equal to the written data types) is + highly encouraged. + +8. You may need to modify `pyproject.toml` and re-run `python -m pip install -e .` if you +use any dependencies. + +9. Update the `CHANGELOG.md` regularly to document changes to your extension. + + +## Documenting and Publishing Your Extension to the Community + +1. Install the latest release of hdmf_docutils: `python -m pip install hdmf-docutils` + +2. Start a git repository for your extension directory ndx-hed + and push it to GitHub. You will need a GitHub account. + - Follow these directions: + https://help.github.com/en/articles/adding-an-existing-project-to-github-using-the-command-line + +3. Change directory into `docs`. + +4. Run `make html` to generate documentation for your extension based on the YAML files. + +5. 
Read `docs/README.md` for instructions on how to customize documentation for +your extension. + +6. Modify `README.md` to describe this extension for interested developers. + +7. Add a license file. Permissive licenses should be used if possible. **A [BSD license](https://opensource.org/licenses/BSD-3-Clause) is recommended.** + +8. Update the `CHANGELOG.md` to document changes to your extension. + +8. Push your repository to GitHub. A default set of GitHub Actions workflows is set up to +test your code on Linux, Windows, Mac OS, and Linux using conda; upload code coverage +stats to codecov.io; check for spelling errors; check for style errors; and check for broken +links in the documentation. For the code coverage workflow to work, you will need to +set up the repo on codecov.io and uncomment the "Upload coverage to Codecov" step +in `.github/workflows/run_coverage.yml`. + +8. Make a release for the extension on GitHub with the version number specified. e.g. if version is 0.1.0, then this page should exist: https://github.com/rly/ndx-hed/releases/tag/0.1.0 . For instructions on how to make a release on GitHub see [here](https://help.github.com/en/github/administering-a-repository/creating-releases). + +9. Publish your updated extension on [PyPI](https://pypi.org/). + - Follow these directions: https://packaging.python.org/en/latest/tutorials/packaging-projects/ + - You may need to modify `pyproject.toml` + - If your extension version is 0.1.0, then this page should exist: https://pypi.org/project/ndx-hed/0.1.0 + + Once your GitHub release and `pyproject.toml` are ready, publishing on PyPI: + ```bash + python -m pip install --upgrade build twine + python -m build + twine upload dist/* + ``` + +10. Go to https://github.com/nwb-extensions/staged-extensions and fork the +repository. + +11. Clone the fork onto your local filesystem. + +12. 
Copy the directory `staged-extensions/example` to a new directory +`staged-extensions/ndx-hed`: + + ```bash + cp -r staged-extensions/example staged-extensions/ndx-hed + ``` + +13. Edit `staged-extensions/ndx-hed/ndx-meta.yaml` +with information on where to find your NWB extension. + - The YAML file MUST contain a dict with the following keys: + - name: extension namespace name + - version: extension version + - src: URL for the main page of the public repository (e.g. on GitHub, BitBucket, GitLab) that contains the sources of the extension + - pip: URL for the main page of the extension on PyPI + - license: name of the license of the extension + - maintainers: list of GitHub usernames of those who will reliably maintain the extension + - You may copy and modify the following YAML that was auto-generated: + + ```yaml + name: ndx-hed + version: 0.1.0 + src: https://github.com/rly/ndx-hed + pip: https://pypi.org/project/ndx-hed/ + license: BSD-3 + maintainers: + - rly + - oruebel + - VisLab + ``` + +14. Edit `staged-extensions/ndx-hed/README.md` +to add information about your extension. You may copy it from +`ndx-hed/README.md`. + + ```bash + cp ndx-hed/README.md staged-extensions/ndx-hed/README.md + ``` + +15. Add and commit your changes to Git and push your changes to GitHub. +``` +cd staged-extensions +git add ndx-hed +git commit -m "Add new catalog entry for ndx-hed" . +git push +``` + +16. Open a pull request. Building of your extension will be tested on Windows, +Mac, and Linux. The technical team will review your extension shortly after +and provide feedback and request changes, if any. + +17. When your pull request is merged, a new repository, called +ndx-hed-record will be created in the nwb-extensions +GitHub organization and you will be added as a maintainer for that repository. + + +## Updating Your Published Extension + +1. Update your ndx-hed GitHub repository. + +2. Publish your updated extension on PyPI. + +3. 
Fork the ndx-hed-record repository on GitHub. + +4. Open a pull request to test the changes automatically. The technical team +will review your changes shortly after and provide feedback and request changes, +if any. + +5. Your updated extension is approved. diff --git a/README.md b/README.md index c12f40a..c29228e 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,15 @@ -# ndx-hed -HED extensions for Neurodata Without Borders (nwb) +# ndx-hed Extension for NWB + +Description of the extension + +## Installation + + +## Usage + +```python + +``` + +--- +This extension was created using [ndx-template](https://github.com/nwb-extensions/ndx-template). diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..54e6545 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,179 @@ + +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXAPIDOC = sphinx-apidoc +PAPER = +BUILDDIR = build +SRCDIR = ../src +RSTDIR = source +CONFDIR = $(PWD)/source + + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext fulldoc allclean + +help: + @echo "To update documentation sources from the format specification please use \`make apidoc'" + @echo "" + @echo "To build the documentation please use \`make ' where is one of" + @echo " fulldoc to rebuild the apidoc, html, and latexpdf all at once" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " apidoc to to build RST from source code" + @echo " clean to clean all documents built by Sphinx in _build" + @echo " allclean to clean all autogenerated documents both from Sphinx and apidoc" + +allclean: + -rm -rf $(BUILDDIR)/* $(RSTDIR)/modules.rst + -rm $(RSTDIR)/_format_auto_docs/*.png + -rm $(RSTDIR)/_format_auto_docs/*.pdf + -rm $(RSTDIR)/_format_auto_docs/*.rst + -rm $(RSTDIR)/_format_auto_docs/*.inc + +clean: + -rm -rf $(BUILDDIR)/* $(RSTDIR)/modules.rst + +html: + $(SPHINXBUILD) -b html 
$(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/sample.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/sample.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/sample" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/sample" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" "(use \`make latexpdf' here to do that automatically)." 
+ +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " "results in $(BUILDDIR)/doctest/output.txt." + +apidoc: + PYTHONPATH=$(CONFDIR):$(PYTHONPATH) nwb_generate_format_docs + @echo + @echo "Generate rst source files from NWB spec." 
+ +fulldoc: + $(MAKE) allclean + @echo + @echo "Rebuilding apidoc, html, latexpdf" + $(MAKE) apidoc + $(MAKE) html + $(MAKE) latexpdf diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..9a3a30d --- /dev/null +++ b/docs/README.md @@ -0,0 +1,121 @@ + +# Getting started + +## Generate Documentation + +* To generate the HTML version of your documentation run ``make html``. +* The [hdmf-docutils](https://pypi.org/project/hdmf-docutils/) package must be installed. + +## Customize Your Extension Documentation + +* **extension description** + * Edit ``source/description.rst`` to describe your extension. + +* **release notes** + * Edit ``source/release_notes.rst`` to document improvements and fixes of your extension. + +* **documentation build settings** + * Edit ``source/conf.py`` to customize your extension documentation configuration. + * Edit ``source/conf_doc_autogen.py`` to customize the format documentation auto-generation based on + the YAML specification files. + + +# Overview + +The specification documentation uses Sphinx [http://www.sphinx-doc.org/en/stable/index.html](http://www.sphinx-doc.org/en/stable/index.html) + +## Rebuilding All + +To rebuild the full documentation in html, latex, and PDF simply run: + +``` +make fulldoc +``` + +This is a convenience function that is equivalent to: + +``` +make allclean +make apidoc +make html +make latexpdf +``` + +## Generating the format documentation from the format spec + +The format documentation is auto-generated from the format specification (YAML) sources via: + +``` +make apidoc +``` + +This will invoke the executable: + +``` +hdmf_generate_format_docs +``` + +The script automatically generates a series of .rst, .png, and .pdf files that are stored in the folder `source/format_auto_docs`. The generated .rst files are included in `source/format.rst` and the png and pdf files are used as figures in the autogenerated docs. 
+The folder `source/format_auto_docs` is reserved for autogenerated files, i.e., files in the folder should not be added or edited by hand as they will be deleted and rebuilt during the full build of the documentation. + +By default the Sphinx configuration is set up to always regenerate the sources whenever the docs are being built (see next section). This behavior can be customized via the `spec_doc_rebuild_always` parameter in `source/conf.py` + +## Building a specific document type + +To build the documentation, run: + +``` +make <doctype> +``` + +where `<doctype>` is, e.g., `latexpdf`, `html`, `singlehtml`, or `man`. For a complete list of supported doc-types, see: + +``` +make help +``` + +## Cleaning up + +`make clean` cleans up all builds of the documentation located in `_build`. + +`make allclean` cleans up all builds of the documentation located in `_build` as well as all autogenerated sources stored in `source/format_auto_docs`. + +## Configuration + +The build of the documentation can be customized via a broad range of Sphinx options in: + +`source/conf_doc_autogen.py` + +In addition to standard Sphinx options, there are a number of additional options used to customize the content and structure of the autogenerated documents, e.g.: + +* `spec_show_yaml_src` - Boolean indicating whether the YAML sources should be included for the different Neurodata types +* `spec_generate_src_file` - Boolean indicating whether the YAML sources of the neurodata_types should be rendered in a separate section (True) or in the same location as the main documentation +* `spec_show_hierarchy_plots` - Boolean indicating whether we should generate and show figures of the hierarchy defined by the specifications as part of the documentation +* `spec_file_per_type` - Boolean indicating whether we should generate separate .inc reStructuredText for each neurodata_type (True) +or should all text be added to the main file (False) +* `spec_show_subgroups_in_tables` - Should subgroups of the main groups 
be rendered in the table as well. Usually this is disabled since groups are rendered as separate sections in the text +* `spec_appreviate_main_object_doc_in_tables` - Abbreviate the documentation of the main object for which a table is rendered in the table. This is commonly set to True as doc of the main object is already rendered as the main intro for the section describing the object +* `spec_show_title_for_tables` - Add a title for the table showing the specifications. +* `spec_show_subgroups_in_seperate_table` - Should top-level subgroups be listed in a separate table or as part of the main dataset and attributes table +* `spec_table_depth_char` - Char to be used as prefix to indicate the depth of an object in the specification hierarchy. NOTE: The char used should be supported by LaTeX. +* `spec_add_latex_clearpage_after_ndt_sections` - Add a LaTeX clearpage after each main section describing a neurodata_type. This helps in LaTeX to keep the ordering of figures, tables, and code blocks consistent in particular when the hierarchy_plots are included. 
+* `spec_resolve_type_inc` - Resolve includes to always show the full list of objects that are part of a type (True) or to show only the parts that are actually new to a current type while only linking to base types (False) + +In addition, the location of the input format specification can be customized as follows: + +* `spec_input_spec_dir` - Directory where the YAML files for the namespace to be documented are located +* `spec_input_namespace_filename` - Name of the YAML file with the specification of the Namespace to be documented +* `spec_input_default_namespace` - Name of the default namespace in the file + +Finally, the name and location of output files can be customized as follows: + +* `spec_output_dir` - Directory where the autogenerated files should be stored +* `spec_output_master_filename` - Name of the master .rst file that includes all the autogenerated docs +* `spec_output_doc_filename` - Name of the file where the main documentation goes +* `spec_output_src_filename` - Name of the file where the sources of the format spec go. NOTE: This file is only generated if `spec_generate_src_file` is enabled +* `spec_output_doc_type_hierarchy_filename` - Name of the file containing the type hierarchy. (Included in `spec_output_doc_filename`) + +In the regular Sphinx `source/conf.py` file, we can then also set: + +* `spec_doc_rebuild_always` - Boolean to define whether to always rebuild the source docs from YAML when doing a regular build of the sources (e.g., via `make html`) even if the folder with the source files already exists diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..747ffb7 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css new file mode 100644 index 0000000..63ee6cc --- /dev/null +++ b/docs/source/_static/theme_overrides.css @@ -0,0 +1,13 @@ +/* override table width restrictions */ +@media screen and (min-width: 767px) { + + .wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; + } + + .wy-table-responsive { + overflow: visible !important; + } +} diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..14a849d --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,112 @@ +# Configuration file for the Sphinx documentation builder. 
+# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = 'ndx-hed' +copyright = '2023, Ryan Ly, Oliver Ruebel, Kay Robbins' +author = 'Ryan Ly, Oliver Ruebel, Kay Robbins' + +version = '0.1.0' +release = 'alpha' + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + 'sphinx.ext.ifconfig', + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', +] + +templates_path = ['_templates'] +exclude_patterns = [] + +language = 'en' + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = 'alabaster' +html_static_path = ['_static'] + +# -- Options for intersphinx extension --------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration + +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), +} + + +############################################################################ +# CUSTOM CONFIGURATIONS ADDED BY THE NWB TOOL FOR GENERATING FORMAT DOCS +########################################################################### + +import sphinx_rtd_theme # noqa: E402 +import textwrap # noqa: E402 + +# -- Options for intersphinx --------------------------------------------- +intersphinx_mapping.update({ + 'core': ('https://nwb-schema.readthedocs.io/en/latest/', None), + 'hdmf-common': ('https://hdmf-common-schema.readthedocs.io/en/latest/', None), +}) + +# -- Generate sources from YAML--------------------------------------------------- +# Always rebuild the source docs from YAML 
even if the folder with the source files already exists +spec_doc_rebuild_always = True + + +def run_doc_autogen(_): + # Execute the autogeneration of Sphinx format docs from the YAML sources + import sys + import os + conf_file_dir = os.path.dirname(os.path.abspath(__file__)) + sys.path.append(conf_file_dir) # Need so that generate format docs can find the conf_doc_autogen file + from conf_doc_autogen import spec_output_dir + + if spec_doc_rebuild_always or not os.path.exists(spec_output_dir): + sys.path.append('./docs') # needed to enable import of generate_format docs + from hdmf_docutils.generate_format_docs import main as generate_docs + generate_docs() + + +def setup(app): + app.connect('builder-inited', run_doc_autogen) + # overrides for wide tables in RTD theme + try: + app.add_css_file("theme_overrides.css") # Used by newer Sphinx versions + except AttributeError: + app.add_stylesheet("theme_overrides.css") # Used by older version of Sphinx + +# -- Customize sphinx settings +numfig = True +autoclass_content = 'both' +autodoc_docstring_signature = True +autodoc_member_order = 'bysource' +add_function_parentheses = False + + +# -- HTML sphinx options +html_theme = "sphinx_rtd_theme" +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# LaTeX Sphinx options +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. 
+ 'preamble': textwrap.dedent( + ''' + \\setcounter{tocdepth}{3} + \\setcounter{secnumdepth}{6} + \\usepackage{enumitem} + \\setlistdepth{100} + '''), +} diff --git a/docs/source/conf_doc_autogen.py b/docs/source/conf_doc_autogen.py new file mode 100644 index 0000000..232ffd1 --- /dev/null +++ b/docs/source/conf_doc_autogen.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Configuration file for generating sources for the format documentation from the YAML specification files + +import os + +# -- Input options for the specification files to be used ----------------------- + +# Directory where the YAML files for the namespace to be documented are located +spec_input_spec_dir = '../spec' + +# Name of the YAML file with the specification of the Namespace to be documented +spec_input_namespace_filename = 'ndx-hed.namespace.yaml' + +# Name of the default namespace in the file +spec_input_default_namespace = 'ndx-hed' + + +# -- Options for customizing the locations of output files + +# Directory where the autogenerated files should be stored +spec_output_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "_format_auto_docs") + +# Name of the master rst file that includes all the autogenerated docs +spec_output_master_filename = 'format_spec_main.inc' + +# Name of the file where the main documentation goes +spec_output_doc_filename = 'format_spec_doc.inc' + +# Name of the file where the sources of the format spec go. NOTE: This file is only generated if +# spec_generate_src_file is enabled +spec_output_src_filename = 'format_spec_sources.inc' + +# Name of the file containing the type hierarchy. 
(Included in spec_output_doc_filename)
+spec_output_doc_type_hierarchy_filename = 'format_spec_type_hierarchy.inc'
+
+# Clean up the output directory before we build if the git hash is out of date
+spec_clean_output_dir_if_old_git_hash = True
+
+# Do not rebuild the format sources if we have previously built the sources and the git hash matches
+spec_skip_doc_autogen_if_current_git_hash = False
+
+
+# -- Options for the generation of the documentation from source ----------------
+
+# Should the YAML sources be included for the different modules
+spec_show_yaml_src = True
+
+# Show figure of the hierarchy of objects defined by the spec
+spec_show_hierarchy_plots = True
+
+# Should the sources of the neurodata_types (YAML) be rendered in a separate section (True) or
+# in the same location as the base documentation
+spec_generate_src_file = True
+
+# Should separate .inc reStructuredText files be generated for each neurodata_type (True)
+# or should all text be added to the main file
+spec_file_per_type = True
+
+# Should top-level subgroups be listed in a separate table or as part of the main dataset and attributes table
+spec_show_subgroups_in_seperate_table = True
+
+# Abbreviate the documentation of the main object for which a table is rendered in the table.
+# This is commonly set to True as doc of the main object is already rendered as the main intro for the
+# section describing the object
+spec_appreviate_main_object_doc_in_tables = True
+
+# Show a title for the tables
+spec_show_title_for_tables = True
+
+# Char to be used as prefix to indicate the depth of an object in the specification hierarchy
+spec_table_depth_char = '.' # '→' '.'
+
+# Add a LaTeX clearpage after each main section describing a neurodata_type.
This helps in LaTeX to keep the ordering
+# of figures, tables, and code blocks consistent in particular when the hierarchy_plots are included
+spec_add_latex_clearpage_after_ndt_sections = True
+
+# Resolve includes to always show the full list of objects that are part of a type (True)
+# or to show only the parts that are actually new to a current type while only linking to base types
+spec_resolve_type_inc = False
+
+# Default type map to be used. This is the type map where dependent namespaces are stored. In the case of
+# NWB this is spec_default_type_map = pynwb.get_type_map()
+import pynwb # noqa: E402
+spec_default_type_map = pynwb.get_type_map()
+
+# Default specification classes for groups, datasets, and namespaces. In the case of NWB these are the NWB-specific
+# spec classes. In the general cases these are the spec classes from HDMF
+spec_group_spec_cls = pynwb.spec.NWBGroupSpec
+spec_dataset_spec_cls = pynwb.spec.NWBDatasetSpec
+spec_namespace_spec_cls = pynwb.spec.NWBNamespace diff --git a/docs/source/credits.rst b/docs/source/credits.rst new file mode 100644 index 0000000..da5cda1 --- /dev/null +++ b/docs/source/credits.rst @@ -0,0 +1,21 @@ +*******
+Credits
+*******
+
+.. note::
+    Add the credits for your extension here
+
+Acknowledgments
+===============
+
+
+Authors
+=======
+
+
+*****
+Legal
+*****
+
+License
+======= diff --git a/docs/source/description.rst b/docs/source/description.rst new file mode 100644 index 0000000..6f8553e --- /dev/null +++ b/docs/source/description.rst @@ -0,0 +1,5 @@ +Overview
+========
+
+.. note::
+    Add the description of your extension here diff --git a/docs/source/format.rst b/docs/source/format.rst new file mode 100644 index 0000000..199e903 --- /dev/null +++ b/docs/source/format.rst @@ -0,0 +1,12 @@ +
+.. _ndx-hed:
+
+*******
+ndx-hed
+*******
+
+Version |release| |today|
+
+.. .. contents::
+
+..
include:: _format_auto_docs/format_spec_main.inc diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..bc93840 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,30 @@ +Specification for the ndx-hed extension +======================================= + +.. toctree:: + :numbered: + :maxdepth: 8 + :caption: Table of Contents + + description + +.. toctree:: + :numbered: + :maxdepth: 3 + :caption: Extension Specification + + format + +.. toctree:: + :maxdepth: 2 + :caption: History & Legal + + release_notes + credits + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst new file mode 100644 index 0000000..39ccd1c --- /dev/null +++ b/docs/source/release_notes.rst @@ -0,0 +1,5 @@ +Release Notes +============= + +.. note:: + Add the release notes of your extension here diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0644760 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,114 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "ndx-hed" +version = "0.1.0" +authors = [ + { name="Ryan Ly", email="rly@lbl.gov" }, + { name="Oliver Ruebel", email="oruebel@lbl.gov" }, + { name="Kay Robbins", email="kay.robbins@utsa.edu" }, + { name="Ian Callanan", email="ianrcallanan@gmail.com"} +] +description = "NWB extension for HED data" +readme = "README.md" +requires-python = ">=3.8" +license = {text = "BSD-3"} +classifiers = [ + # TODO: add classifiers before release + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI 
Approved :: BSD License", +] +keywords = [ + 'NeurodataWithoutBorders', + 'NWB', + 'nwb-extension', + 'ndx-extension', +] +dependencies = [ + "pynwb>=2.5.0", + "hdmf>=3.10.0", +] + +# TODO: add URLs before release +[project.urls] +"Homepage" = "https://github.com/hed-standard/ndx-hed" +# "Documentation" = "https://package.readthedocs.io/" +"Bug Tracker" = "https://github.com/hed-standard/ndx-hed/issues" +"Discussions" = "https://github.com/hed-standard/ndx-hed/discussions" +"Changelog" = "https://github.com/hed-standard/ndx-hed/CHANGELOG.md" + +[tool.hatch.build] +include = [ + "src/pynwb", + "spec/ndx-hed.extensions.yaml", + "spec/ndx-hed.namespace.yaml", +] +exclude = [ + "src/pynwb/tests", +] + +[tool.hatch.build.targets.wheel] +packages = [ + "src/pynwb/ndx_hed", + "spec" +] + +[tool.hatch.build.targets.wheel.sources] +"spec" = "ndx_hed/spec" + +[tool.hatch.build.targets.sdist] +include = [ + "src/pynwb", + "spec/ndx-hed.extensions.yaml", + "spec/ndx-hed.namespace.yaml", + "docs", +] +exclude = [] + +[tool.pytest.ini_options] +addopts = "--cov --cov-report html" + +[tool.codespell] +skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,hdmf-common-schema,./docs/_build/*,*.ipynb" + +[tool.coverage.run] +branch = true +source = ["src/pynwb"] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] + +[tool.black] +line-length = 120 +preview = true +exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb|docs/" + +[tool.ruff] +select = ["E", "F", "T100", "T201", "T203"] +exclude = [ + ".git", + ".tox", + "__pycache__", + "build/", + "dist/", +] +line-length = 120 + +[tool.ruff.per-file-ignores] +"src/spec/create_extension_spec.py" = ["T201"] + +[tool.ruff.mccabe] +max-complexity = 17 \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..7655a0a --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,15 @@ +# 
pinned dependencies to reproduce an entire development environment to +# run tests, check code style, and generate documentation +black==23.9.1 +codespell==2.2.6 +coverage==7.3.2 +hdmf==3.10.0 +hdmf-docutils==0.4.5 +pre-commit==3.4.0 +pynwb==2.5.0 +pytest==7.4.2 +pytest-cov==4.1.0 +python-dateutil==2.8.2 +pytest-subtests==0.6.0 +ruff==0.0.292 +tox==4.11.3 diff --git a/requirements-min.txt b/requirements-min.txt new file mode 100644 index 0000000..695410a --- /dev/null +++ b/requirements-min.txt @@ -0,0 +1,5 @@ +# minimum versions of package dependencies for installation +# these should match the minimum versions specified in pyproject.toml +# NOTE: it may be possible to relax these minimum requirements +pynwb==2.5.0 +hdmf==3.10.0 diff --git a/spec/ndx-hed.extensions.yaml b/spec/ndx-hed.extensions.yaml new file mode 100644 index 0000000..c9ea583 --- /dev/null +++ b/spec/ndx-hed.extensions.yaml @@ -0,0 +1,19 @@ +datasets: +- neurodata_type_def: HedAnnotations + neurodata_type_inc: VectorData + dtype: text + doc: An extension of VectorData for Hierarchical Event Descriptor (HED) tags. If + HED tags are used, the HED schema version must be specified in the NWB file using + the HedMetadata type. +groups: +- neurodata_type_def: HedMetadata + neurodata_type_inc: LabMetaData + name: HedMetadata + doc: An extension of LabMetaData to store the Hierarchical Event Descriptor (HED) + schema version. TODO When merged with core, this will no longer inherit from LabMetaData + but from NWBContainer and be placed optionally in /general. + attributes: + - name: hed_schema_version + dtype: text + doc: The version of the HED schema used to validate the HED tags, e.g., '8.2.0'. + Required if HED tags are used in the NWB file. 
diff --git a/spec/ndx-hed.namespace.yaml b/spec/ndx-hed.namespace.yaml new file mode 100644 index 0000000..0b278bb --- /dev/null +++ b/spec/ndx-hed.namespace.yaml @@ -0,0 +1,17 @@ +namespaces: +- author: + - Ryan Ly + - Oliver Ruebel + - Kay Robbins + - Ian Callanan + contact: + - rly@lbl.gov + - oruebel@lbl.gov + - kay.robbins@utsa.edu + - ianrcallanan@gmail.com + doc: NWB extension for HED data + name: ndx-hed + schema: + - namespace: core + - source: ndx-hed.extensions.yaml + version: 0.1.0 diff --git a/src/matnwb/README.md b/src/matnwb/README.md new file mode 100644 index 0000000..e69de29 diff --git a/src/pynwb/README.md b/src/pynwb/README.md new file mode 100644 index 0000000..e69de29 diff --git a/src/pynwb/ndx_hed/__init__.py b/src/pynwb/ndx_hed/__init__.py new file mode 100644 index 0000000..f00087d --- /dev/null +++ b/src/pynwb/ndx_hed/__init__.py @@ -0,0 +1,29 @@ +import os +from pynwb import load_namespaces, get_class + +try: + from importlib.resources import files +except ImportError: + # TODO: Remove when python 3.9 becomes the new minimum + from importlib_resources import files + +# Get path to the namespace.yaml file with the expected location when installed not in editable mode +__location_of_this_file = files(__name__) +__spec_path = __location_of_this_file / "spec" / "ndx-hed.namespace.yaml" + +# If that path does not exist, we are likely running in editable mode. Use the local path instead +if not os.path.exists(__spec_path): + __spec_path = __location_of_this_file.parent.parent.parent / "spec" / "ndx-hed.namespace.yaml" + +# Load the namespace +load_namespaces(str(__spec_path)) + +# TODO: Define your classes here to make them accessible at the package level. 
+# Either have PyNWB generate a class from the spec using `get_class` as shown +# below or write a custom class and register it using the class decorator +# `@register_class("TetrodeSeries", "ndx-hed")` +HedAnnotations = get_class("HedAnnotations", "ndx-hed") +HedMetadata = get_class("HedMetadata", "ndx-hed") + +# Remove these functions from the package +del load_namespaces, get_class diff --git a/src/pynwb/tests/__init__.py b/src/pynwb/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/pynwb/tests/test_hed_tags.py b/src/pynwb/tests/test_hed_tags.py new file mode 100644 index 0000000..d6f5b7a --- /dev/null +++ b/src/pynwb/tests/test_hed_tags.py @@ -0,0 +1,151 @@ +"""Unit and integration tests for ndx-hed.""" +from pynwb import NWBHDF5IO # , NWBFile +from pynwb.testing.mock.file import mock_NWBFile +from pynwb.testing import TestCase, remove_test_file # , NWBH5IOFlexMixin + +from ndx_hed import HedAnnotations, HedMetadata + + +class TestHedMetadataConstructor(TestCase): + """Simple unit test for creating a HedMetadata.""" + + def test_constructor(self): + """Test setting HedNWBFile values using the constructor.""" + hed_metadata = HedMetadata(hed_schema_version="8.2.0") + assert hed_metadata.hed_schema_version == "8.2.0" + + def test_add_to_nwbfile(self): + nwbfile = mock_NWBFile() + hed_metadata = HedMetadata(hed_schema_version="8.2.0") + nwbfile.add_lab_meta_data(hed_metadata) + assert nwbfile.get_lab_meta_data("HedMetadata") is hed_metadata + + +class TestHedNWBFileSimpleRoundtrip(TestCase): + """Simple roundtrip test for HedNWBFile.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Create a HedMetadata, write it to file, read the file, and test that it matches the original HedNWBFile. 
+ """ + nwbfile = mock_NWBFile() + hed_metadata = HedMetadata(hed_schema_version="8.2.0") + nwbfile.add_lab_meta_data(hed_metadata) + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_hed_metadata = read_nwbfile.get_lab_meta_data("HedMetadata") + assert isinstance(read_hed_metadata, HedMetadata) + assert read_hed_metadata.hed_schema_version == "8.2.0" + + +# class TestHedNWBFileRoundtripPyNWB(NWBH5IOFlexMixin, TestCase): +# """Complex, more complete roundtrip test for HedNWBFile using pynwb.testing infrastructure.""" + +# def getContainerType(self): +# return "HedNWBFile" + +# def addContainer(self): +# self.nwbfile = HedNWBFile( +# session_description="session_description", +# identifier=str(uuid4()), +# session_start_time=datetime(1970, 1, 1, tzinfo=tzlocal()), +# hed_schema_version="8.2.0", +# ) + +# def getContainer(self, nwbfile: NWBFile): +# return nwbfile + + +class TestHedTagsConstructor(TestCase): + """Simple unit test for creating a HedTags.""" + + def test_constructor(self): + """Test setting HedTags values using the constructor.""" + hed_annotations = HedAnnotations( + name="name", + description="description", + data=["animal_target, correct_response", "animal_target, incorrect_response"], + ) + assert hed_annotations.name == "name" + assert hed_annotations.description == "description" + assert hed_annotations.data == ["animal_target, correct_response", "animal_target, incorrect_response"] + + def test_add_to_trials_table(self): + """Test adding HedTags column and data to a trials table.""" + nwbfile = mock_NWBFile() + hed_metadata = HedMetadata(hed_schema_version="8.2.0") + nwbfile.add_lab_meta_data(hed_metadata) + + nwbfile.add_trial_column("HED", "HED annotations for each trial", col_cls=HedAnnotations) + nwbfile.add_trial(start_time=0.0, stop_time=1.0, HED="animal_target, correct_response") + nwbfile.add_trial(start_time=2.0, 
stop_time=3.0, HED="animal_target, incorrect_response") + + assert isinstance(nwbfile.trials["HED"], HedAnnotations) + assert nwbfile.trials["HED"][0] == "animal_target, correct_response" + assert nwbfile.trials["HED"][1] == "animal_target, incorrect_response" + + +class TestHedTagsSimpleRoundtrip(TestCase): + """Simple roundtrip test for HedTags.""" + + def setUp(self): + self.path = "test.nwb" + + def tearDown(self): + remove_test_file(self.path) + + def test_roundtrip(self): + """ + Add a HedTags to an NWBFile, write it to file, read the file, and test that the HedTags from the + file matches the original HedTags. + """ + nwbfile = mock_NWBFile() + hed_metadata = HedMetadata(hed_schema_version="8.2.0") + nwbfile.add_lab_meta_data(hed_metadata) + + nwbfile.add_trial_column("HED", "HED annotations for each trial", col_cls=HedAnnotations) + nwbfile.add_trial(start_time=0.0, stop_time=1.0, HED="animal_target, correct_response") + nwbfile.add_trial(start_time=2.0, stop_time=3.0, HED="animal_target, incorrect_response") + + with NWBHDF5IO(self.path, mode="w") as io: + io.write(nwbfile) + + with NWBHDF5IO(self.path, mode="r", load_namespaces=True) as io: + read_nwbfile = io.read() + read_hed_annotations = read_nwbfile.trials["HED"] + assert isinstance(read_hed_annotations, HedAnnotations) + # read_nwbfile.trials["hed_tags"][0] is read as a numpy array + assert read_hed_annotations[0] == "animal_target, correct_response" + assert read_hed_annotations[1] == "animal_target, incorrect_response" + + +# class TestHedTagsRoundtripPyNWB(NWBH5IOFlexMixin, TestCase): +# """Complex, more complete roundtrip test for HedTags using pynwb.testing infrastructure.""" + +# def getContainerType(self): +# return "HedTags" + +# def addContainer(self): +# self.nwbfile = HedNWBFile( +# session_description="session_description", +# identifier=str(uuid4()), +# session_start_time=datetime(1970, 1, 1, tzinfo=tzlocal()), +# hed_schema_version="8.2.0", +# ) + +# 
self.nwbfile.add_trial_column("hed_tags", "HED tags for each trial", col_cls=HedTags, index=True) +# self.nwbfile.add_trial(start_time=0.0, stop_time=1.0, hed_tags=["animal_target", "correct_response"]) +# self.nwbfile.add_trial(start_time=2.0, stop_time=3.0, hed_tags=["animal_target", "incorrect_response"]) + +# def getContainer(self, nwbfile: NWBFile): +# return nwbfile.trials["hed_tags"].target diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py new file mode 100644 index 0000000..b4b4b7b --- /dev/null +++ b/src/spec/create_extension_spec.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +import os.path + +from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBDatasetSpec, NWBGroupSpec, NWBAttributeSpec + +# TODO: import other spec classes as needed +# from pynwb.spec import , NWBLinkSpec, NWBDtypeSpec, NWBRefSpec + + +def main(): + # these arguments were auto-generated from your cookiecutter inputs + ns_builder = NWBNamespaceBuilder( + name="""ndx-hed""", + version="""0.1.0""", + doc="""NWB extension for HED data""", + author=[ + "Ryan Ly", + "Oliver Ruebel", + "Kay Robbins", + "Ian Callanan", + ], + contact=[ + "rly@lbl.gov", + "oruebel@lbl.gov", + "kay.robbins@utsa.edu", + "ianrcallanan@gmail.com", + ], + ) + + # TODO: specify either the neurodata types that are used by the extension + # or the namespaces that contain the neurodata types used. Including the + # namespace will include all neurodata types in that namespace. + # This is similar to specifying the Python modules that need to be imported + # to use your new data types. 
+ # ns_builder.include_type("ElectricalSeries", namespace="core") + ns_builder.include_namespace("core") + + # TODO: define your new data types + # see https://pynwb.readthedocs.io/en/latest/extensions.html#extending-nwb + # for more information + hed_annotations = NWBDatasetSpec( + neurodata_type_def="HedAnnotations", + neurodata_type_inc="VectorData", + doc=("An extension of VectorData for Hierarchical Event Descriptor (HED) tags. If HED tags are used, " + "the HED schema version must be specified in the NWB file using the HedMetadata type."), + dtype="text", + ) + + hed_metadata = NWBGroupSpec( + neurodata_type_def="HedMetadata", + neurodata_type_inc="LabMetaData", + name="HedMetadata", # fixed name + doc=("An extension of LabMetaData to store the Hierarchical Event Descriptor (HED) schema version. " + "TODO When merged with core, " + "this will no longer inherit from LabMetaData but from NWBContainer and be placed " + "optionally in /general."), + attributes=[ + NWBAttributeSpec( + name="hed_schema_version", + doc=( + "The version of the HED schema used to validate the HED tags, e.g., '8.2.0'. " + "Required if HED tags are used in the NWB file." + ), + dtype="text", + required=True, + ), + ], + ) + + # TODO: add all of your new data types to this list + new_data_types = [hed_annotations, hed_metadata] + + # export the spec to yaml files in the spec folder + output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "spec")) + export_spec(ns_builder, new_data_types, output_dir) + print("Spec files generated. Please make sure to run `pip install .` to load the changes.") + + +if __name__ == "__main__": + # usage: python create_extension_spec.py + main()