From 9a6148cb7c9dc01a8e22218723b834b4a218a9ac Mon Sep 17 00:00:00 2001 From: rly Date: Mon, 9 Sep 2024 16:57:19 -0700 Subject: [PATCH] Update to use latest template with gh actions --- .github/workflows/check_external_links.yml | 32 ++++ .github/workflows/codespell.yml | 11 +- .github/workflows/ruff.yml | 14 ++ .github/workflows/run_all_tests.yml | 180 +++++++++++++++++++++ .github/workflows/run_coverage.yml | 57 +++++++ .github/workflows/validate_schema.yml | 22 +++ .pre-commit-config.yaml | 8 +- LICENSE.txt | 2 +- NEXTSTEPS.md | 129 ++++++++++----- docs/make.bat | 70 ++++---- docs/source/_static/theme_overrides.css | 13 ++ docs/source/conf.py | 108 +++++-------- pyproject.toml | 44 ++--- requirements-dev.txt | 23 ++- requirements-min.txt | 5 +- spec/ndx-events.extensions.yaml | 10 +- src/pynwb/ndx_events/__init__.py | 5 +- src/spec/create_extension_spec.py | 2 +- 18 files changed, 534 insertions(+), 201 deletions(-) create mode 100644 .github/workflows/check_external_links.yml create mode 100644 .github/workflows/ruff.yml create mode 100644 .github/workflows/run_all_tests.yml create mode 100644 .github/workflows/run_coverage.yml create mode 100644 .github/workflows/validate_schema.yml create mode 100644 docs/source/_static/theme_overrides.css diff --git a/.github/workflows/check_external_links.yml b/.github/workflows/check_external_links.yml new file mode 100644 index 0000000..9dd1a84 --- /dev/null +++ b/.github/workflows/check_external_links.yml @@ -0,0 +1,32 @@ +name: Check Sphinx external links +on: + push: + schedule: + - cron: '0 5 * * 0' # once every Sunday at midnight ET + workflow_dispatch: + +jobs: + check-external-links: + name: Check for broken Sphinx external links + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install Sphinx dependencies and package + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements-dev.txt + python -m pip install . 
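Annotation: the linkcheck step that follows in this workflow can also be run outside of CI. Below is a minimal sketch, assuming the requirements-dev.txt dependencies (Sphinx, hdmf-docutils) are installed and the script is run from the repository root; the `docs/test_build` output path simply mirrors the workflow.

```python
"""Reproduce the Sphinx linkcheck build from check_external_links.yml locally."""
import os
import sys

from sphinx.cmd.build import build_main

# run_doc_autogen in docs/source/conf.py expects the spec files in ../spec/,
# so build from inside the docs/ directory, exactly as the workflow does.
os.chdir("docs")

# Equivalent to: sphinx-build -b linkcheck ./source ./test_build
exit_code = build_main(["-b", "linkcheck", "./source", "./test_build"])
sys.exit(exit_code)
```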
+ + - name: Check Sphinx external links + run: | + cd docs # run_doc_autogen assumes spec is found in ../spec/ + sphinx-build -b linkcheck ./source ./test_build diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 243ba8c..314b085 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -1,19 +1,14 @@ ---- name: Codespell - on: push: - branches: [main] - pull_request: - branches: [main] + workflow_dispatch: jobs: codespell: name: Check for spelling errors runs-on: ubuntu-latest - steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Codespell - uses: codespell-project/actions-codespell@v1 + uses: codespell-project/actions-codespell@v2 diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml new file mode 100644 index 0000000..9b4f05d --- /dev/null +++ b/.github/workflows/ruff.yml @@ -0,0 +1,14 @@ +name: Ruff +on: + push: + workflow_dispatch: + +jobs: + ruff: + name: Check for style errors and common problems + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Ruff + uses: chartboost/ruff-action@v1 diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml new file mode 100644 index 0000000..f2600fd --- /dev/null +++ b/.github/workflows/run_all_tests.yml @@ -0,0 +1,180 @@ +name: Run all tests +on: + push: + schedule: + - cron: '0 5 * * 0' # once every Sunday at midnight ET + workflow_dispatch: + +jobs: + run-all-tests: + name: ${{ matrix.name }} + runs-on: ${{ matrix.os }} + defaults: + run: + shell: bash + concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} + cancel-in-progress: true + strategy: + fail-fast: false + matrix: + include: + - { name: linux-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: ubuntu-latest } + - { name: linux-python3.8 , requirements: pinned , python-ver: "3.8" , os: ubuntu-latest } + - { name: linux-python3.9 , requirements: pinned , python-ver: "3.9" , os: ubuntu-latest } + - { name: linux-python3.10 , requirements: pinned , python-ver: "3.10", os: ubuntu-latest } + - { name: linux-python3.11 , requirements: pinned , python-ver: "3.11", os: ubuntu-latest } + - { name: linux-python3.12 , requirements: pinned , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: windows-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: windows-latest } + - { name: windows-python3.8 , requirements: pinned , python-ver: "3.8" , os: windows-latest } + - { name: windows-python3.9 , requirements: pinned , python-ver: "3.9" , os: windows-latest } + - { name: windows-python3.10 , requirements: pinned , python-ver: "3.10", os: windows-latest } + - { name: windows-python3.11 , requirements: pinned , python-ver: "3.11", os: windows-latest } + - { name: windows-python3.12 , requirements: pinned , python-ver: "3.12", os: windows-latest } + - { name: windows-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: windows-latest } + - { name: macos-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: macos-latest } + - { name: macos-python3.8 , requirements: pinned , python-ver: "3.8" , os: macos-latest } + - { name: macos-python3.9 , requirements: pinned , python-ver: "3.9" , os: macos-latest } + - { name: macos-python3.10 , requirements: pinned , python-ver: "3.10", os: macos-latest } + - { name: macos-python3.11 , 
requirements: pinned , python-ver: "3.11", os: macos-latest } + - { name: macos-python3.12 , requirements: pinned , python-ver: "3.12", os: macos-latest } + - { name: macos-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: macos-latest } + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-ver }} + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + python -m pip list + python -m pip check + + - name: Install run requirements (minimum) + if: ${{ matrix.requirements == 'minimum' }} + run: | + python -m pip install -r requirements-min.txt -r requirements-dev.txt + python -m pip install . + + - name: Install run requirements (pinned) + if: ${{ matrix.requirements == 'pinned' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install . + + - name: Install run requirements (upgraded) + if: ${{ matrix.requirements == 'upgraded' }} + run: | + python -m pip install -r requirements-dev.txt + # force upgrade of all dependencies to latest versions within allowed range + python -m pip install -U --upgrade-strategy eager . + + - name: Run tests + run: | + pytest -v + + - name: Build wheel and source distribution + run: | + python -m pip install --upgrade build + python -m build + ls -1 dist + + - name: Test installation from a wheel (POSIX) + if: ${{ matrix.os != 'windows-latest' }} + run: | + python -m venv test-wheel-env + source test-wheel-env/bin/activate + python -m pip install dist/*-none-any.whl + python -c "import ndx_events" + + - name: Test installation from a wheel (windows) + if: ${{ matrix.os == 'windows-latest' }} + run: | + python -m venv test-wheel-env + test-wheel-env/Scripts/activate.bat + python -m pip install dist/*-none-any.whl + python -c "import ndx_events" + + run-all-tests-on-conda: + name: ${{ matrix.name }} + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} # needed for conda environment to work + concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} + cancel-in-progress: true + strategy: + fail-fast: false + matrix: + include: + - { name: conda-linux-python3.8-minimum , requirements: minimum , python-ver: "3.8" , os: ubuntu-latest } + - { name: conda-linux-python3.8 , requirements: pinned , python-ver: "3.8" , os: ubuntu-latest } + - { name: conda-linux-python3.9 , requirements: pinned , python-ver: "3.9" , os: ubuntu-latest } + - { name: conda-linux-python3.10 , requirements: pinned , python-ver: "3.10", os: ubuntu-latest } + - { name: conda-linux-python3.11 , requirements: pinned , python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-python3.12 , requirements: pinned , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-linux-python3.12-upgraded , requirements: upgraded , python-ver: "3.12", os: ubuntu-latest } + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + auto-activate-base: true + activate-environment: true + python-version: ${{ matrix.python-ver }} + + - name: Install build dependencies + run: | + conda config --set always_yes yes --set changeps1 no + conda info + conda config --show-sources + conda list --show-channel-urls + + - name: Install run requirements (minimum) + 
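Annotation: the build-wheel and install-from-wheel steps shown above in the run-all-tests job can be reproduced locally with a short script. A minimal sketch, assuming a POSIX virtual-environment layout (`bin/` inside the venv), that the `build` package is installed, and that it is run from the repository root; the `test-wheel-env` name mirrors the workflow.

```python
"""Local smoke test: build the wheel, install it into a scratch venv, and import it."""
import glob
import subprocess
import sys

# Build the wheel and source distribution into dist/ (same as `python -m build`).
subprocess.run([sys.executable, "-m", "build"], check=True)

# Create a scratch virtual environment, as the workflow does.
subprocess.run([sys.executable, "-m", "venv", "test-wheel-env"], check=True)
venv_python = "test-wheel-env/bin/python"  # on Windows: test-wheel-env\Scripts\python.exe

# Install the freshly built wheel and confirm that the package imports.
wheel = glob.glob("dist/*-none-any.whl")[0]
subprocess.run([venv_python, "-m", "pip", "install", wheel], check=True)
subprocess.run([venv_python, "-c", "import ndx_events"], check=True)
```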
if: ${{ matrix.requirements == 'minimum' }} + run: | + python -m pip install -r requirements-min.txt -r requirements-dev.txt + python -m pip install . + + - name: Install run requirements (pinned) + if: ${{ matrix.requirements == 'pinned' }} + run: | + python -m pip install -r requirements-dev.txt + python -m pip install . + + - name: Install run requirements (upgraded) + if: ${{ matrix.requirements == 'upgraded' }} + run: | + python -m pip install -r requirements-dev.txt + # force upgrade of all dependencies to latest versions within allowed range + python -m pip install -U --upgrade-strategy eager . + + - name: Run tests + run: | + pytest -v + + - name: Build wheel and source distribution + run: | + python -m pip install --upgrade build + python -m build + ls -1 dist + + - name: Test installation from a wheel (POSIX) + run: | + python -m venv test-wheel-env + source test-wheel-env/bin/activate + python -m pip install dist/*-none-any.whl + python -c "import ndx_events" diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml new file mode 100644 index 0000000..5aa6f9e --- /dev/null +++ b/.github/workflows/run_coverage.yml @@ -0,0 +1,57 @@ +name: Run code coverage +on: + push: + workflow_dispatch: + +jobs: + run-coverage: + name: ${{ matrix.os }} + runs-on: ${{ matrix.os }} + # TODO handle forks + # run pipeline on either a push event or a PR event on a fork + # if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name + defaults: + run: + shell: bash + concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.os }} + cancel-in-progress: true + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + env: # used by codecov-action + OS: ${{ matrix.os }} + PYTHON: '3.12' + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # tags are required to determine the version + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements-dev.txt + + - name: Install package + run: | + python -m pip install . 
+ python -m pip list + + - name: Run tests and generate coverage report + run: | + pytest --cov --cov-report=xml --cov-report=term # codecov uploader requires xml format + + # TODO uncomment after setting up repo on codecov.io and adding token + # - name: Upload coverage to Codecov + # uses: codecov/codecov-action@v4 + # with: + # fail_ci_if_error: true + # file: ./coverage.xml + # env: + # CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/validate_schema.yml b/.github/workflows/validate_schema.yml new file mode 100644 index 0000000..33e805f --- /dev/null +++ b/.github/workflows/validate_schema.yml @@ -0,0 +1,22 @@ +name: Validate schema + +on: [push, pull_request, workflow_dispatch] + +jobs: + validate: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.12 + uses: actions/setup-python@v4 + with: + python-version: "3.12" + - name: Install HDMF + run: | + pip install hdmf + - name: Download latest nwb schema language specification + run: | + curl -L https://raw.githubusercontent.com/NeurodataWithoutBorders/nwb-schema/dev/nwb.schema.json -o nwb.schema.json + - name: Validate schema specification + run: | + validate_hdmf_spec spec -m nwb.schema.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 902b3b2..827eeb6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ # NOTE: run `pre-commit autoupdate` to update hooks to latest version repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-yaml - id: end-of-file-fixer @@ -13,15 +13,15 @@ repos: args: [--pytest-test-first] - id: check-docstring-first - repo: https://github.com/psf/black - rev: 23.12.0 + rev: 24.4.2 hooks: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.8 + rev: v0.4.10 hooks: - id: ruff - repo: https://github.com/codespell-project/codespell - rev: v2.2.6 + rev: v2.3.0 hooks: - id: codespell additional_dependencies: diff --git a/LICENSE.txt b/LICENSE.txt index 8850436..f803560 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2023, Ryan Ly +Copyright (c) 2024, Ryan Ly All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/NEXTSTEPS.md b/NEXTSTEPS.md index 454ad7e..0377ffa 100644 --- a/NEXTSTEPS.md +++ b/NEXTSTEPS.md @@ -1,32 +1,67 @@ -# Next Steps for ndx-events Extension for NWB:N -## Creating Your Extension -1. In a terminal, change directory into the new ndx-events directory. +# Next Steps for ndx-events Extension for NWB + +## Creating Your Extension -2. Add any packages required by your extension to `requirements.txt` and `setup.py`. +1. In a terminal, change directory into the new ndx-events directory: `cd ndx-events` -3. Run `python -m pip install -r requirements.txt` to install the `pynwb` package -and any other packages required by your extension. +2. Add any packages required by your extension to the `dependencies` key in `pyproject.toml`. -4. Modify `src/create_extension_spec.py` to define your extension. +3. Run `python -m pip install -e .` to install your new extension Python package +and any other packages required to develop, document, and run your extension. - - If you want to create any custom classes for interacting with the extension, - add them to the `src/pynwb`. 
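Annotation: the validate_schema workflow above can likewise be run locally. A minimal sketch, assuming hdmf (which provides the `validate_hdmf_spec` command) is installed and the script is run from the repository root; it mirrors the curl and validation steps of the workflow.

```python
"""Validate the extension spec against the NWB schema language specification."""
import subprocess
import urllib.request

SCHEMA_URL = (
    "https://raw.githubusercontent.com/NeurodataWithoutBorders/"
    "nwb-schema/dev/nwb.schema.json"
)

# Download the latest NWB schema language specification (the curl step above).
urllib.request.urlretrieve(SCHEMA_URL, "nwb.schema.json")

# Validate the YAML files in spec/ against it (the validate_hdmf_spec step above).
subprocess.run(["validate_hdmf_spec", "spec", "-m", "nwb.schema.json"], check=True)
```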
- - If present, the `src/pynwb` folder MUST contain the following: - - `ndx-events` - Folder with the sources of the NWB extension - - `ndx-events/__init__.py` - Python file that may be empty - - If present, the `src/pynwb` folder MAY contain the following files/folders: - - `test` - Folder for unit tests for the extensions - - `jupyter_widgets` - Optional package with custom widgets for use with Jupyter +4. Modify `src/spec/create_extension_spec.py` to define your extension. 5. Run `python src/spec/create_extension_spec.py` to generate the `spec/ndx-events.namespace.yaml` and `spec/ndx-events.extensions.yaml` files. -6. You may need to modify `setup.py` and re-run `python setup.py install` if you +6. Define API classes for your new extension data types. + + - As a starting point, `src/pynwb/ndx_events/__init__.py` includes an + example for how to use + the `pynwb.get_class` to generate a basic Python class for your new extension data + type. This class contains a constructor and properties for the new data type. + - Instead of using `pynwb.get_class`, you can define your own custom class for the + new type, which will allow you to customize the class methods, customize the + object mapping, and create convenience functions. See the + [Extending NWB tutorial](https://pynwb.readthedocs.io/en/stable/tutorials/general/extensions.html) + for more details. + +7. Define tests for your new extension data types in +`src/pynwb/ndx_events/tests` or `src/matnwb/tests`. +A test for the example `TetrodeSeries` data type is provided as a reference and should be +replaced or removed. + + - Python tests should be runnable by executing [`pytest`](https://docs.pytest.org/en/latest/) + from the root of the extension directory. Use of PyNWB testing infrastructure from + `pynwb.testing` is encouraged (see + [documentation](https://pynwb.readthedocs.io/en/stable/pynwb.testing.html)). + - Creating both **unit tests** (e.g., testing initialization of new data type classes and + new functions) and **integration tests** (e.g., write the new data types to file, read + the file, and confirm the read data types are equal to the written data types) is + highly encouraged. + - By default, to aid with debugging, the project is configured NOT to run code coverage as + part of the tests. + Code coverage reporting is useful to help with creation of tests and report test coverage. + However, with this option enabled, breakpoints for debugging with pdb are being ignored. + To enable this option for code coverage reporting, uncomment out the following line in + your `pyproject.toml`: [line](https://github.com/nwb-extensions/ndx-template/blob/11ae225b3fd3934fa3c56e6e7b563081793b3b43/%7B%7B%20cookiecutter.namespace%20%7D%7D/pyproject.toml#L82-L83 +) + +7. (Optional) Define custom visualization widgets for your new extension data types in +`src/pynwb/ndx_events/widgets` so that the visualizations can be displayed with +[nwbwidgets](https://github.com/NeurodataWithoutBorders/nwbwidgets). +You will also need to update the `vis_spec` dictionary in +`src/pynwb/ndx_events/widgets/__init__.py` so that +nwbwidgets can find your custom visualizations. + +8. You may need to modify `pyproject.toml` and re-run `python -m pip install -e .` if you use any dependencies. +9. Update the `CHANGELOG.md` regularly to document changes to your extension. + ## Documenting and Publishing Your Extension to the Community @@ -48,16 +83,26 @@ your extension. 7. Add a license file. Permissive licenses should be used if possible. 
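Annotation: as a companion to the testing step described earlier in this NEXTSTEPS.md diff, here is a minimal sketch of a unit test for one of the new data types. It assumes the ndx_events package is installed and that the auto-generated TimestampVectorData class accepts the usual VectorData constructor arguments (name, description, data); the assertions are illustrative, not taken from the repository's actual tests.

```python
"""Example unit test for TimestampVectorData using the PyNWB testing utilities."""
from pynwb.testing import TestCase

from ndx_events import TimestampVectorData


class TestTimestampVectorData(TestCase):
    def test_constructor(self):
        # Timestamps are stored in seconds; the spec fixes the unit attribute.
        data = TimestampVectorData(
            name="timestamp",
            description="example event timestamps",
            data=[0.25, 1.5, 3.0],
        )
        self.assertEqual(data.name, "timestamp")
        self.assertEqual(data.unit, "seconds")
```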
**A [BSD license](https://opensource.org/licenses/BSD-3-Clause) is recommended.** -8. Make a release for the extension on GitHub with the version number specified. e.g. if version is 0.1.0, then this page should exist: https://github.com/rly/ndx-events/releases/tag/0.1.0 . For instructions on how to make a release on GitHub see [here](https://help.github.com/en/github/administering-a-repository/creating-releases). +8. Update the `CHANGELOG.md` to document changes to your extension. -9. Publish your updated extension on PyPi. - - Follow these directions: https://packaging.python.org/tutorials/packaging-projects/ - - You may need to modify `setup.py` - - If your extension version is 0.1.0, then this page should exist: https://pypi.org/project/ndx-events/0.1.0 +8. Push your repository to GitHub. A default set of GitHub Actions workflows is set up to +test your code on Linux, Windows, Mac OS, and Linux using conda; upload code coverage +stats to codecov.io; check for spelling errors; check for style errors; and check for broken +links in the documentation. For the code coverage workflow to work, you will need to +set up the repo on codecov.io and uncomment the "Upload coverage to Codecov" step +in `.github/workflows/run_coverage.yml`. - Once your GitHub release and ``setup.py`` are ready, publishing on PyPi: +8. Make a release for the extension on GitHub with the version number specified. e.g. if version is 0.3.0, then this page should exist: https://github.com/rly/ndx-events/releases/tag/0.3.0 . For instructions on how to make a release on GitHub see [here](https://help.github.com/en/github/administering-a-repository/creating-releases). + +9. Publish your updated extension on [PyPI](https://pypi.org/). + - Follow these directions: https://packaging.python.org/en/latest/tutorials/packaging-projects/ + - You may need to modify `pyproject.toml` + - If your extension version is 0.3.0, then this page should exist: https://pypi.org/project/ndx-events/0.3.0 + + Once your GitHub release and `pyproject.toml` are ready, publishing on PyPI: ```bash - python setup.py sdist bdist_wheel + python -m pip install --upgrade build twine + python -m build twine upload dist/* ``` @@ -81,28 +126,26 @@ with information on where to find your NWB extension. - src: URL for the main page of the public repository (e.g. on GitHub, BitBucket, GitLab) that contains the sources of the extension - pip: URL for the main page of the extension on PyPI - license: name of the license of the extension - - maintainers: list of GitHub - usernames of those who will reliably maintain the extension - - - - You may copy and modify the following YAML that was auto-generated: -```yaml -name: ndx-events -version: 0.1.0 -src: https://github.com/rly/ndx-events -pip: https://pypi.org/project/ndx-events/ -license: BSD 3-Clause -maintainers: - - rly -``` + - maintainers: list of GitHub usernames of those who will reliably maintain the extension + - You may copy and modify the following YAML that was auto-generated: + + ```yaml + name: ndx-events + version: 0.3.0 + src: https://github.com/rly/ndx-events + pip: https://pypi.org/project/ndx-events/ + license: BSD-3 + maintainers: + - rly + ``` 14. Edit `staged-extensions/ndx-events/README.md` to add information about your extension. You may copy it from `ndx-events/README.md`. ```bash -cp ndx-events/README.md staged-extensions/ndx-events/README.md -``` + cp ndx-events/README.md staged-extensions/ndx-events/README.md + ``` 15. Add and commit your changes to Git and push your changes to GitHub. 
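Annotation: the PyPI release commands listed above (`python -m build`, `twine upload dist/*`) can also be scripted. A minimal sketch, assuming `build` and `twine` are installed and twine credentials (for example a PyPI API token) are already configured.

```python
"""Build the sdist and wheel, then upload them to PyPI."""
import glob
import subprocess
import sys

# Build the source distribution and wheel into dist/ (python -m build).
subprocess.run([sys.executable, "-m", "build"], check=True)

# Upload everything in dist/ (twine upload dist/*).
subprocess.run(["twine", "upload", *glob.glob("dist/*")], check=True)
```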
``` @@ -117,7 +160,7 @@ Mac, and Linux. The technical team will review your extension shortly after and provide feedback and request changes, if any. 17. When your pull request is merged, a new repository, called -ndx-events-feedstock will be created in the nwb-extensions +ndx-events-record will be created in the nwb-extensions GitHub organization and you will be added as a maintainer for that repository. @@ -125,12 +168,12 @@ GitHub organization and you will be added as a maintainer for that repository. 1. Update your ndx-events GitHub repository. -2. Publish your updated extension on PyPi. +2. Publish your updated extension on PyPI. -3. Fork the ndx-events-feedstock repository on GitHub. +3. Fork the ndx-events-record repository on GitHub. 4. Open a pull request to test the changes automatically. The technical team will review your changes shortly after and provide feedback and request changes, - if any. +if any. 5. Your updated extension is approved. diff --git a/docs/make.bat b/docs/make.bat index 9534b01..747ffb7 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -1,35 +1,35 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=source -set BUILDDIR=build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css new file mode 100644 index 0000000..63ee6cc --- /dev/null +++ b/docs/source/_static/theme_overrides.css @@ -0,0 +1,13 @@ +/* override table width restrictions */ +@media screen and (min-width: 767px) { + + .wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; + } + + .wy-table-responsive { + overflow: visible !important; + } +} diff --git a/docs/source/conf.py b/docs/source/conf.py index 80ce7a2..ac8c925 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,76 +1,46 @@ # Configuration file for the Sphinx documentation builder. # -# This file only contains a selection of the most common options. 
For a full -# list see the documentation: +# For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - - # -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information -project = 'ndx-events' -copyright = '2020, Ryan Ly' -author = 'Ryan Ly' - -# The short X.Y version -version = '0.2.0' - -# The full version, including alpha/beta/rc tags -release = 'alpha' +project = "ndx-events" +copyright = "2024, Ryan Ly" +author = "Ryan Ly" +version = "0.3.0" +release = "alpha" # -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. extensions = [ - 'sphinx.ext.ifconfig', - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', + "sphinx.ext.ifconfig", + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", ] -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'English' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. +templates_path = ["_templates"] exclude_patterns = [] +language = "en" # -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'alabaster' +html_theme = "alabaster" +html_static_path = ["_static"] -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". 
-html_static_path = ['_static'] +# -- Options for intersphinx extension --------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), +} -# -- Extension configuration ------------------------------------------------- -# -- Options for intersphinx extension --------------------------------------- ############################################################################ # CUSTOM CONFIGURATIONS ADDED BY THE NWB TOOL FOR GENERATING FORMAT DOCS ########################################################################### @@ -79,7 +49,12 @@ import textwrap # noqa: E402 # -- Options for intersphinx --------------------------------------------- -intersphinx_mapping = {'core': ('https://nwb-schema.readthedocs.io/en/latest/', None)} +intersphinx_mapping.update( + { + "core": ("https://nwb-schema.readthedocs.io/en/latest/", None), + "hdmf-common": ("https://hdmf-common-schema.readthedocs.io/en/latest/", None), + } +) # -- Generate sources from YAML--------------------------------------------------- # Always rebuild the source docs from YAML even if the folder with the source files already exists @@ -90,26 +65,32 @@ def run_doc_autogen(_): # Execute the autogeneration of Sphinx format docs from the YAML sources import sys import os + conf_file_dir = os.path.dirname(os.path.abspath(__file__)) sys.path.append(conf_file_dir) # Need so that generate format docs can find the conf_doc_autogen file from conf_doc_autogen import spec_output_dir if spec_doc_rebuild_always or not os.path.exists(spec_output_dir): - sys.path.append('./docs') # needed to enable import of generate_format docs + sys.path.append("./docs") # needed to enable import of generate_format docs from hdmf_docutils.generate_format_docs import main as generate_docs + generate_docs() def setup(app): - app.connect('builder-inited', run_doc_autogen) - app.add_stylesheet("theme_overrides.css") # overrides for wide tables in RTD theme + app.connect("builder-inited", run_doc_autogen) + # overrides for wide tables in RTD theme + try: + app.add_css_file("theme_overrides.css") # Used by newer Sphinx versions + except AttributeError: + app.add_stylesheet("theme_overrides.css") # Used by older version of Sphinx # -- Customize sphinx settings numfig = True -autoclass_content = 'both' +autoclass_content = "both" autodoc_docstring_signature = True -autodoc_member_order = 'bysource' +autodoc_member_order = "bysource" add_function_parentheses = False @@ -120,17 +101,16 @@ def setup(app): # LaTeX Sphinx options latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - 'papersize': 'letterpaper', - + "papersize": "letterpaper", # The font size ('10pt', '11pt' or '12pt'). - 'pointsize': '10pt', - + "pointsize": "10pt", # Additional stuff for the LaTeX preamble. 
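Annotation: a quick way to exercise the conf.py machinery above (including run_doc_autogen, which regenerates the format docs from the YAML spec on builder-inited) is to build the HTML docs locally. A minimal sketch, assuming the requirements-dev.txt dependencies are installed; it mirrors the `sphinx-build -M html source build` invocation used by docs/make.bat.

```python
"""Build the HTML documentation locally, regenerating sources from the YAML spec."""
import subprocess
import sys

# Run Sphinx in make-mode from inside docs/, the same call docs/make.bat makes.
# builder-inited triggers run_doc_autogen, which rebuilds the source pages
# from spec/ndx-events.namespace.yaml and spec/ndx-events.extensions.yaml.
subprocess.run(
    [sys.executable, "-m", "sphinx", "-M", "html", "source", "build"],
    cwd="docs",
    check=True,
)
```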
- 'preamble': textwrap.dedent( - ''' + "preamble": textwrap.dedent( + """ \\setcounter{tocdepth}{3} \\setcounter{secnumdepth}{6} \\usepackage{enumitem} \\setlistdepth{100} - '''), + """ + ), } diff --git a/pyproject.toml b/pyproject.toml index 6ef711f..4d73325 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,43 +44,42 @@ dependencies = [ "Discussions" = "https://github.com/rly/ndx-events/discussions" "Changelog" = "https://github.com/rly/ndx-events/CHANGELOG.md" -[tool.hatch.build] -include = [ - "src/pynwb", - "spec/ndx-events.extensions.yaml", - "spec/ndx-events.namespace.yaml", -] -exclude = [ - "src/pynwb/tests", -] - +# Include only the source code under `src/pynwb/ndx_events` and the spec files under `spec` +# in the wheel. [tool.hatch.build.targets.wheel] packages = [ "src/pynwb/ndx_events", "spec" ] +# Rewrite the path to the `spec` directory to `ndx_events/spec`. +# `ndx_events/__init__.py` will look there first for the spec files. +# The resulting directory structure within the wheel will be: +# ndx_events/ +# ├── __init__.py +# ├── spec +# └── widgets [tool.hatch.build.targets.wheel.sources] "spec" = "ndx_events/spec" +# The source distribution includes everything in the package except for the `src/matnwb` directory and +# git and github-related files. [tool.hatch.build.targets.sdist] -include = [ - "src/pynwb", - "spec/ndx-events.extensions.yaml", - "spec/ndx-events.namespace.yaml", - "docs", +exclude = [ + ".git*", + "src/matnwb", ] -exclude = [] [tool.pytest.ini_options] -addopts = "--cov --cov-report html" +# uncomment below to run pytest always with code coverage reporting. NOTE: breakpoints may not work +# addopts = "--cov --cov-report html" [tool.codespell] skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,hdmf-common-schema,./docs/_build/*,*.ipynb" [tool.coverage.run] branch = true -source = ["src/pynwb"] +source = ["ndx_events"] [tool.coverage.report] exclude_lines = [ @@ -94,19 +93,20 @@ preview = true exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb|docs/" [tool.ruff] -select = ["E", "F", "T100", "T201", "T203"] +lint.select = ["E", "F", "T100", "T201", "T203"] exclude = [ ".git", ".tox", "__pycache__", "build/", "dist/", + "docs/source/conf.py", ] line-length = 120 -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] +"src/pynwb/ndx_events/__init__.py" = ["E402", "F401"] "src/spec/create_extension_spec.py" = ["T201"] -"src/pynwb/tests/test_example_usage.py" = ["T201"] -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] max-complexity = 17 diff --git a/requirements-dev.txt b/requirements-dev.txt index 38eb48c..07e3a8b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,15 +1,14 @@ # pinned dependencies to reproduce an entire development environment to # run tests, check code style, and generate documentation -black==23.9.1 -codespell==2.2.6 -coverage==7.3.2 -hdmf==3.11.0 -hdmf-docutils==0.4.6 -pre-commit==3.4.0 -pynwb==2.5.0 -pytest==7.4.2 -pytest-cov==4.1.0 +black==24.4.2 +codespell==2.3.0 +coverage==7.5.4 +hdmf==3.14.4 +hdmf-docutils==0.4.7 +pre-commit==3.5.0 # latest pre-commit does not support py3.8 +pynwb==2.8.2 +pytest==8.2.2 +pytest-cov==5.0.0 +pytest-subtests==0.12.1 python-dateutil==2.8.2 -pytest-subtests==0.6.0 -ruff==0.0.292 -tox==4.11.3 +ruff==0.4.10 diff --git a/requirements-min.txt b/requirements-min.txt index 12925ce..d8142c7 100644 --- a/requirements-min.txt +++ b/requirements-min.txt @@ -1,4 +1,5 @@ # minimum versions of package dependencies for 
installation # these should match the minimum versions specified in pyproject.toml -pynwb==2.5.0 -hdmf==3.11.0 # required for bug fixes for generating some classes +# NOTE: it may be possible to relax these minimum requirements +pynwb==2.8.0 +hdmf==3.14.1 diff --git a/spec/ndx-events.extensions.yaml b/spec/ndx-events.extensions.yaml index d749773..34418c8 100644 --- a/spec/ndx-events.extensions.yaml +++ b/spec/ndx-events.extensions.yaml @@ -6,7 +6,7 @@ datasets: - num_times shape: - null - doc: A VectorData that stores timestamps in seconds. + doc: A 1-dimensional VectorData that stores timestamps in seconds. attributes: - name: unit dtype: text @@ -24,7 +24,7 @@ datasets: - num_events shape: - null - doc: A VectorData that stores durations in seconds. + doc: A 1-dimensional VectorData that stores durations in seconds. attributes: - name: unit dtype: text @@ -54,9 +54,9 @@ groups: neurodata_type_inc: DynamicTable default_name: EventsTable doc: A column-based table to store information about events (event instances), one - event per row. Each event must have an event_type, which is a row in the EventTypesTable. - Additional columns may be added to store metadata about each event, such as the - duration of the event, or a text value of the event. + event per row. Each event must have an event_type, which is a reference to a row + in the EventTypesTable. Additional columns may be added to store metadata about + each event, such as the duration of the event, or a text value of the event. datasets: - name: timestamp neurodata_type_inc: TimestampVectorData diff --git a/src/pynwb/ndx_events/__init__.py b/src/pynwb/ndx_events/__init__.py index 1a58541..7f462bc 100644 --- a/src/pynwb/ndx_events/__init__.py +++ b/src/pynwb/ndx_events/__init__.py @@ -18,10 +18,7 @@ # Load the namespace load_namespaces(str(__spec_path)) -# TODO: Define your classes here to make them accessible at the package level. -# Either have PyNWB generate a class from the spec using `get_class` as shown -# below or write a custom class and register it using the class decorator -# `@register_class("TetrodeSeries", "ndx-hed")` +# Define the new classes Task = get_class("Task", "ndx-events") TimestampVectorData = get_class("TimestampVectorData", "ndx-events") DurationVectorData = get_class("DurationVectorData", "ndx-events") diff --git a/src/spec/create_extension_spec.py b/src/spec/create_extension_spec.py index 32a5f60..be63fe5 100644 --- a/src/spec/create_extension_spec.py +++ b/src/spec/create_extension_spec.py @@ -26,7 +26,7 @@ def main(): name="unit", dtype="text", doc="The unit of measurement for the timestamps, fixed to 'seconds'.", - value="xseconds", + value="seconds", ), # NOTE: this requires all timestamps to have the same resolution which may not be true # if they come from different acquisition systems or processing pipelines...
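Annotation: the corrected `unit` attribute above belongs to the TimestampVectorData dataset spec. The sketch below shows roughly how that spec is expressed with the pynwb.spec helpers used by src/spec/create_extension_spec.py; the surrounding fields (dtype, dims, doc) are reconstructed from the YAML shown earlier in this patch and may differ slightly from the actual file.

```python
"""Sketch of the TimestampVectorData spec with the corrected fixed unit value."""
from pynwb.spec import NWBAttributeSpec, NWBDatasetSpec

# A 1-dimensional VectorData of timestamps with a unit fixed to 'seconds'.
timestamp_vector_data = NWBDatasetSpec(
    neurodata_type_def="TimestampVectorData",
    neurodata_type_inc="VectorData",
    doc="A 1-dimensional VectorData that stores timestamps in seconds.",
    dtype="float",
    dims=["num_times"],
    shape=[None],
    attributes=[
        NWBAttributeSpec(
            name="unit",
            dtype="text",
            doc="The unit of measurement for the timestamps, fixed to 'seconds'.",
            value="seconds",
        ),
    ],
)
```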