From 01ad1e85a3e2db48c198b2d0d32096152ffba295 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 13:48:50 -0500 Subject: [PATCH 1/9] Add default value --- element_array_ephys/ephys_acute.py | 2 +- element_array_ephys/ephys_chronic.py | 2 +- element_array_ephys/ephys_no_curation.py | 2 +- element_array_ephys/ephys_precluster.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/element_array_ephys/ephys_acute.py b/element_array_ephys/ephys_acute.py index d7abec62..e86865e4 100644 --- a/element_array_ephys/ephys_acute.py +++ b/element_array_ephys/ephys_acute.py @@ -910,7 +910,7 @@ def make(self, key): raise ValueError(f"Unknown task mode: {task_mode}") creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - self.insert1({**key, "clustering_time": creation_time}) + self.insert1({**key, "clustering_time": creation_time, "package_version":""}) @schema diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py index 9c2bc853..f121af1d 100644 --- a/element_array_ephys/ephys_chronic.py +++ b/element_array_ephys/ephys_chronic.py @@ -839,7 +839,7 @@ def make(self, key): raise ValueError(f"Unknown task mode: {task_mode}") creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - self.insert1({**key, "clustering_time": creation_time}) + self.insert1({**key, "clustering_time": creation_time, "package_version":""}) @schema diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 70c4d079..bc297044 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -916,7 +916,7 @@ def make(self, key): raise ValueError(f"Unknown task mode: {task_mode}") creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - self.insert1({**key, "clustering_time": creation_time}) + self.insert1({**key, "clustering_time": creation_time, "package_version": ""}) @schema diff --git a/element_array_ephys/ephys_precluster.py b/element_array_ephys/ephys_precluster.py index 9308d7bc..8c573a4c 100644 --- a/element_array_ephys/ephys_precluster.py +++ b/element_array_ephys/ephys_precluster.py @@ -518,7 +518,7 @@ def make(self, key): else: raise ValueError(f"Unknown task mode: {task_mode}") - self.insert1({**key, "precluster_time": creation_time}) + self.insert1({**key, "precluster_time": creation_time, "package_version": ""}) @schema @@ -832,7 +832,7 @@ def make(self, key): else: raise ValueError(f"Unknown task mode: {task_mode}") - self.insert1({**key, "clustering_time": creation_time}) + self.insert1({**key, "clustering_time": creation_time, "package_version": ""}) @schema From d4f7fe080eb5fe94f518c2db5b17ffef4448dee2 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 13:53:59 -0500 Subject: [PATCH 2/9] Update version and changelog --- CHANGELOG.md | 3 ++- element_array_ephys/version.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 11579ee1..a96a10c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,9 +3,10 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
-## [Unreleased] - 2023-06-23 +## [0.2.11] - 2023-06-29 + Update - Improve kilosort triggering routine - better logging, remove temporary files, robust resumable processing ++ Add - Null value for `package_version` to patch bug ## [0.2.10] - 2023-05-26 diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index d2ea9f20..122aedf1 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,2 +1,2 @@ """Package metadata.""" -__version__ = "0.2.10" +__version__ = "0.2.11" From 71bb8e2a489d044a01c4328027630c5f8f34b6cf Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 15:36:55 -0500 Subject: [PATCH 3/9] Update GitHub Actions --- .github/workflows/development.yaml | 177 ------------------ .github/workflows/release.yaml | 27 +++ .github/workflows/test.yaml | 34 ++++ .../workflows/u24_element_before_release.yml | 17 -- .../workflows/u24_element_release_call.yml | 29 --- .../workflows/u24_element_tag_to_release.yml | 14 -- 6 files changed, 61 insertions(+), 237 deletions(-) delete mode 100644 .github/workflows/development.yaml create mode 100644 .github/workflows/release.yaml create mode 100644 .github/workflows/test.yaml delete mode 100644 .github/workflows/u24_element_before_release.yml delete mode 100644 .github/workflows/u24_element_release_call.yml delete mode 100644 .github/workflows/u24_element_tag_to_release.yml diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml deleted file mode 100644 index 9ad02579..00000000 --- a/.github/workflows/development.yaml +++ /dev/null @@ -1,177 +0,0 @@ -name: Development -on: - push: - branches: - - '**' # every branch - - '!gh-pages' # exclude gh-pages branch - - '!stage*' # exclude branches beginning with stage - tags: - - '\d+\.\d+\.\d+' # only semver tags - pull_request: - branches: - - '**' # every branch - - '!gh-pages' # exclude gh-pages branch - - '!stage*' # exclude branches beginning with stage -jobs: - test-changelog: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Get changelog entry - id: changelog_reader - uses: guzman-raphael/changelog-reader-action@v5 - with: - path: ./CHANGELOG.md - - name: Verify changelog parsing - env: - TAG_NAME: ${{steps.changelog_reader.outputs.version}} - RELEASE_NAME: Release ${{steps.changelog_reader.outputs.version}} - BODY: ${{steps.changelog_reader.outputs.changes}} - PRERELEASE: ${{steps.changelog_reader.outputs.status == 'prereleased'}} - DRAFT: ${{steps.changelog_reader.outputs.status == 'unreleased'}} - run: | - echo "TAG_NAME=${TAG_NAME}" - echo "RELEASE_NAME=${RELEASE_NAME}" - echo "BODY=${BODY}" - echo "PRERELEASE=${PRERELEASE}" - echo "DRAFT=${DRAFT}" - build: - needs: test-changelog - runs-on: ubuntu-latest - strategy: - matrix: - include: - - py_ver: 3.8 - distro: alpine - image: djbase - env: - PY_VER: ${{matrix.py_ver}} - DISTRO: ${{matrix.distro}} - IMAGE: ${{matrix.image}} - DOCKER_CLIENT_TIMEOUT: "120" - COMPOSE_HTTP_TIMEOUT: "120" - steps: - - uses: actions/checkout@v2 - - name: Compile image - run: | - export PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' 
not in p][0])") - export PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}') - export HOST_UID=$(id -u) - docker-compose -f docker-compose-build.yaml up --exit-code-from element --build - IMAGE=$(docker images --filter "reference=datajoint/${PKG_NAME}*" \ - --format "{{.Repository}}") - TAG=$(docker images --filter "reference=datajoint/${PKG_NAME}*" --format "{{.Tag}}") - docker save "${IMAGE}:${TAG}" | \ - gzip > "image-${PKG_NAME}-${PKG_VERSION}-py${PY_VER}-${DISTRO}.tar.gz" - echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV - echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV - - name: Add image artifact - uses: actions/upload-artifact@v2 - with: - name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}} - path: - "image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-\ - ${{matrix.distro}}.tar.gz" - retention-days: 1 - - if: matrix.py_ver == '3.8' && matrix.distro == 'alpine' - name: Add pip artifacts - uses: actions/upload-artifact@v2 - with: - name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}} - path: dist - retention-days: 1 - publish-release: - if: | - github.event_name == 'push' && - startsWith(github.ref, 'refs/tags') - needs: build - runs-on: ubuntu-latest - env: - TWINE_USERNAME: ${{secrets.twine_username}} - TWINE_PASSWORD: ${{secrets.twine_password}} - outputs: - release_upload_url: ${{steps.create_gh_release.outputs.upload_url}} - steps: - - uses: actions/checkout@v2 - - name: Determine package version - run: | - PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])") - SDIST_PKG_NAME=$(echo ${PKG_NAME} | sed 's|_|-|g') - PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}') - echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV - echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV - echo "SDIST_PKG_NAME=${SDIST_PKG_NAME}" >> $GITHUB_ENV - - name: Get changelog entry - id: changelog_reader - uses: guzman-raphael/changelog-reader-action@v5 - with: - path: ./CHANGELOG.md - version: ${{env.PKG_VERSION}} - - name: Create GH release - id: create_gh_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - with: - tag_name: ${{steps.changelog_reader.outputs.version}} - release_name: Release ${{steps.changelog_reader.outputs.version}} - body: ${{steps.changelog_reader.outputs.changes}} - prerelease: ${{steps.changelog_reader.outputs.status == 'prereleased'}} - draft: ${{steps.changelog_reader.outputs.status == 'unreleased'}} - - name: Fetch image artifact - uses: actions/download-artifact@v2 - with: - name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py3.8-alpine - - name: Fetch pip artifacts - uses: actions/download-artifact@v2 - with: - name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}} - path: dist - - name: Publish pip release - run: | - export HOST_UID=$(id -u) - docker load < "image-${{env.PKG_NAME}}-${PKG_VERSION}-py3.8-alpine.tar.gz" - docker-compose -f docker-compose-build.yaml run \ - -e TWINE_USERNAME=${TWINE_USERNAME} -e TWINE_PASSWORD=${TWINE_PASSWORD} element \ - sh -lc "pip install twine && python -m twine upload dist/*" - - name: Determine pip artifact paths - run: | - echo "PKG_WHEEL_PATH=$(ls dist/${PKG_NAME}-*.whl)" >> $GITHUB_ENV - echo "PKG_SDIST_PATH=$(ls dist/${SDIST_PKG_NAME}-*.tar.gz)" >> $GITHUB_ENV - - name: Upload pip wheel asset to release - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - with: - upload_url: 
${{steps.create_gh_release.outputs.upload_url}} - asset_path: ${{env.PKG_WHEEL_PATH}} - asset_name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}.whl - asset_content_type: application/zip - - name: Upload pip sdist asset to release - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - with: - upload_url: ${{steps.create_gh_release.outputs.upload_url}} - asset_path: ${{env.PKG_SDIST_PATH}} - asset_name: pip-${{env.SDIST_PKG_NAME}}-${{env.PKG_VERSION}}.tar.gz - asset_content_type: application/gzip - publish-docs: - if: | - github.event_name == 'push' && - startsWith(github.ref, 'refs/tags') - needs: build - runs-on: ubuntu-latest - env: - DOCKER_CLIENT_TIMEOUT: "120" - COMPOSE_HTTP_TIMEOUT: "120" - steps: - - uses: actions/checkout@v2 - - name: Deploy docs - run: | - export MODE=BUILD - export PACKAGE=element_array_ephys - export UPSTREAM_REPO=https://github.com/${GITHUB_REPOSITORY}.git - export HOST_UID=$(id -u) - docker compose -f docs/docker-compose.yaml up --exit-code-from docs --build - git push origin gh-pages diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 00000000..4a5f2cb5 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,27 @@ +name: Release +on: + workflow_dispatch: +jobs: + make_github_release: + uses: datajoint/.github/.github/workflows/make_github_release.yaml@main + pypi_release: + needs: make_github_release + uses: datajoint/.github/.github/workflows/pypi_release.yaml@main + secrets: + TWINE_USERNAME: ${{secrets.TWINE_USERNAME}} + TWINE_PASSWORD: ${{secrets.TWINE_PASSWORD}} + with: + UPLOAD_URL: ${{needs.make_github_release.outputs.release_upload_url}} + mkdocs_release: + uses: datajoint/.github/.github/workflows/mkdocs_release.yaml@main + permissions: + contents: write + devcontainer-build: + uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main + devcontainer-publish: + needs: + - devcontainer-build + uses: datajoint/.github/.github/workflows/devcontainer-publish.yaml@main + secrets: + DOCKERHUB_USERNAME: ${{secrets.DOCKERHUB_USERNAME}} + DOCKERHUB_TOKEN: ${{secrets.DOCKERHUB_TOKEN_FOR_ELEMENTS}} \ No newline at end of file diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 00000000..e5e6a07a --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,34 @@ +name: Test +on: + push: + pull_request: + workflow_dispatch: +jobs: + devcontainer-build: + uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main + tests: + runs-on: ubuntu-latest + strategy: + matrix: + py_ver: ["3.9", "3.10"] + mysql_ver: ["8.0", "5.7"] + include: + - py_ver: "3.8" + mysql_ver: "5.7" + - py_ver: "3.7" + mysql_ver: "5.7" + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{matrix.py_ver}} + uses: actions/setup-python@v4 + with: + python-version: ${{matrix.py_ver}} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 "black[jupyter]" + - name: Run style tests + run: | + python_version=${{matrix.py_ver}} + black element_array_ephys --check --verbose --target-version py${python_version//.} + diff --git a/.github/workflows/u24_element_before_release.yml b/.github/workflows/u24_element_before_release.yml deleted file mode 100644 index 692cf82e..00000000 --- a/.github/workflows/u24_element_before_release.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: u24_element_before_release -on: - pull_request: - push: - branches: - - '**' - tags-ignore: - - '**' - workflow_dispatch: -jobs: 
- call_context_check: - uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main - call_u24_elements_build_alpine: - uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_build.yaml@main - with: - py_ver: 3.9 - image: djbase diff --git a/.github/workflows/u24_element_release_call.yml b/.github/workflows/u24_element_release_call.yml deleted file mode 100644 index 8cd75d58..00000000 --- a/.github/workflows/u24_element_release_call.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: u24_element_release_call -on: - workflow_run: - workflows: ["u24_element_tag_to_release"] - types: - - completed -jobs: - call_context_check: - uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main - test_call_u24_elements_release_alpine: - if: >- - github.event.workflow_run.conclusion == 'success' && ( contains(github.event.workflow_run.head_branch, 'test') || (github.event.workflow_run.event == 'pull_request')) - uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_release.yaml@main - with: - py_ver: 3.9 - twine_repo: testpypi - secrets: - TWINE_USERNAME: ${{secrets.TWINE_TEST_USERNAME}} - TWINE_PASSWORD: ${{secrets.TWINE_TEST_PASSWORD}} - call_u24_elements_release_alpine: - if: >- - github.event.workflow_run.conclusion == 'success' && github.repository_owner == 'datajoint' && !contains(github.event.workflow_run.head_branch, 'test') - uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_release.yaml@main - with: - py_ver: 3.9 - secrets: - TWINE_USERNAME: ${{secrets.TWINE_USERNAME}} - TWINE_PASSWORD: ${{secrets.TWINE_PASSWORD}} - diff --git a/.github/workflows/u24_element_tag_to_release.yml b/.github/workflows/u24_element_tag_to_release.yml deleted file mode 100644 index 57334e9a..00000000 --- a/.github/workflows/u24_element_tag_to_release.yml +++ /dev/null @@ -1,14 +0,0 @@ -name: u24_element_tag_to_release -on: - push: - tags: - - '*.*.*' - - 'test*.*.*' -jobs: - call_context_check: - uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main - call_u24_elements_build_alpine: - uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_build.yaml@main - with: - py_ver: 3.9 - image: djbase From d1cf13f8595c0fe6d90a0dd029a65b73d8ecec4a Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 15:37:24 -0500 Subject: [PATCH 4/9] Update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a96a10c7..35f7120e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and + Update - Improve kilosort triggering routine - better logging, remove temporary files, robust resumable processing + Add - Null value for `package_version` to patch bug ++ Update - GitHub Actions workflows ++ Update - README instructions ## [0.2.10] - 2023-05-26 From c5306715508891428e69203172091664c6d34c7a Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 15:40:30 -0500 Subject: [PATCH 5/9] Update readme --- README.md | 46 +++++++++++++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index a73d45f1..d3fd8437 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,40 @@ -# DataJoint Element - Array Electrophysiology Element +[![PyPI version](https://badge.fury.io/py/element-array-ephys.svg)](http://badge.fury.io/py/element-array-ephys) -DataJoint Element for extracellular array electrophysiology. 
DataJoint Elements -collectively standardize and automate data collection and analysis for neuroscience -experiments. Each Element is a modular pipeline for data storage and processing with -corresponding database tables that can be combined with other Elements to assemble a -fully functional pipeline. +# DataJoint Element for Extracellular Electrophysiology -![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/diagram_flowchart.svg) +DataJoint Element for extracellular array electrophysiology that processes data +acquired with a polytrode probe +(e.g. [Neuropixels](https://www.neuropixels.org), Neuralynx) using the +[SpikeGLX](https://github.com/billkarsh/SpikeGLX) or +[OpenEphys](https://open-ephys.org/gui) acquisition software and processed with +[MATLAB-based Kilosort](https://github.com/MouseLand/Kilosort) or [python-based +Kilosort](https://github.com/MouseLand/pykilosort) spike sorting software. DataJoint +Elements collectively standardize and automate data collection and analysis for +neuroscience experiments. Each Element is a modular pipeline for data storage and +processing with corresponding database tables that can be combined with other Elements +to assemble a fully functional pipeline. -Installation and usage instructions can be found at the -[Element documentation](https://datajoint.com/docs/elements/element-array-ephys). +## Experiment flowchart + +![flowchart](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/diagram_flowchart.svg) + +## Data Pipeline Diagram + +![datajoint](https://raw.githubusercontent.com/datajoint/workflow-array-ephys/main/images/attached_array_ephys_element.svg) + + +## Getting Started + ++ Install from PyPI + + ```bash + pip install element-array-ephys + ``` + ++ [Interactive tutorial on GitHub Codespaces](https://github.com/datajoint/workflow-array-ephys#interactive-tutorial) + ++ [Documentation](https://datajoint.com/docs/elements/element-array-ephys) + +## Support + ++ If you need help getting started or run into any errors, please contact our team by email at support@datajoint.com. From 8a764e85f0645fb38d2d74f87fdfb73260bb2524 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 15:54:26 -0500 Subject: [PATCH 6/9] Update text --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d3fd8437..ce29558f 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ DataJoint Element for extracellular array electrophysiology that processes data acquired with a polytrode probe (e.g. [Neuropixels](https://www.neuropixels.org), Neuralynx) using the [SpikeGLX](https://github.com/billkarsh/SpikeGLX) or -[OpenEphys](https://open-ephys.org/gui) acquisition software and processed with +[OpenEphys](https://open-ephys.org/gui) acquisition software and [MATLAB-based Kilosort](https://github.com/MouseLand/Kilosort) or [python-based Kilosort](https://github.com/MouseLand/pykilosort) spike sorting software. DataJoint Elements collectively standardize and automate data collection and analysis for From 25578773db6a478c54486ebcfc9010d7c23fa87e Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 15:55:41 -0500 Subject: [PATCH 7/9] Update image path --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ce29558f..9324328f 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ to assemble a fully functional pipeline. 
## Data Pipeline Diagram -![datajoint](https://raw.githubusercontent.com/datajoint/workflow-array-ephys/main/images/attached_array_ephys_element.svg) +![datajoint](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_acute.svg) ## Getting Started From d5de59661c21992dbc9104f6ce8ca9c26e64cc91 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 16:01:51 -0500 Subject: [PATCH 8/9] Format with black --- element_array_ephys/ephys_acute.py | 2 +- element_array_ephys/ephys_chronic.py | 2 +- element_array_ephys/ephys_report.py | 9 +++------ element_array_ephys/export/nwb/nwb.py | 1 - element_array_ephys/plotting/qc.py | 2 +- element_array_ephys/plotting/unit_level.py | 1 - element_array_ephys/plotting/widget.py | 3 --- element_array_ephys/probe.py | 1 - element_array_ephys/readers/kilosort.py | 1 - element_array_ephys/readers/openephys.py | 1 - 10 files changed, 6 insertions(+), 17 deletions(-) diff --git a/element_array_ephys/ephys_acute.py b/element_array_ephys/ephys_acute.py index e86865e4..9b7b5c8d 100644 --- a/element_array_ephys/ephys_acute.py +++ b/element_array_ephys/ephys_acute.py @@ -910,7 +910,7 @@ def make(self, key): raise ValueError(f"Unknown task mode: {task_mode}") creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - self.insert1({**key, "clustering_time": creation_time, "package_version":""}) + self.insert1({**key, "clustering_time": creation_time, "package_version": ""}) @schema diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py index f121af1d..61c325a9 100644 --- a/element_array_ephys/ephys_chronic.py +++ b/element_array_ephys/ephys_chronic.py @@ -839,7 +839,7 @@ def make(self, key): raise ValueError(f"Unknown task mode: {task_mode}") creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - self.insert1({**key, "clustering_time": creation_time, "package_version":""}) + self.insert1({**key, "clustering_time": creation_time, "package_version": ""}) @schema diff --git a/element_array_ephys/ephys_report.py b/element_array_ephys/ephys_report.py index 8911505c..ce8f6cad 100644 --- a/element_array_ephys/ephys_report.py +++ b/element_array_ephys/ephys_report.py @@ -53,7 +53,6 @@ class ProbeLevelReport(dj.Computed): """ def make(self, key): - from .plotting.probe_level import plot_driftmap save_dir = _make_save_dir() @@ -63,7 +62,6 @@ def make(self, key): shanks = set((probe.ProbeType.Electrode & units).fetch("shank")) for shank_no in shanks: - table = units * ephys.ProbeInsertion * probe.ProbeType.Electrode & { "shank": shank_no } @@ -120,7 +118,6 @@ class UnitLevelReport(dj.Computed): """ def make(self, key): - from .plotting.unit_level import ( plot_auto_correlogram, plot_depth_waveforms, @@ -168,7 +165,7 @@ class QualityMetricCutoffs(dj.Lookup): isi_violations_maximum (float): Optional. ISI violation ratio cut-off. cutoffs_hash (uuid): uuid for the cut-off values. """ - + definition = """ cutoffs_id : smallint --- @@ -234,7 +231,7 @@ class QualityMetricSet(dj.Manual): ephys.QualityMetrics (foreign key): ephys.QualityMetrics primary key. QualityMetricCutoffs (foreign key): QualityMetricCutoffs primary key. """ - + definition = """ -> ephys.QualityMetrics -> QualityMetricCutoffs @@ -249,7 +246,7 @@ class QualityMetricReport(dj.Computed): QualityMetricSet (foreign key): QualityMetricSet primary key. plot_grid (longblob): Plotly figure object. 
""" - + definition = """ -> QualityMetricSet --- diff --git a/element_array_ephys/export/nwb/nwb.py b/element_array_ephys/export/nwb/nwb.py index d498d468..a45eb754 100644 --- a/element_array_ephys/export/nwb/nwb.py +++ b/element_array_ephys/export/nwb/nwb.py @@ -219,7 +219,6 @@ def create_units_table( (ephys.CuratedClustering.Unit & clustering_query.proj()).fetch(as_dict=True), desc=f"creating units table for paramset {paramset_record['paramset_idx']}", ): - probe_id, shank_num = ( ephys.ProbeInsertion * ephys.CuratedClustering.Unit diff --git a/element_array_ephys/plotting/qc.py b/element_array_ephys/plotting/qc.py index 7797bad9..16e88d3a 100644 --- a/element_array_ephys/plotting/qc.py +++ b/element_array_ephys/plotting/qc.py @@ -198,7 +198,7 @@ class initialization. return fig.add_trace( go.Scatter( x=histogram_bins[:-1], - y=gaussian_filter1d(histogram, 1), + y=gaussian_filter1d(histogram, 1), mode="lines", line=dict(color="rgb(0, 160, 223)", width=2 * scale), # DataJoint Blue hovertemplate="%{x:.2f}
%{y:.2f}", diff --git a/element_array_ephys/plotting/unit_level.py b/element_array_ephys/plotting/unit_level.py index a19b0fbe..54130916 100644 --- a/element_array_ephys/plotting/unit_level.py +++ b/element_array_ephys/plotting/unit_level.py @@ -183,7 +183,6 @@ def plot_depth_waveforms( # Plot figure fig = go.Figure() for electrode, wf, coord in zip(electrodes_to_plot, waveforms, coords): - wf_scaled = wf * y_scale_factor wf_scaled -= wf_scaled.mean() color = "red" if electrode == peak_electrode else "rgb(51, 76.5, 204)" diff --git a/element_array_ephys/plotting/widget.py b/element_array_ephys/plotting/widget.py index a26fc843..29338d59 100644 --- a/element_array_ephys/plotting/widget.py +++ b/element_array_ephys/plotting/widget.py @@ -11,7 +11,6 @@ def main(ephys: types.ModuleType) -> widgets: - # Build dropdown widgets probe_dropdown_wg = widgets.Dropdown( options=ephys.CuratedClustering & ephys_report.ProbeLevelReport, @@ -66,7 +65,6 @@ def probe_dropdown_evt(change): ) def plot_probe_widget(probe_key, shank): - fig_name = ( ephys_report.ProbeLevelReport & probe_key & f"shank={shank}" ).fetch1("drift_map_plot") @@ -92,7 +90,6 @@ def plot_probe_widget(probe_key, shank): display(go.FigureWidget(probe_fig)) def plot_unit_widget(unit): - waveform_fig, autocorrelogram_fig, depth_waveform_fig = ( ephys_report.UnitLevelReport & probe_dropdown_wg.value & f"unit={unit}" ).fetch1("waveform_plotly", "autocorrelogram_plotly", "depth_waveform_plotly") diff --git a/element_array_ephys/probe.py b/element_array_ephys/probe.py index 417fa3bc..497f1792 100644 --- a/element_array_ephys/probe.py +++ b/element_array_ephys/probe.py @@ -218,7 +218,6 @@ def build_electrode_layouts( shank_spacing: float = None, y_origin="bottom", ) -> list[dict]: - """Builds electrode layouts. 
Args: diff --git a/element_array_ephys/readers/kilosort.py b/element_array_ephys/readers/kilosort.py index abddee74..80ae5510 100644 --- a/element_array_ephys/readers/kilosort.py +++ b/element_array_ephys/readers/kilosort.py @@ -13,7 +13,6 @@ class Kilosort: - _kilosort_core_files = [ "params.py", "amplitudes.npy", diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py index d1f93231..db6097d8 100644 --- a/element_array_ephys/readers/openephys.py +++ b/element_array_ephys/readers/openephys.py @@ -135,7 +135,6 @@ def load_probe_data(self): # noqa: C901 probes[probe.probe_SN] = probe for probe_index, probe_SN in enumerate(probes): - probe = probes[probe_SN] for rec in self.experiment.recordings: From 48a1e768ad6b8cf05bf519cdcbf0e503aa73e613 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 29 Jun 2023 16:04:35 -0500 Subject: [PATCH 9/9] Temporarily remove Docker image builds --- .github/workflows/release.yaml | 18 +++++++++--------- .github/workflows/test.yaml | 4 ++-- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4a5f2cb5..9ae4ef02 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -16,12 +16,12 @@ jobs: uses: datajoint/.github/.github/workflows/mkdocs_release.yaml@main permissions: contents: write - devcontainer-build: - uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main - devcontainer-publish: - needs: - - devcontainer-build - uses: datajoint/.github/.github/workflows/devcontainer-publish.yaml@main - secrets: - DOCKERHUB_USERNAME: ${{secrets.DOCKERHUB_USERNAME}} - DOCKERHUB_TOKEN: ${{secrets.DOCKERHUB_TOKEN_FOR_ELEMENTS}} \ No newline at end of file + # devcontainer-build: + # uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main + # devcontainer-publish: + # needs: + # - devcontainer-build + # uses: datajoint/.github/.github/workflows/devcontainer-publish.yaml@main + # secrets: + # DOCKERHUB_USERNAME: ${{secrets.DOCKERHUB_USERNAME}} + # DOCKERHUB_TOKEN: ${{secrets.DOCKERHUB_TOKEN_FOR_ELEMENTS}} \ No newline at end of file diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index e5e6a07a..acaddca0 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -4,8 +4,8 @@ on: pull_request: workflow_dispatch: jobs: - devcontainer-build: - uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main + # devcontainer-build: + # uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main tests: runs-on: ubuntu-latest strategy:
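
---

The core change in this series (added in patch 1 and black-reformatted in patch 8) is that each `make()` method now passes an explicit empty string for `package_version` in its `insert1()` call instead of relying on the column default. The sketch below illustrates that pattern in isolation, assuming a configured DataJoint connection; the schema name, table definition, and timestamp are simplified placeholders, not the actual element-array-ephys code.

```python
import datajoint as dj

schema = dj.Schema("tutorial_ephys")  # placeholder schema name


@schema
class Clustering(dj.Imported):
    """Simplified stand-in for the element's Clustering table."""

    definition = """
    clustering_id : int
    ---
    clustering_time : datetime        # time the clustering results were generated
    package_version='' : varchar(16)  # attribute with an empty-string default
    """

    def make(self, key):
        # ... locate the spike-sorting output and read its creation time ...
        creation_time = "2023-06-29 13:48:50"  # placeholder timestamp

        # Supplying "" explicitly, rather than omitting the attribute and
        # relying on the column default, is the workaround this patch series applies.
        self.insert1(
            {**key, "clustering_time": creation_time, "package_version": ""}
        )
```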