diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml
deleted file mode 100644
index 9ad02579..00000000
--- a/.github/workflows/development.yaml
+++ /dev/null
@@ -1,177 +0,0 @@
-name: Development
-on:
- push:
- branches:
- - '**' # every branch
- - '!gh-pages' # exclude gh-pages branch
- - '!stage*' # exclude branches beginning with stage
- tags:
- - '\d+\.\d+\.\d+' # only semver tags
- pull_request:
- branches:
- - '**' # every branch
- - '!gh-pages' # exclude gh-pages branch
- - '!stage*' # exclude branches beginning with stage
-jobs:
- test-changelog:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Get changelog entry
- id: changelog_reader
- uses: guzman-raphael/changelog-reader-action@v5
- with:
- path: ./CHANGELOG.md
- - name: Verify changelog parsing
- env:
- TAG_NAME: ${{steps.changelog_reader.outputs.version}}
- RELEASE_NAME: Release ${{steps.changelog_reader.outputs.version}}
- BODY: ${{steps.changelog_reader.outputs.changes}}
- PRERELEASE: ${{steps.changelog_reader.outputs.status == 'prereleased'}}
- DRAFT: ${{steps.changelog_reader.outputs.status == 'unreleased'}}
- run: |
- echo "TAG_NAME=${TAG_NAME}"
- echo "RELEASE_NAME=${RELEASE_NAME}"
- echo "BODY=${BODY}"
- echo "PRERELEASE=${PRERELEASE}"
- echo "DRAFT=${DRAFT}"
- build:
- needs: test-changelog
- runs-on: ubuntu-latest
- strategy:
- matrix:
- include:
- - py_ver: 3.8
- distro: alpine
- image: djbase
- env:
- PY_VER: ${{matrix.py_ver}}
- DISTRO: ${{matrix.distro}}
- IMAGE: ${{matrix.image}}
- DOCKER_CLIENT_TIMEOUT: "120"
- COMPOSE_HTTP_TIMEOUT: "120"
- steps:
- - uses: actions/checkout@v2
- - name: Compile image
- run: |
- export PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])")
- export PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}')
- export HOST_UID=$(id -u)
- docker-compose -f docker-compose-build.yaml up --exit-code-from element --build
- IMAGE=$(docker images --filter "reference=datajoint/${PKG_NAME}*" \
- --format "{{.Repository}}")
- TAG=$(docker images --filter "reference=datajoint/${PKG_NAME}*" --format "{{.Tag}}")
- docker save "${IMAGE}:${TAG}" | \
- gzip > "image-${PKG_NAME}-${PKG_VERSION}-py${PY_VER}-${DISTRO}.tar.gz"
- echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV
- echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV
- - name: Add image artifact
- uses: actions/upload-artifact@v2
- with:
- name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}}
- path:
- "image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-\
- ${{matrix.distro}}.tar.gz"
- retention-days: 1
- - if: matrix.py_ver == '3.8' && matrix.distro == 'alpine'
- name: Add pip artifacts
- uses: actions/upload-artifact@v2
- with:
- name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}
- path: dist
- retention-days: 1
- publish-release:
- if: |
- github.event_name == 'push' &&
- startsWith(github.ref, 'refs/tags')
- needs: build
- runs-on: ubuntu-latest
- env:
- TWINE_USERNAME: ${{secrets.twine_username}}
- TWINE_PASSWORD: ${{secrets.twine_password}}
- outputs:
- release_upload_url: ${{steps.create_gh_release.outputs.upload_url}}
- steps:
- - uses: actions/checkout@v2
- - name: Determine package version
- run: |
- PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])")
- SDIST_PKG_NAME=$(echo ${PKG_NAME} | sed 's|_|-|g')
- PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}')
- echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV
- echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV
- echo "SDIST_PKG_NAME=${SDIST_PKG_NAME}" >> $GITHUB_ENV
- - name: Get changelog entry
- id: changelog_reader
- uses: guzman-raphael/changelog-reader-action@v5
- with:
- path: ./CHANGELOG.md
- version: ${{env.PKG_VERSION}}
- - name: Create GH release
- id: create_gh_release
- uses: actions/create-release@v1
- env:
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- with:
- tag_name: ${{steps.changelog_reader.outputs.version}}
- release_name: Release ${{steps.changelog_reader.outputs.version}}
- body: ${{steps.changelog_reader.outputs.changes}}
- prerelease: ${{steps.changelog_reader.outputs.status == 'prereleased'}}
- draft: ${{steps.changelog_reader.outputs.status == 'unreleased'}}
- - name: Fetch image artifact
- uses: actions/download-artifact@v2
- with:
- name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py3.8-alpine
- - name: Fetch pip artifacts
- uses: actions/download-artifact@v2
- with:
- name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}
- path: dist
- - name: Publish pip release
- run: |
- export HOST_UID=$(id -u)
- docker load < "image-${{env.PKG_NAME}}-${PKG_VERSION}-py3.8-alpine.tar.gz"
- docker-compose -f docker-compose-build.yaml run \
- -e TWINE_USERNAME=${TWINE_USERNAME} -e TWINE_PASSWORD=${TWINE_PASSWORD} element \
- sh -lc "pip install twine && python -m twine upload dist/*"
- - name: Determine pip artifact paths
- run: |
- echo "PKG_WHEEL_PATH=$(ls dist/${PKG_NAME}-*.whl)" >> $GITHUB_ENV
- echo "PKG_SDIST_PATH=$(ls dist/${SDIST_PKG_NAME}-*.tar.gz)" >> $GITHUB_ENV
- - name: Upload pip wheel asset to release
- uses: actions/upload-release-asset@v1
- env:
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- with:
- upload_url: ${{steps.create_gh_release.outputs.upload_url}}
- asset_path: ${{env.PKG_WHEEL_PATH}}
- asset_name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}.whl
- asset_content_type: application/zip
- - name: Upload pip sdist asset to release
- uses: actions/upload-release-asset@v1
- env:
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
- with:
- upload_url: ${{steps.create_gh_release.outputs.upload_url}}
- asset_path: ${{env.PKG_SDIST_PATH}}
- asset_name: pip-${{env.SDIST_PKG_NAME}}-${{env.PKG_VERSION}}.tar.gz
- asset_content_type: application/gzip
- publish-docs:
- if: |
- github.event_name == 'push' &&
- startsWith(github.ref, 'refs/tags')
- needs: build
- runs-on: ubuntu-latest
- env:
- DOCKER_CLIENT_TIMEOUT: "120"
- COMPOSE_HTTP_TIMEOUT: "120"
- steps:
- - uses: actions/checkout@v2
- - name: Deploy docs
- run: |
- export MODE=BUILD
- export PACKAGE=element_array_ephys
- export UPSTREAM_REPO=https://github.com/${GITHUB_REPOSITORY}.git
- export HOST_UID=$(id -u)
- docker compose -f docs/docker-compose.yaml up --exit-code-from docs --build
- git push origin gh-pages
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
new file mode 100644
index 00000000..9ae4ef02
--- /dev/null
+++ b/.github/workflows/release.yaml
@@ -0,0 +1,27 @@
+name: Release
+on:
+ workflow_dispatch:
+jobs:
+ make_github_release:
+ uses: datajoint/.github/.github/workflows/make_github_release.yaml@main
+ pypi_release:
+ needs: make_github_release
+ uses: datajoint/.github/.github/workflows/pypi_release.yaml@main
+ secrets:
+ TWINE_USERNAME: ${{secrets.TWINE_USERNAME}}
+ TWINE_PASSWORD: ${{secrets.TWINE_PASSWORD}}
+ with:
+ UPLOAD_URL: ${{needs.make_github_release.outputs.release_upload_url}}
+ mkdocs_release:
+ uses: datajoint/.github/.github/workflows/mkdocs_release.yaml@main
+ permissions:
+ contents: write
+ # devcontainer-build:
+ # uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main
+ # devcontainer-publish:
+ # needs:
+ # - devcontainer-build
+ # uses: datajoint/.github/.github/workflows/devcontainer-publish.yaml@main
+ # secrets:
+ # DOCKERHUB_USERNAME: ${{secrets.DOCKERHUB_USERNAME}}
+ # DOCKERHUB_TOKEN: ${{secrets.DOCKERHUB_TOKEN_FOR_ELEMENTS}}
\ No newline at end of file
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
new file mode 100644
index 00000000..acaddca0
--- /dev/null
+++ b/.github/workflows/test.yaml
@@ -0,0 +1,34 @@
+name: Test
+on:
+ push:
+ pull_request:
+ workflow_dispatch:
+jobs:
+ # devcontainer-build:
+ # uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main
+ tests:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ py_ver: ["3.9", "3.10"]
+ mysql_ver: ["8.0", "5.7"]
+ include:
+ - py_ver: "3.8"
+ mysql_ver: "5.7"
+ - py_ver: "3.7"
+ mysql_ver: "5.7"
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python ${{matrix.py_ver}}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{matrix.py_ver}}
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install flake8 "black[jupyter]"
+ - name: Run style tests
+ run: |
+ python_version=${{matrix.py_ver}}
+ black element_array_ephys --check --verbose --target-version py${python_version//.}
+
diff --git a/.github/workflows/u24_element_before_release.yml b/.github/workflows/u24_element_before_release.yml
deleted file mode 100644
index 692cf82e..00000000
--- a/.github/workflows/u24_element_before_release.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-name: u24_element_before_release
-on:
- pull_request:
- push:
- branches:
- - '**'
- tags-ignore:
- - '**'
- workflow_dispatch:
-jobs:
- call_context_check:
- uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main
- call_u24_elements_build_alpine:
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_build.yaml@main
- with:
- py_ver: 3.9
- image: djbase
diff --git a/.github/workflows/u24_element_release_call.yml b/.github/workflows/u24_element_release_call.yml
deleted file mode 100644
index 8cd75d58..00000000
--- a/.github/workflows/u24_element_release_call.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: u24_element_release_call
-on:
- workflow_run:
- workflows: ["u24_element_tag_to_release"]
- types:
- - completed
-jobs:
- call_context_check:
- uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main
- test_call_u24_elements_release_alpine:
- if: >-
- github.event.workflow_run.conclusion == 'success' && ( contains(github.event.workflow_run.head_branch, 'test') || (github.event.workflow_run.event == 'pull_request'))
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_release.yaml@main
- with:
- py_ver: 3.9
- twine_repo: testpypi
- secrets:
- TWINE_USERNAME: ${{secrets.TWINE_TEST_USERNAME}}
- TWINE_PASSWORD: ${{secrets.TWINE_TEST_PASSWORD}}
- call_u24_elements_release_alpine:
- if: >-
- github.event.workflow_run.conclusion == 'success' && github.repository_owner == 'datajoint' && !contains(github.event.workflow_run.head_branch, 'test')
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_release.yaml@main
- with:
- py_ver: 3.9
- secrets:
- TWINE_USERNAME: ${{secrets.TWINE_USERNAME}}
- TWINE_PASSWORD: ${{secrets.TWINE_PASSWORD}}
-
diff --git a/.github/workflows/u24_element_tag_to_release.yml b/.github/workflows/u24_element_tag_to_release.yml
deleted file mode 100644
index 57334e9a..00000000
--- a/.github/workflows/u24_element_tag_to_release.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-name: u24_element_tag_to_release
-on:
- push:
- tags:
- - '*.*.*'
- - 'test*.*.*'
-jobs:
- call_context_check:
- uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main
- call_u24_elements_build_alpine:
- uses: dj-sciops/djsciops-cicd/.github/workflows/u24_element_build.yaml@main
- with:
- py_ver: 3.9
- image: djbase
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 11579ee1..35f7120e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,9 +3,12 @@
Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and
[Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention.
-## [Unreleased] - 2023-06-23
+## [0.2.11] - 2023-06-29
+ Update - Improve kilosort triggering routine - better logging, remove temporary files, robust resumable processing
++ Add - Null value for `package_version` to patch a bug
++ Update - GitHub Actions workflows
++ Update - README instructions
## [0.2.10] - 2023-05-26
diff --git a/README.md b/README.md
index a73d45f1..9324328f 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,40 @@
-# DataJoint Element - Array Electrophysiology Element
+[![PyPI version](https://badge.fury.io/py/element-array-ephys.svg)](http://badge.fury.io/py/element-array-ephys)
-DataJoint Element for extracellular array electrophysiology. DataJoint Elements
-collectively standardize and automate data collection and analysis for neuroscience
-experiments. Each Element is a modular pipeline for data storage and processing with
-corresponding database tables that can be combined with other Elements to assemble a
-fully functional pipeline.
+# DataJoint Element for Extracellular Electrophysiology
-![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/diagram_flowchart.svg)
+DataJoint Element for extracellular array electrophysiology that processes data
+acquired with a polytrode probe
+(e.g., [Neuropixels](https://www.neuropixels.org), Neuralynx) using the
+[SpikeGLX](https://github.com/billkarsh/SpikeGLX) or
+[OpenEphys](https://open-ephys.org/gui) acquisition software and the
+[MATLAB-based Kilosort](https://github.com/MouseLand/Kilosort) or [Python-based
+Kilosort](https://github.com/MouseLand/pykilosort) spike sorting software. DataJoint
+Elements collectively standardize and automate data collection and analysis for
+neuroscience experiments. Each Element is a modular pipeline for data storage and
+processing with corresponding database tables that can be combined with other Elements
+to assemble a fully functional pipeline.
-Installation and usage instructions can be found at the
-[Element documentation](https://datajoint.com/docs/elements/element-array-ephys).
+## Experiment Flowchart
+
+![flowchart](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/diagram_flowchart.svg)
+
+## Data Pipeline Diagram
+
+![datajoint](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_acute.svg)
+
+
+## Getting Started
+
++ Install from PyPI
+
+ ```bash
+ pip install element-array-ephys
+ ```
+
++ [Interactive tutorial on GitHub Codespaces](https://github.com/datajoint/workflow-array-ephys#interactive-tutorial)
+
++ [Documentation](https://datajoint.com/docs/elements/element-array-ephys)
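++ Activate the Element in your pipeline (a minimal sketch; the schema names and the
+  `my_workflow.pipeline` linking module below are hypothetical placeholders, and the
+  upstream tables and path-lookup functions that module must provide are described in
+  the [documentation](https://datajoint.com/docs/elements/element-array-ephys))
+
+  ```python
+  from element_array_ephys import ephys_acute as ephys
+
+  # Placeholder schema names and linking module; adapt these to your own pipeline.
+  # The linking module supplies the upstream tables (e.g. Session) and the
+  # root-directory lookup functions that the Element expects.
+  ephys.activate(
+      "my_ephys",   # schema for the ephys tables
+      "my_probe",   # schema for the probe tables
+      linking_module="my_workflow.pipeline",
+  )
+  ```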
+
+## Support
+
++ If you need help getting started or run into any errors, please contact our team by email at support@datajoint.com.
diff --git a/element_array_ephys/ephys_acute.py b/element_array_ephys/ephys_acute.py
index d7abec62..9b7b5c8d 100644
--- a/element_array_ephys/ephys_acute.py
+++ b/element_array_ephys/ephys_acute.py
@@ -910,7 +910,7 @@ def make(self, key):
raise ValueError(f"Unknown task mode: {task_mode}")
creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir)
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py
index 9c2bc853..61c325a9 100644
--- a/element_array_ephys/ephys_chronic.py
+++ b/element_array_ephys/ephys_chronic.py
@@ -839,7 +839,7 @@ def make(self, key):
raise ValueError(f"Unknown task mode: {task_mode}")
creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir)
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py
index 70c4d079..bc297044 100644
--- a/element_array_ephys/ephys_no_curation.py
+++ b/element_array_ephys/ephys_no_curation.py
@@ -916,7 +916,7 @@ def make(self, key):
raise ValueError(f"Unknown task mode: {task_mode}")
creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir)
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
diff --git a/element_array_ephys/ephys_precluster.py b/element_array_ephys/ephys_precluster.py
index 9308d7bc..8c573a4c 100644
--- a/element_array_ephys/ephys_precluster.py
+++ b/element_array_ephys/ephys_precluster.py
@@ -518,7 +518,7 @@ def make(self, key):
else:
raise ValueError(f"Unknown task mode: {task_mode}")
- self.insert1({**key, "precluster_time": creation_time})
+ self.insert1({**key, "precluster_time": creation_time, "package_version": ""})
@schema
@@ -832,7 +832,7 @@ def make(self, key):
else:
raise ValueError(f"Unknown task mode: {task_mode}")
- self.insert1({**key, "clustering_time": creation_time})
+ self.insert1({**key, "clustering_time": creation_time, "package_version": ""})
@schema
diff --git a/element_array_ephys/ephys_report.py b/element_array_ephys/ephys_report.py
index 8911505c..ce8f6cad 100644
--- a/element_array_ephys/ephys_report.py
+++ b/element_array_ephys/ephys_report.py
@@ -53,7 +53,6 @@ class ProbeLevelReport(dj.Computed):
"""
def make(self, key):
-
from .plotting.probe_level import plot_driftmap
save_dir = _make_save_dir()
@@ -63,7 +62,6 @@ def make(self, key):
shanks = set((probe.ProbeType.Electrode & units).fetch("shank"))
for shank_no in shanks:
-
table = units * ephys.ProbeInsertion * probe.ProbeType.Electrode & {
"shank": shank_no
}
@@ -120,7 +118,6 @@ class UnitLevelReport(dj.Computed):
"""
def make(self, key):
-
from .plotting.unit_level import (
plot_auto_correlogram,
plot_depth_waveforms,
@@ -168,7 +165,7 @@ class QualityMetricCutoffs(dj.Lookup):
isi_violations_maximum (float): Optional. ISI violation ratio cut-off.
cutoffs_hash (uuid): uuid for the cut-off values.
"""
-
+
definition = """
cutoffs_id : smallint
---
@@ -234,7 +231,7 @@ class QualityMetricSet(dj.Manual):
ephys.QualityMetrics (foreign key): ephys.QualityMetrics primary key.
QualityMetricCutoffs (foreign key): QualityMetricCutoffs primary key.
"""
-
+
definition = """
-> ephys.QualityMetrics
-> QualityMetricCutoffs
@@ -249,7 +246,7 @@ class QualityMetricReport(dj.Computed):
QualityMetricSet (foreign key): QualityMetricSet primary key.
plot_grid (longblob): Plotly figure object.
"""
-
+
definition = """
-> QualityMetricSet
---
diff --git a/element_array_ephys/export/nwb/nwb.py b/element_array_ephys/export/nwb/nwb.py
index d498d468..a45eb754 100644
--- a/element_array_ephys/export/nwb/nwb.py
+++ b/element_array_ephys/export/nwb/nwb.py
@@ -219,7 +219,6 @@ def create_units_table(
(ephys.CuratedClustering.Unit & clustering_query.proj()).fetch(as_dict=True),
desc=f"creating units table for paramset {paramset_record['paramset_idx']}",
):
-
probe_id, shank_num = (
ephys.ProbeInsertion
* ephys.CuratedClustering.Unit
diff --git a/element_array_ephys/plotting/qc.py b/element_array_ephys/plotting/qc.py
index 7797bad9..16e88d3a 100644
--- a/element_array_ephys/plotting/qc.py
+++ b/element_array_ephys/plotting/qc.py
@@ -198,7 +198,7 @@ class initialization.
return fig.add_trace(
go.Scatter(
x=histogram_bins[:-1],
- y=gaussian_filter1d(histogram, 1),
+ y=gaussian_filter1d(histogram, 1),
mode="lines",
line=dict(color="rgb(0, 160, 223)", width=2 * scale), # DataJoint Blue
hovertemplate="%{x:.2f}
%{y:.2f}",
diff --git a/element_array_ephys/plotting/unit_level.py b/element_array_ephys/plotting/unit_level.py
index a19b0fbe..54130916 100644
--- a/element_array_ephys/plotting/unit_level.py
+++ b/element_array_ephys/plotting/unit_level.py
@@ -183,7 +183,6 @@ def plot_depth_waveforms(
# Plot figure
fig = go.Figure()
for electrode, wf, coord in zip(electrodes_to_plot, waveforms, coords):
-
wf_scaled = wf * y_scale_factor
wf_scaled -= wf_scaled.mean()
color = "red" if electrode == peak_electrode else "rgb(51, 76.5, 204)"
diff --git a/element_array_ephys/plotting/widget.py b/element_array_ephys/plotting/widget.py
index a26fc843..29338d59 100644
--- a/element_array_ephys/plotting/widget.py
+++ b/element_array_ephys/plotting/widget.py
@@ -11,7 +11,6 @@
def main(ephys: types.ModuleType) -> widgets:
-
# Build dropdown widgets
probe_dropdown_wg = widgets.Dropdown(
options=ephys.CuratedClustering & ephys_report.ProbeLevelReport,
@@ -66,7 +65,6 @@ def probe_dropdown_evt(change):
)
def plot_probe_widget(probe_key, shank):
-
fig_name = (
ephys_report.ProbeLevelReport & probe_key & f"shank={shank}"
).fetch1("drift_map_plot")
@@ -92,7 +90,6 @@ def plot_probe_widget(probe_key, shank):
display(go.FigureWidget(probe_fig))
def plot_unit_widget(unit):
-
waveform_fig, autocorrelogram_fig, depth_waveform_fig = (
ephys_report.UnitLevelReport & probe_dropdown_wg.value & f"unit={unit}"
).fetch1("waveform_plotly", "autocorrelogram_plotly", "depth_waveform_plotly")
diff --git a/element_array_ephys/probe.py b/element_array_ephys/probe.py
index 417fa3bc..497f1792 100644
--- a/element_array_ephys/probe.py
+++ b/element_array_ephys/probe.py
@@ -218,7 +218,6 @@ def build_electrode_layouts(
shank_spacing: float = None,
y_origin="bottom",
) -> list[dict]:
-
"""Builds electrode layouts.
Args:
diff --git a/element_array_ephys/readers/kilosort.py b/element_array_ephys/readers/kilosort.py
index abddee74..80ae5510 100644
--- a/element_array_ephys/readers/kilosort.py
+++ b/element_array_ephys/readers/kilosort.py
@@ -13,7 +13,6 @@
class Kilosort:
-
_kilosort_core_files = [
"params.py",
"amplitudes.npy",
diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py
index d1f93231..db6097d8 100644
--- a/element_array_ephys/readers/openephys.py
+++ b/element_array_ephys/readers/openephys.py
@@ -135,7 +135,6 @@ def load_probe_data(self): # noqa: C901
probes[probe.probe_SN] = probe
for probe_index, probe_SN in enumerate(probes):
-
probe = probes[probe_SN]
for rec in self.experiment.recordings:
diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py
index d2ea9f20..122aedf1 100644
--- a/element_array_ephys/version.py
+++ b/element_array_ephys/version.py
@@ -1,2 +1,2 @@
"""Package metadata."""
-__version__ = "0.2.10"
+__version__ = "0.2.11"